// NOTE(review): This file is Jimple (Soot) intermediate representation, not Java
// source — the ".java" hint from metadata is wrong. It is a disassembly of the
// shaded ("oadd."-prefixed) Hadoop class org.apache.hadoop.fs.FSDataInputStream,
// a wrapper stream that delegates Seekable/PositionedReadable/ByteBufferReadable
// (etc.) operations to the wrapped java.io.InputStream held in the inherited
// FilterInputStream field "in".
//
// CAUTION: the disassembler collapsed every local to the name "v" and every
// branch target to the bare name "label". Locals and jump targets below are
// therefore ambiguous; read the statement order, not the names. Where control
// flow cannot be recovered from the text alone, comments below are hedged.
public class oadd.org.apache.hadoop.fs.FSDataInputStream extends java.io.DataInputStream implements oadd.org.apache.hadoop.fs.Seekable, oadd.org.apache.hadoop.fs.PositionedReadable, oadd.org.apache.hadoop.fs.ByteBufferReadable, oadd.org.apache.hadoop.fs.HasFileDescriptor, oadd.org.apache.hadoop.fs.CanSetDropBehind, oadd.org.apache.hadoop.fs.CanSetReadahead, oadd.org.apache.hadoop.fs.HasEnhancedByteBufferAccess, oadd.org.apache.hadoop.fs.CanUnbuffer, oadd.org.apache.hadoop.fs.StreamCapabilities
{
// Records ByteBuffers handed out by the fallback path of
// read(ByteBufferPool,int,EnumSet) (see L226-L233) so that releaseBuffer()
// can later look them up and return them to their pool.
private final oadd.org.apache.hadoop.util.IdentityHashStore extendedReadBuffers;
// Shared empty EnumSet (of ReadOption, per <clinit>) used by the two-argument
// read(ByteBufferPool,int) overload.
private static final java.util.EnumSet EMPTY_READ_OPTIONS_SET;
// Constructor: wraps the given InputStream via the DataInputStream super
// constructor, allocates the extendedReadBuffers store, then validates the
// wrapped stream's capabilities. The branch structure checks
// "instanceof Seekable" and "instanceof PositionedReadable" and throws
// IllegalArgumentException when the checks fail — presumably requiring the
// stream to be BOTH (matching the error message), but the collapsed "label"
// names make the exact branch targets unverifiable from this text.
public void <init>(java.io.InputStream)
{
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.lang.IllegalArgumentException v;
oadd.org.apache.hadoop.util.IdentityHashStore v;
boolean v, v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: java.io.InputStream;
// Delegate to DataInputStream(InputStream), which stores the stream in "in".
specialinvoke v.<java.io.DataInputStream: void <init>(java.io.InputStream)>(v);
v = new oadd.org.apache.hadoop.util.IdentityHashStore;
// Initial capacity 0 — the store grows only if enhanced-read fallback is used.
specialinvoke v.<oadd.org.apache.hadoop.util.IdentityHashStore: void <init>(int)>(0);
v.<oadd.org.apache.hadoop.fs.FSDataInputStream: oadd.org.apache.hadoop.util.IdentityHashStore extendedReadBuffers> = v;
v = v instanceof oadd.org.apache.hadoop.fs.Seekable;
// Not Seekable -> jump (presumably to the throw below).
if v == 0 goto label;
v = v instanceof oadd.org.apache.hadoop.fs.PositionedReadable;
// PositionedReadable -> jump (presumably to the normal return).
if v != 0 goto label;
label:
v = new java.lang.IllegalArgumentException;
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>("In is not an instance of Seekable or PositionedReadable");
throw v;
label:
return;
}
// Delegates seek(long) to the wrapped stream, cast to Seekable.
public void seek(long) throws java.io.IOException
{
long v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: long;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
interfaceinvoke v.<oadd.org.apache.hadoop.fs.Seekable: void seek(long)>(v);
return;
}
// Returns the current stream position via Seekable.getPos() on the wrapped stream.
public long getPos() throws java.io.IOException
{
long v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.fs.Seekable: long getPos()>();
return v;
}
// Positional read: delegates read(position, buffer, offset, length) to the
// wrapped stream, cast to PositionedReadable; returns the byte count it reports.
public int read(long, byte[], int, int) throws java.io.IOException
{
byte[] v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
int v, v, v;
long v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: long;
v := @parameter: byte[];
v := @parameter: int;
v := @parameter: int;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.fs.PositionedReadable: int read(long,byte[],int,int)>(v, v, v, v);
return v;
}
// Delegates readFully(position, buffer, offset, length) to PositionedReadable.
public void readFully(long, byte[], int, int) throws java.io.IOException
{
byte[] v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
int v, v;
long v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: long;
v := @parameter: byte[];
v := @parameter: int;
v := @parameter: int;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
interfaceinvoke v.<oadd.org.apache.hadoop.fs.PositionedReadable: void readFully(long,byte[],int,int)>(v, v, v, v);
return;
}
// Convenience overload: fills the whole buffer, i.e.
// readFully(position, buffer, 0, buffer.length).
public void readFully(long, byte[]) throws java.io.IOException
{
byte[] v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
int v;
long v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: long;
v := @parameter: byte[];
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
// lengthof = array length of the byte[] parameter.
v = lengthof v;
interfaceinvoke v.<oadd.org.apache.hadoop.fs.PositionedReadable: void readFully(long,byte[],int,int)>(v, v, 0, v);
return;
}
// Delegates seekToNewSource(targetPos) to Seekable on the wrapped stream.
public boolean seekToNewSource(long) throws java.io.IOException
{
oadd.org.apache.hadoop.fs.FSDataInputStream v;
long v;
boolean v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: long;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.fs.Seekable: boolean seekToNewSource(long)>(v);
return v;
}
// Exposes the wrapped InputStream (the inherited FilterInputStream "in" field).
public java.io.InputStream getWrappedStream()
{
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
return v;
}
// ByteBuffer read: delegates when the wrapped stream implements
// ByteBufferReadable; otherwise throws UnsupportedOperationException.
public int read(java.nio.ByteBuffer) throws java.io.IOException
{
java.lang.UnsupportedOperationException v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
int v;
java.nio.ByteBuffer v;
boolean v;
java.io.InputStream v, v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: java.nio.ByteBuffer;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = v instanceof oadd.org.apache.hadoop.fs.ByteBufferReadable;
// Not ByteBufferReadable -> jump to the throw below.
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.fs.ByteBufferReadable: int read(java.nio.ByteBuffer)>(v);
return v;
label:
v = new java.lang.UnsupportedOperationException;
specialinvoke v.<java.lang.UnsupportedOperationException: void <init>(java.lang.String)>("Byte-buffer read unsupported by input stream");
throw v;
}
// Returns the file descriptor of the wrapped stream: first via
// HasFileDescriptor if implemented, then via FileInputStream.getFD() if the
// stream is a FileInputStream, otherwise null.
public java.io.FileDescriptor getFileDescriptor() throws java.io.IOException
{
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.io.FileDescriptor v, v;
boolean v, v;
java.io.InputStream v, v, v, v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = v instanceof oadd.org.apache.hadoop.fs.HasFileDescriptor;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.fs.HasFileDescriptor: java.io.FileDescriptor getFileDescriptor()>();
return v;
label:
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = v instanceof java.io.FileInputStream;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = virtualinvoke v.<java.io.FileInputStream: java.io.FileDescriptor getFD()>();
return v;
label:
// Neither capability present: no descriptor available.
return null;
}
// Forwards setReadahead to the wrapped stream cast to CanSetReadahead.
// The cast's ClassCastException (see the catch directive at the bottom) is
// translated into UnsupportedOperationException.
public void setReadahead(java.lang.Long) throws java.io.IOException, java.lang.UnsupportedOperationException
{
java.lang.UnsupportedOperationException v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.lang.Long v;
java.lang.ClassCastException v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: java.lang.Long;
label:
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
interfaceinvoke v.<oadd.org.apache.hadoop.fs.CanSetReadahead: void setReadahead(java.lang.Long)>(v);
label:
goto label;
label:
// ClassCastException handler: the wrapped stream is not CanSetReadahead.
v := @caughtexception;
v = new java.lang.UnsupportedOperationException;
specialinvoke v.<java.lang.UnsupportedOperationException: void <init>(java.lang.String)>("this stream does not support setting the readahead caching strategy.");
throw v;
label:
return;
catch java.lang.ClassCastException from label to label with label;
}
// Forwards setDropBehind to the wrapped stream cast to CanSetDropBehind;
// same ClassCastException -> UnsupportedOperationException translation as above.
public void setDropBehind(java.lang.Boolean) throws java.io.IOException, java.lang.UnsupportedOperationException
{
java.lang.UnsupportedOperationException v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.lang.Boolean v;
java.lang.ClassCastException v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: java.lang.Boolean;
label:
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
interfaceinvoke v.<oadd.org.apache.hadoop.fs.CanSetDropBehind: void setDropBehind(java.lang.Boolean)>(v);
label:
goto label;
label:
// ClassCastException handler: the wrapped stream is not CanSetDropBehind.
v := @caughtexception;
v = new java.lang.UnsupportedOperationException;
specialinvoke v.<java.lang.UnsupportedOperationException: void <init>(java.lang.String)>("this stream does not support setting the drop-behind caching setting.");
throw v;
label:
return;
catch java.lang.ClassCastException from label to label with label;
}
// Enhanced byte-buffer read: first tries the wrapped stream as
// HasEnhancedByteBufferAccess. If that cast fails (ClassCastException handler
// below), falls back to ByteBufferUtil.fallbackRead and, when the fallback
// produced a non-null buffer, records it in extendedReadBuffers — presumably
// keyed by buffer with the pool as value, so releaseBuffer() can return it;
// the collapsed "v" names make the put() argument order unverifiable here.
public java.nio.ByteBuffer read(oadd.org.apache.hadoop.io.ByteBufferPool, int, java.util.EnumSet) throws java.io.IOException, java.lang.UnsupportedOperationException
{
oadd.org.apache.hadoop.io.ByteBufferPool v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.lang.ClassCastException v;
java.nio.ByteBuffer v, v;
int v;
oadd.org.apache.hadoop.util.IdentityHashStore v;
java.util.EnumSet v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: oadd.org.apache.hadoop.io.ByteBufferPool;
v := @parameter: int;
v := @parameter: java.util.EnumSet;
label:
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.fs.HasEnhancedByteBufferAccess: java.nio.ByteBuffer read(oadd.org.apache.hadoop.io.ByteBufferPool,int,java.util.EnumSet)>(v, v, v);
label:
return v;
label:
// Fallback path for streams without HasEnhancedByteBufferAccess.
v := @caughtexception;
v = staticinvoke <oadd.org.apache.hadoop.fs.ByteBufferUtil: java.nio.ByteBuffer fallbackRead(java.io.InputStream,oadd.org.apache.hadoop.io.ByteBufferPool,int)>(v, v, v);
// null means EOF / nothing read: nothing to track.
if v == null goto label;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: oadd.org.apache.hadoop.util.IdentityHashStore extendedReadBuffers>;
virtualinvoke v.<oadd.org.apache.hadoop.util.IdentityHashStore: void put(java.lang.Object,java.lang.Object)>(v, v);
label:
return v;
catch java.lang.ClassCastException from label to label with label;
}
// Convenience overload: enhanced read with no read options
// (delegates with the shared EMPTY_READ_OPTIONS_SET).
public final java.nio.ByteBuffer read(oadd.org.apache.hadoop.io.ByteBufferPool, int) throws java.io.IOException, java.lang.UnsupportedOperationException
{
oadd.org.apache.hadoop.io.ByteBufferPool v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
int v;
java.nio.ByteBuffer v;
java.util.EnumSet v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: oadd.org.apache.hadoop.io.ByteBufferPool;
v := @parameter: int;
v = <oadd.org.apache.hadoop.fs.FSDataInputStream: java.util.EnumSet EMPTY_READ_OPTIONS_SET>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.nio.ByteBuffer read(oadd.org.apache.hadoop.io.ByteBufferPool,int,java.util.EnumSet)>(v, v, v);
return v;
}
// Releases a buffer obtained from the enhanced read(): first tries the
// wrapped stream as HasEnhancedByteBufferAccess; on ClassCastException,
// removes the buffer from extendedReadBuffers (the removed value is
// presumably the ByteBufferPool stored by read() above) and returns the
// buffer to that pool via putBuffer. Throws IllegalArgumentException when the
// buffer was not tracked, i.e. not created by this stream.
public void releaseBuffer(java.nio.ByteBuffer)
{
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.lang.Object v;
java.lang.IllegalArgumentException v;
java.lang.ClassCastException v;
oadd.org.apache.hadoop.util.IdentityHashStore v;
java.nio.ByteBuffer v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: java.nio.ByteBuffer;
label:
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
interfaceinvoke v.<oadd.org.apache.hadoop.fs.HasEnhancedByteBufferAccess: void releaseBuffer(java.nio.ByteBuffer)>(v);
label:
goto label;
label:
// Fallback path: the buffer came from ByteBufferUtil.fallbackRead.
v := @caughtexception;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: oadd.org.apache.hadoop.util.IdentityHashStore extendedReadBuffers>;
v = virtualinvoke v.<oadd.org.apache.hadoop.util.IdentityHashStore: java.lang.Object remove(java.lang.Object)>(v);
if v != null goto label;
v = new java.lang.IllegalArgumentException;
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>("tried to release a buffer that was not created by this stream.");
throw v;
label:
interfaceinvoke v.<oadd.org.apache.hadoop.io.ByteBufferPool: void putBuffer(java.nio.ByteBuffer)>(v);
label:
return;
catch java.lang.ClassCastException from label to label with label;
}
// Delegates unbuffer() to the StreamCapabilitiesPolicy helper, which handles
// streams that do or do not support CanUnbuffer.
public void unbuffer()
{
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
staticinvoke <oadd.org.apache.hadoop.fs.StreamCapabilitiesPolicy: void unbuffer(java.io.InputStream)>(v);
return;
}
// Capability query: delegates to the wrapped stream when it implements
// StreamCapabilities, otherwise reports false (Jimple "return 0" = false).
public boolean hasCapability(java.lang.String)
{
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.lang.String v;
boolean v, v;
java.io.InputStream v, v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: java.lang.String;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = v instanceof oadd.org.apache.hadoop.fs.StreamCapabilities;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.fs.StreamCapabilities: boolean hasCapability(java.lang.String)>(v);
return v;
label:
return 0;
}
// toString: concatenates Object.toString() and the wrapped stream via an
// invokedynamic StringConcatFactory call with recipe "\u0001: \u0001",
// i.e. "<super.toString()>: <in>".
public java.lang.String toString()
{
oadd.org.apache.hadoop.fs.FSDataInputStream v;
java.lang.String v, v;
java.io.InputStream v;
v := @this: oadd.org.apache.hadoop.fs.FSDataInputStream;
v = specialinvoke v.<java.lang.Object: java.lang.String toString()>();
v = v.<oadd.org.apache.hadoop.fs.FSDataInputStream: java.io.InputStream in>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.io.InputStream)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u0001: \u0001");
return v;
}
// Static initializer: EMPTY_READ_OPTIONS_SET = EnumSet.noneOf(ReadOption.class).
static void <clinit>()
{
java.util.EnumSet v;
v = staticinvoke <java.util.EnumSet: java.util.EnumSet noneOf(java.lang.Class)>(class "Loadd/org/apache/hadoop/fs/ReadOption;");
<oadd.org.apache.hadoop.fs.FSDataInputStream: java.util.EnumSet EMPTY_READ_OPTIONS_SET> = v;
return;
}
}