// Jimple IR (Soot) for Hadoop's shaded Display.AvroFileInputStream: an InputStream
// adapter that reads an Avro container file and exposes its records as JSON text.
// NOTE(review): the decompiler collapsed every distinct local into the single name
// "v"; each assignment below conceptually re-binds "v" to a fresh temporary.
public class oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream extends java.io.InputStream
{
// Read cursor: index of the next byte of "buffer" to hand out from read().
private int pos;
// JSON bytes produced for the records decoded so far; refilled when exhausted.
private byte[] buffer;
// Sink the JsonEncoder writes into; drained into "buffer" and then reset.
private java.io.ByteArrayOutputStream output;
// Iterates the records of the underlying Avro data file.
private oadd.org.apache.avro.file.FileReader fileReader;
// Serializes each record (using the file's schema) into the JSON encoder.
private oadd.org.apache.avro.io.DatumWriter writer;
// Encodes datums as JSON onto "output".
private oadd.org.apache.avro.io.JsonEncoder encoder;
// Constructor: opens the Avro file named by the given FileStatus and wires up
// the reader -> GenericDatumWriter -> JsonEncoder -> ByteArrayOutputStream chain.
// Throws java.io.IOException if the file cannot be opened or read.
public void <init>(oadd.org.apache.hadoop.fs.FileStatus) throws java.io.IOException
{
// Decompiler artifact: all locals share the name "v"; declarations below list
// the distinct types that were in use.
byte[] v;
oadd.org.apache.hadoop.fs.FileContext v;
java.io.ByteArrayOutputStream v, v;
oadd.org.apache.hadoop.conf.Configuration v;
oadd.org.apache.avro.file.FileReader v, v;
oadd.org.apache.avro.Schema v;
oadd.org.apache.avro.generic.GenericDatumReader v;
oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream v;
oadd.org.apache.avro.generic.GenericDatumWriter v;
oadd.org.apache.hadoop.fs.FileStatus v;
oadd.org.apache.hadoop.fs.AvroFSInput v;
oadd.org.apache.avro.io.JsonEncoder v;
oadd.org.apache.hadoop.fs.Path v;
oadd.org.apache.avro.io.EncoderFactory v;
v := @this: oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream;
v := @parameter: oadd.org.apache.hadoop.fs.FileStatus;
// super() -- java.io.InputStream no-arg constructor.
specialinvoke v.<java.io.InputStream: void <init>()>();
// Start with an empty, fully-consumed buffer so the first read() refills it.
v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: int pos> = 0;
v = newarray (byte)[0];
v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: byte[] buffer> = v;
// Schema-less GenericDatumReader: the schema is taken from the file itself.
v = new oadd.org.apache.avro.generic.GenericDatumReader;
specialinvoke v.<oadd.org.apache.avro.generic.GenericDatumReader: void <init>()>();
// Fresh default Configuration to obtain a FileContext for the path.
v = new oadd.org.apache.hadoop.conf.Configuration;
specialinvoke v.<oadd.org.apache.hadoop.conf.Configuration: void <init>()>();
v = staticinvoke <oadd.org.apache.hadoop.fs.FileContext: oadd.org.apache.hadoop.fs.FileContext getFileContext(oadd.org.apache.hadoop.conf.Configuration)>(v);
// AvroFSInput adapts the Hadoop file to Avro's SeekableInput.
v = new oadd.org.apache.hadoop.fs.AvroFSInput;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FileStatus: oadd.org.apache.hadoop.fs.Path getPath()>();
specialinvoke v.<oadd.org.apache.hadoop.fs.AvroFSInput: void <init>(oadd.org.apache.hadoop.fs.FileContext,oadd.org.apache.hadoop.fs.Path)>(v, v);
v = staticinvoke <oadd.org.apache.avro.file.DataFileReader: oadd.org.apache.avro.file.FileReader openReader(oadd.org.apache.avro.file.SeekableInput,oadd.org.apache.avro.io.DatumReader)>(v, v);
v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.file.FileReader fileReader> = v;
// Build the writer from the schema embedded in the opened file.
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.file.FileReader fileReader>;
v = interfaceinvoke v.<oadd.org.apache.avro.file.FileReader: oadd.org.apache.avro.Schema getSchema()>();
v = new oadd.org.apache.avro.generic.GenericDatumWriter;
specialinvoke v.<oadd.org.apache.avro.generic.GenericDatumWriter: void <init>(oadd.org.apache.avro.Schema)>(v);
v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.io.DatumWriter writer> = v;
// In-memory sink for the JSON encoder output.
v = new java.io.ByteArrayOutputStream;
specialinvoke v.<java.io.ByteArrayOutputStream: void <init>()>();
v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: java.io.ByteArrayOutputStream output> = v;
// JsonEncoder bound to the same schema, writing into "output".
v = staticinvoke <oadd.org.apache.avro.io.EncoderFactory: oadd.org.apache.avro.io.EncoderFactory get()>();
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: java.io.ByteArrayOutputStream output>;
v = virtualinvoke v.<oadd.org.apache.avro.io.EncoderFactory: oadd.org.apache.avro.io.JsonEncoder jsonEncoder(oadd.org.apache.avro.Schema,java.io.OutputStream)>(v, v);
v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.io.JsonEncoder encoder> = v;
return;
}
// Returns the next byte of the JSON rendering of the file. When the local
// buffer is exhausted it decodes the next Avro record into JSON, appends the
// platform line separator (UTF-8) after the final record, swaps the encoded
// bytes into "buffer", and recurses. Returns -1 at end of file.
// NOTE(review): the buffered byte is returned as "buffer[pos++]" without
// masking with 0xff, so non-ASCII bytes come back negative -- this conflicts
// with the InputStream.read() contract (0..255 or -1); confirm against callers.
public int read() throws java.io.IOException
{
// Decompiler artifact: all locals share the name "v" (see constructor note).
byte[] v, v, v, v;
java.io.ByteArrayOutputStream v, v, v, v;
oadd.org.apache.avro.io.DatumWriter v;
byte v;
java.nio.charset.Charset v;
int v, v, v, v, v, v;
java.lang.String v;
boolean v, v;
oadd.org.apache.avro.file.FileReader v, v, v;
oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream v;
oadd.org.apache.avro.io.JsonEncoder v, v;
java.lang.Object v;
v := @this: oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream;
// Fast path: unread bytes remain in the buffer -> return buffer[pos++].
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: int pos>;
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: byte[] buffer>;
v = lengthof v;
if v >= v goto label;
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: byte[] buffer>;
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: int pos>;
v = v + 1;
v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: int pos> = v;
v = v[v];
return v;
label:
// Buffer exhausted: end of Avro file means end of stream (-1).
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.file.FileReader fileReader>;
v = interfaceinvoke v.<oadd.org.apache.avro.file.FileReader: boolean hasNext()>();
if v != 0 goto label;
v = (int) -1;
return v;
label:
// Decode the next record into JSON: writer.write(fileReader.next(), encoder).
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.io.DatumWriter writer>;
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.file.FileReader fileReader>;
v = interfaceinvoke v.<oadd.org.apache.avro.file.FileReader: java.lang.Object next()>();
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.io.JsonEncoder encoder>;
interfaceinvoke v.<oadd.org.apache.avro.io.DatumWriter: void write(java.lang.Object,oadd.org.apache.avro.io.Encoder)>(v, v);
// Flush the encoder so the JSON bytes land in "output".
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.io.JsonEncoder encoder>;
virtualinvoke v.<oadd.org.apache.avro.io.JsonEncoder: void flush()>();
// If that was the last record, append System line.separator (UTF-8 bytes)
// so the overall JSON output ends with a newline.
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.file.FileReader fileReader>;
v = interfaceinvoke v.<oadd.org.apache.avro.file.FileReader: boolean hasNext()>();
if v != 0 goto label;
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: java.io.ByteArrayOutputStream output>;
v = staticinvoke <java.lang.System: java.lang.String getProperty(java.lang.String)>("line.separator");
v = <java.nio.charset.StandardCharsets: java.nio.charset.Charset UTF_8>;
v = virtualinvoke v.<java.lang.String: byte[] getBytes(java.nio.charset.Charset)>(v);
virtualinvoke v.<java.io.ByteArrayOutputStream: void write(byte[])>(v);
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: java.io.ByteArrayOutputStream output>;
virtualinvoke v.<java.io.ByteArrayOutputStream: void flush()>();
label:
// Swap the encoded bytes into "buffer", clear "output", and recurse so the
// fast path at the top serves the first byte of the refilled buffer.
v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: int pos> = 0;
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: java.io.ByteArrayOutputStream output>;
v = virtualinvoke v.<java.io.ByteArrayOutputStream: byte[] toByteArray()>();
v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: byte[] buffer> = v;
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: java.io.ByteArrayOutputStream output>;
virtualinvoke v.<java.io.ByteArrayOutputStream: void reset()>();
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: int read()>();
return v;
}
// Closes the Avro file reader and the byte buffer sink, then super.close().
// NOTE(review): if fileReader.close() throws, output is never closed --
// harmless for a ByteArrayOutputStream (its close() is a no-op), but worth
// confirming against the upstream source.
public void close() throws java.io.IOException
{
java.io.ByteArrayOutputStream v;
oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream v;
oadd.org.apache.avro.file.FileReader v;
v := @this: oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream;
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: oadd.org.apache.avro.file.FileReader fileReader>;
interfaceinvoke v.<oadd.org.apache.avro.file.FileReader: void close()>();
v = v.<oadd.org.apache.hadoop.fs.shell.Display$AvroFileInputStream: java.io.ByteArrayOutputStream output>;
virtualinvoke v.<java.io.ByteArrayOutputStream: void close()>();
specialinvoke v.<java.io.InputStream: void close()>();
return;
}
}