// Jimple IR (Soot intermediate representation) dump of the shaded
// ("oadd."-relocated) Hadoop SequenceFile.Reader class.
// NOTE(review): the printer collapsed every local name to "v" and every
// branch target to the bare token "label", so this listing is not valid,
// round-trippable Jimple as printed.
public class oadd.org.apache.hadoop.io.SequenceFile$Reader extends java.lang.Object implements java.io.Closeable
{
// Display name of the input; initialize() sets it to path.toString(),
// or "<unknown>" when reading from a bare stream.
private java.lang.String filename;
// Underlying positional input stream all header/record reads go through.
private oadd.org.apache.hadoop.fs.FSDataInputStream in;
private oadd.org.apache.hadoop.io.DataOutputBuffer outBuf;
// File-format version byte (4th byte of the file header).
private byte version;
// Key/value class names as recorded in the file header; the resolved
// Class objects are cached alongside them.
private java.lang.String keyClassName;
private java.lang.String valClassName;
private java.lang.Class keyClass;
private java.lang.Class valClass;
// Codec used to decompress record data (init() falls back to DefaultCodec
// for pre-v5 files that carry no codec name).
private oadd.org.apache.hadoop.io.compress.CompressionCodec codec;
private oadd.org.apache.hadoop.io.SequenceFile$Metadata metadata;
// 16-byte sync marker read from the header, plus a same-sized scratch
// buffer for comparing markers encountered mid-stream.
private byte[] sync;
private byte[] syncCheck;
private boolean syncSeen;
// Stream offset just past the header, and the offset at which reading stops.
private long headerEnd;
private long end;
private int keyLength;
private int recordLength;
// decompress: record data is compressed; blockCompressed: records are
// additionally grouped into compressed blocks (both read from the header).
private boolean decompress;
private boolean blockCompressed;
private oadd.org.apache.hadoop.conf.Configuration conf;
// Counters for records/keys/values buffered out of the current block;
// all reset to 0 by the constructor.
private int noBufferedRecords;
private boolean lazyDecompress;
private boolean valuesDecompressed;
private int noBufferedKeys;
private int noBufferedValues;
// Block-compressed reading uses a quadruple per field (key-length, key,
// value-length, value): raw DataInputBuffer, decompressing stream wrapper,
// DataInputStream view, and the pooled Decompressor backing it.
private oadd.org.apache.hadoop.io.DataInputBuffer keyLenBuffer;
private oadd.org.apache.hadoop.io.compress.CompressionInputStream keyLenInFilter;
private java.io.DataInputStream keyLenIn;
private oadd.org.apache.hadoop.io.compress.Decompressor keyLenDecompressor;
private oadd.org.apache.hadoop.io.DataInputBuffer keyBuffer;
private oadd.org.apache.hadoop.io.compress.CompressionInputStream keyInFilter;
private java.io.DataInputStream keyIn;
private oadd.org.apache.hadoop.io.compress.Decompressor keyDecompressor;
private oadd.org.apache.hadoop.io.DataInputBuffer valLenBuffer;
private oadd.org.apache.hadoop.io.compress.CompressionInputStream valLenInFilter;
private java.io.DataInputStream valLenIn;
private oadd.org.apache.hadoop.io.compress.Decompressor valLenDecompressor;
private oadd.org.apache.hadoop.io.DataInputBuffer valBuffer;
private oadd.org.apache.hadoop.io.compress.CompressionInputStream valInFilter;
private java.io.DataInputStream valIn;
private oadd.org.apache.hadoop.io.compress.Decompressor valDecompressor;
// Deserializers for the configured serialization framework.
private oadd.org.apache.hadoop.io.serializer.Deserializer keyDeserializer;
private oadd.org.apache.hadoop.io.serializer.Deserializer valDeserializer;
// Static factory: wraps a Path in a FileOption for the options-based
// Reader constructor.
// Fix: the dump had both locals named "v" and an unindexed "@parameter";
// distinct names and @parameter0 restore valid Jimple.
public static oadd.org.apache.hadoop.io.SequenceFile$Reader$Option file(oadd.org.apache.hadoop.fs.Path)
{
    oadd.org.apache.hadoop.fs.Path $path;
    oadd.org.apache.hadoop.io.SequenceFile$Reader$FileOption $opt;

    $path := @parameter0: oadd.org.apache.hadoop.fs.Path;

    $opt = new oadd.org.apache.hadoop.io.SequenceFile$Reader$FileOption;
    specialinvoke $opt.<oadd.org.apache.hadoop.io.SequenceFile$Reader$FileOption: void <init>(oadd.org.apache.hadoop.fs.Path)>($path);
    return $opt;
}
// Static factory: wraps an already-open FSDataInputStream in an
// InputStreamOption for the options-based Reader constructor.
// Fix: restore distinct local names and the @parameter0 index lost in the dump.
public static oadd.org.apache.hadoop.io.SequenceFile$Reader$Option stream(oadd.org.apache.hadoop.fs.FSDataInputStream)
{
    oadd.org.apache.hadoop.fs.FSDataInputStream $in;
    oadd.org.apache.hadoop.io.SequenceFile$Reader$InputStreamOption $opt;

    $in := @parameter0: oadd.org.apache.hadoop.fs.FSDataInputStream;

    $opt = new oadd.org.apache.hadoop.io.SequenceFile$Reader$InputStreamOption;
    specialinvoke $opt.<oadd.org.apache.hadoop.io.SequenceFile$Reader$InputStreamOption: void <init>(oadd.org.apache.hadoop.fs.FSDataInputStream)>($in);
    return $opt;
}
// Static factory: wraps a starting byte offset in a StartOption.
// Fix: restore distinct local names and the @parameter0 index lost in the dump.
public static oadd.org.apache.hadoop.io.SequenceFile$Reader$Option start(long)
{
    long $startOffset;
    oadd.org.apache.hadoop.io.SequenceFile$Reader$StartOption $opt;

    $startOffset := @parameter0: long;

    $opt = new oadd.org.apache.hadoop.io.SequenceFile$Reader$StartOption;
    specialinvoke $opt.<oadd.org.apache.hadoop.io.SequenceFile$Reader$StartOption: void <init>(long)>($startOffset);
    return $opt;
}
// Static factory: wraps a byte-length bound in a LengthOption.
// Fix: restore distinct local names and the @parameter0 index lost in the dump.
public static oadd.org.apache.hadoop.io.SequenceFile$Reader$Option length(long)
{
    long $byteLength;
    oadd.org.apache.hadoop.io.SequenceFile$Reader$LengthOption $opt;

    $byteLength := @parameter0: long;

    $opt = new oadd.org.apache.hadoop.io.SequenceFile$Reader$LengthOption;
    specialinvoke $opt.<oadd.org.apache.hadoop.io.SequenceFile$Reader$LengthOption: void <init>(long)>($byteLength);
    return $opt;
}
// Static factory: wraps an I/O buffer size in a BufferSizeOption
// (only legal in combination with a file option -- see the main constructor).
// Fix: restore distinct local names and the @parameter0 index lost in the dump.
public static oadd.org.apache.hadoop.io.SequenceFile$Reader$Option bufferSize(int)
{
    int $bufSize;
    oadd.org.apache.hadoop.io.SequenceFile$Reader$BufferSizeOption $opt;

    $bufSize := @parameter0: int;

    $opt = new oadd.org.apache.hadoop.io.SequenceFile$Reader$BufferSizeOption;
    specialinvoke $opt.<oadd.org.apache.hadoop.io.SequenceFile$Reader$BufferSizeOption: void <init>(int)>($bufSize);
    return $opt;
}
// Primary options-based constructor: extracts each recognized Option from the
// varargs array, validates the combination, then delegates to initialize().
// NOTE(review): every local below is printed as "v" and every branch target as
// "label"; the commentary maps the control flow by statement position and by
// comparison with the surrounding validation messages -- confirm against a
// faithful disassembly before relying on any single assignment.
public transient void <init>(oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.io.SequenceFile$Reader$Option[]) throws java.io.IOException
{
byte[] v, v;
long v, v, v, v, v, v;
oadd.org.apache.hadoop.io.SequenceFile$Reader$Option[] v;
oadd.org.apache.hadoop.io.DataOutputBuffer v;
int v, v;
oadd.org.apache.hadoop.conf.Configuration v;
boolean v, v, v;
oadd.org.apache.hadoop.fs.FSDataInputStream v, v;
java.lang.IllegalArgumentException v, v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
oadd.org.apache.hadoop.fs.FileStatus v;
java.lang.Object v, v, v, v, v, v;
oadd.org.apache.hadoop.fs.Path v, v;
oadd.org.apache.hadoop.fs.FileSystem v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$Reader$Option[];
specialinvoke v.<java.lang.Object: void <init>()>();
// --- Default-initialize all reader state before options are parsed. ---
v = new oadd.org.apache.hadoop.io.DataOutputBuffer;
specialinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: void <init>()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataOutputBuffer outBuf> = v;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.SequenceFile$Metadata metadata> = null;
// The sync marker and its comparison scratch buffer are both 16 bytes.
v = newarray (byte)[16];
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] sync> = v;
v = newarray (byte)[16];
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] syncCheck> = v;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedRecords> = 0;
// lazyDecompress/valuesDecompressed start as true (1).
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean lazyDecompress> = 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean valuesDecompressed> = 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys> = 0;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues> = 0;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer keyLenBuffer> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream keyLenInFilter> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyLenIn> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor keyLenDecompressor> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer keyBuffer> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream keyInFilter> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyIn> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor keyDecompressor> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valLenBuffer> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valLenInFilter> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valLenIn> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor valLenDecompressor> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valInFilter> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor valDecompressor> = null;
// --- Pull each supported option kind out of the array (null when absent):
// file, input-stream, start offset, length, buffer size, header-only. ---
v = staticinvoke <oadd.org.apache.hadoop.util.Options: java.lang.Object getOption(java.lang.Class,java.lang.Object[])>(class "Loadd/org/apache/hadoop/io/SequenceFile$Reader$FileOption;", v);
v = staticinvoke <oadd.org.apache.hadoop.util.Options: java.lang.Object getOption(java.lang.Class,java.lang.Object[])>(class "Loadd/org/apache/hadoop/io/SequenceFile$Reader$InputStreamOption;", v);
v = staticinvoke <oadd.org.apache.hadoop.util.Options: java.lang.Object getOption(java.lang.Class,java.lang.Object[])>(class "Loadd/org/apache/hadoop/io/SequenceFile$Reader$StartOption;", v);
v = staticinvoke <oadd.org.apache.hadoop.util.Options: java.lang.Object getOption(java.lang.Class,java.lang.Object[])>(class "Loadd/org/apache/hadoop/io/SequenceFile$Reader$LengthOption;", v);
v = staticinvoke <oadd.org.apache.hadoop.util.Options: java.lang.Object getOption(java.lang.Class,java.lang.Object[])>(class "Loadd/org/apache/hadoop/io/SequenceFile$Reader$BufferSizeOption;", v);
v = staticinvoke <oadd.org.apache.hadoop.util.Options: java.lang.Object getOption(java.lang.Class,java.lang.Object[])>(class "Loadd/org/apache/hadoop/io/SequenceFile$Reader$OnlyHeaderOption;", v);
// Compute two "is absent" flags (1 when the option is null) -- presumably for
// the file option and the stream option respectively; exact pairing is lost
// in this dump.
if v != null goto label;
v = 1;
goto label;
label:
v = 0;
label:
if v != null goto label;
v = 1;
goto label;
label:
v = 0;
label:
// Exactly one of file/stream must be given: if both flags are equal
// (both present or both absent), reject.
if v != v goto label;
v = new java.lang.IllegalArgumentException;
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>("File or stream option must be specified");
throw v;
label:
// A buffer-size option is only legal together with a file option.
if v != null goto label;
if v == null goto label;
v = new java.lang.IllegalArgumentException;
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>("buffer size can only be set when a file is specified.");
throw v;
label:
// Path stays null on the stream branch below.
v = null;
if v == null goto label;
// --- File branch: resolve filesystem, buffer size, length, then open. ---
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader$FileOption: oadd.org.apache.hadoop.fs.Path getValue()>();
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.Path: oadd.org.apache.hadoop.fs.FileSystem getFileSystem(oadd.org.apache.hadoop.conf.Configuration)>(v);
// Buffer size defaults to the configured SequenceFile buffer size.
if v != null goto label;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: int getBufferSize(oadd.org.apache.hadoop.conf.Configuration)>(v);
goto label;
label:
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader$BufferSizeOption: int getValue()>();
label:
v = v;
// Length defaults to the file's length from FileStatus.
if null != v goto label;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FileSystem: oadd.org.apache.hadoop.fs.FileStatus getFileStatus(oadd.org.apache.hadoop.fs.Path)>(v);
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FileStatus: long getLen()>();
goto label;
label:
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader$LengthOption: long getValue()>();
label:
v = v;
// Open via the overridable openFile() hook.
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream openFile(oadd.org.apache.hadoop.fs.FileSystem,oadd.org.apache.hadoop.fs.Path,int,long)>(v, v, v, v);
goto label;
label:
// --- Stream branch: length defaults to Long.MAX_VALUE. ---
if null != v goto label;
v = 9223372036854775807L;
goto label;
label:
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader$LengthOption: long getValue()>();
label:
v = v;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader$InputStreamOption: oadd.org.apache.hadoop.fs.FSDataInputStream getValue()>();
label:
// Start offset defaults to 0.
if v != null goto label;
v = 0L;
goto label;
label:
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader$StartOption: long getValue()>();
label:
v = v;
v = v;
v = v;
v = v;
// Convert the header-only option's presence to a boolean flag.
if v == null goto label;
v = 1;
goto label;
label:
v = 0;
label:
// Delegate to initialize(path, stream, start, length, conf, headerOnly).
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void initialize(oadd.org.apache.hadoop.fs.Path,oadd.org.apache.hadoop.fs.FSDataInputStream,long,long,oadd.org.apache.hadoop.conf.Configuration,boolean)>(v, v, v, v, v, v);
return;
}
// Legacy convenience constructor: qualifies the path against the given
// filesystem and delegates to the options-based constructor with a single
// file option.
// Fix: the dump had all locals named "v" and unindexed "@parameter"
// directives; distinct names and indexed parameters restore valid Jimple.
public void <init>(oadd.org.apache.hadoop.fs.FileSystem, oadd.org.apache.hadoop.fs.Path, oadd.org.apache.hadoop.conf.Configuration) throws java.io.IOException
{
    oadd.org.apache.hadoop.io.SequenceFile$Reader $this;
    oadd.org.apache.hadoop.fs.FileSystem $fs;
    oadd.org.apache.hadoop.fs.Path $path, $qualified;
    oadd.org.apache.hadoop.conf.Configuration $conf;
    oadd.org.apache.hadoop.io.SequenceFile$Reader$Option[] $opts;
    oadd.org.apache.hadoop.io.SequenceFile$Reader$Option $fileOpt;

    $this := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
    $fs := @parameter0: oadd.org.apache.hadoop.fs.FileSystem;
    $path := @parameter1: oadd.org.apache.hadoop.fs.Path;
    $conf := @parameter2: oadd.org.apache.hadoop.conf.Configuration;

    // Build a one-element option array holding file(fs.makeQualified(path)).
    $opts = newarray (oadd.org.apache.hadoop.io.SequenceFile$Reader$Option)[1];
    $qualified = virtualinvoke $fs.<oadd.org.apache.hadoop.fs.FileSystem: oadd.org.apache.hadoop.fs.Path makeQualified(oadd.org.apache.hadoop.fs.Path)>($path);
    $fileOpt = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.SequenceFile$Reader$Option file(oadd.org.apache.hadoop.fs.Path)>($qualified);
    $opts[0] = $fileOpt;
    specialinvoke $this.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void <init>(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Reader$Option[])>($conf, $opts);
    return;
}
// Legacy convenience constructor over an open stream: delegates to the
// options-based constructor with stream/start/length options.
// NOTE(review): the int buffer-size parameter is accepted but never used --
// consistent with the main constructor's rule that a buffer size may only
// accompany a file option.
// Fix: the dump had all locals named "v" and unindexed "@parameter"
// directives; distinct names and indexed parameters restore valid Jimple.
public void <init>(oadd.org.apache.hadoop.fs.FSDataInputStream, int, long, long, oadd.org.apache.hadoop.conf.Configuration) throws java.io.IOException
{
    oadd.org.apache.hadoop.io.SequenceFile$Reader $this;
    oadd.org.apache.hadoop.fs.FSDataInputStream $in;
    int $bufSize;
    long $start, $length;
    oadd.org.apache.hadoop.conf.Configuration $conf;
    oadd.org.apache.hadoop.io.SequenceFile$Reader$Option[] $opts;
    oadd.org.apache.hadoop.io.SequenceFile$Reader$Option $streamOpt, $startOpt, $lengthOpt;

    $this := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
    $in := @parameter0: oadd.org.apache.hadoop.fs.FSDataInputStream;
    $bufSize := @parameter1: int;
    $start := @parameter2: long;
    $length := @parameter3: long;
    $conf := @parameter4: oadd.org.apache.hadoop.conf.Configuration;

    // Build [stream(in), start(start), length(length)] and delegate.
    $opts = newarray (oadd.org.apache.hadoop.io.SequenceFile$Reader$Option)[3];
    $streamOpt = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.SequenceFile$Reader$Option stream(oadd.org.apache.hadoop.fs.FSDataInputStream)>($in);
    $opts[0] = $streamOpt;
    $startOpt = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.SequenceFile$Reader$Option start(long)>($start);
    $opts[1] = $startOpt;
    $lengthOpt = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.SequenceFile$Reader$Option length(long)>($length);
    $opts[2] = $lengthOpt;
    specialinvoke $this.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void <init>(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Reader$Option[])>($conf, $opts);
    return;
}
// Common initialization: records filename/stream/conf, seeks to the start
// offset, computes the end offset, and parses the header via init(boolean).
// On any Throwable during that work the stream is closed and the error
// rethrown.
// NOTE(review): locals are all printed as "v"; comments map statements by
// position -- confirm against a faithful disassembly.
private void initialize(oadd.org.apache.hadoop.fs.Path, oadd.org.apache.hadoop.fs.FSDataInputStream, long, long, oadd.org.apache.hadoop.conf.Configuration, boolean) throws java.io.IOException
{
java.lang.Throwable v;
long v, v, v, v, v;
byte v;
java.io.Closeable[] v;
oadd.org.apache.hadoop.conf.Configuration v;
java.lang.String v;
boolean v;
oadd.org.apache.hadoop.fs.FSDataInputStream v, v, v;
org.slf4j.Logger v;
java.lang.IllegalArgumentException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
oadd.org.apache.hadoop.fs.Path v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.fs.Path;
v := @parameter: oadd.org.apache.hadoop.fs.FSDataInputStream;
v := @parameter: long;
v := @parameter: long;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: boolean;
// Reject a null input stream up front.
if v != null goto label;
v = new java.lang.IllegalArgumentException;
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>("in == null");
throw v;
label:
// filename is the path's string form, or "<unknown>" for a bare stream.
if v != null goto label;
v = "<unknown>";
goto label;
label:
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.Path: java.lang.String toString()>();
label:
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.String filename> = v;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in> = v;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf> = v;
label:
// try-block start (see the catch clause at the bottom of the method):
// seek to the requested start offset, then end = getPos() + length.
virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void seek(long)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
v = v + v;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long end> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long end>;
// Overflow guard: if the addition wrapped negative relative to the
// comparand, clamp end to Long.MAX_VALUE.
v = v cmp v;
if v >= 0 goto label;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long end> = 9223372036854775807L;
label:
// Parse the file header (boolean selects header-only mode).
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void init(boolean)>(v);
label:
goto label;
label:
// catch Throwable: close the stream via IOUtils.cleanupWithLogger, rethrow.
v := @caughtexception;
v = <oadd.org.apache.hadoop.io.SequenceFile: org.slf4j.Logger LOG>;
v = newarray (java.io.Closeable)[1];
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v[0] = v;
staticinvoke <oadd.org.apache.hadoop.io.IOUtils: void cleanupWithLogger(org.slf4j.Logger,java.io.Closeable[])>(v, v);
throw v;
label:
return;
catch java.lang.Throwable from label to label with label;
}
// Overridable hook used by the main constructor to open the file; the base
// implementation ignores the length hint and simply calls
// FileSystem.open(path, bufferSize).
// Fix: the dump had all locals named "v" and unindexed "@parameter"
// directives; distinct names and indexed parameters restore valid Jimple.
protected oadd.org.apache.hadoop.fs.FSDataInputStream openFile(oadd.org.apache.hadoop.fs.FileSystem, oadd.org.apache.hadoop.fs.Path, int, long) throws java.io.IOException
{
    oadd.org.apache.hadoop.io.SequenceFile$Reader $this;
    oadd.org.apache.hadoop.fs.FileSystem $fs;
    oadd.org.apache.hadoop.fs.Path $path;
    int $bufSize;
    long $length;
    oadd.org.apache.hadoop.fs.FSDataInputStream $in;

    $this := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
    $fs := @parameter0: oadd.org.apache.hadoop.fs.FileSystem;
    $path := @parameter1: oadd.org.apache.hadoop.fs.Path;
    $bufSize := @parameter2: int;
    $length := @parameter3: long;

    // $length is unused here; subclasses may use it to choose a strategy.
    $in = virtualinvoke $fs.<oadd.org.apache.hadoop.fs.FileSystem: oadd.org.apache.hadoop.fs.FSDataInputStream open(oadd.org.apache.hadoop.fs.Path,int)>($path, $bufSize);
    return $in;
}
private void init(boolean) throws java.io.IOException
{
byte[] v, v, v, v, v, v, v, v;
byte v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v;
oadd.org.apache.hadoop.conf.Configuration v, v, v, v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
oadd.org.apache.hadoop.io.compress.CompressionInputStream v, v, v, v, v, v, v, v;
oadd.org.apache.hadoop.io.DataInputBuffer v, v, v, v, v, v, v, v, v, v;
long v;
oadd.org.apache.hadoop.io.UTF8 v;
java.io.IOException v, v, v;
java.io.DataInputStream v, v, v, v, v, v;
oadd.org.apache.hadoop.io.serializer.Deserializer v, v, v, v, v, v, v;
oadd.org.apache.hadoop.io.serializer.SerializationFactory v;
oadd.org.apache.hadoop.io.VersionMismatchException v;
int v;
java.lang.Class v, v, v, v, v, v;
java.lang.Object v;
java.lang.String v, v, v, v, v, v, v, v, v, v, v, v;
oadd.org.apache.hadoop.fs.FSDataInputStream v, v, v, v, v, v, v, v, v, v, v;
oadd.org.apache.hadoop.io.compress.DefaultCodec v;
java.lang.ClassNotFoundException v;
boolean v, v, v, v, v, v, v;
oadd.org.apache.hadoop.io.compress.CompressionCodec v, v, v, v, v, v, v, v, v;
java.io.EOFException v, v;
oadd.org.apache.hadoop.io.compress.Decompressor v, v, v, v, v, v, v, v;
oadd.org.apache.hadoop.io.SequenceFile$Metadata v, v;
java.lang.IllegalArgumentException v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: boolean;
v = <oadd.org.apache.hadoop.io.SequenceFile: byte[] VERSION>;
v = lengthof v;
v = newarray (byte)[v];
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (oadd.org.apache.hadoop.io.SequenceFile$Reader)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u not a SequenceFile");
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void readFully(byte[])>(v);
label:
goto label;
label:
v := @caughtexception;
v = new java.io.EOFException;
specialinvoke v.<java.io.EOFException: void <init>(java.lang.String)>(v);
throw v;
label:
v = v[0];
v = <oadd.org.apache.hadoop.io.SequenceFile: byte[] VERSION>;
v = v[0];
if v != v goto label;
v = v[1];
v = <oadd.org.apache.hadoop.io.SequenceFile: byte[] VERSION>;
v = v[1];
if v != v goto label;
v = v[2];
v = <oadd.org.apache.hadoop.io.SequenceFile: byte[] VERSION>;
v = v[2];
if v == v goto label;
label:
v = new java.io.IOException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (oadd.org.apache.hadoop.io.SequenceFile$Reader)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u not a SequenceFile");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
v = v[3];
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version>;
v = <oadd.org.apache.hadoop.io.SequenceFile: byte[] VERSION>;
v = v[3];
if v <= v goto label;
v = new oadd.org.apache.hadoop.io.VersionMismatchException;
v = <oadd.org.apache.hadoop.io.SequenceFile: byte[] VERSION>;
v = v[3];
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version>;
specialinvoke v.<oadd.org.apache.hadoop.io.VersionMismatchException: void <init>(byte,byte)>(v, v);
throw v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version>;
if v >= 4 goto label;
v = new oadd.org.apache.hadoop.io.UTF8;
specialinvoke v.<oadd.org.apache.hadoop.io.UTF8: void <init>()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
virtualinvoke v.<oadd.org.apache.hadoop.io.UTF8: void readFields(java.io.DataInput)>(v);
v = virtualinvoke v.<oadd.org.apache.hadoop.io.UTF8: java.lang.String toStringChecked()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.String keyClassName> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
virtualinvoke v.<oadd.org.apache.hadoop.io.UTF8: void readFields(java.io.DataInput)>(v);
v = virtualinvoke v.<oadd.org.apache.hadoop.io.UTF8: java.lang.String toStringChecked()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.String valClassName> = v;
goto label;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = staticinvoke <oadd.org.apache.hadoop.io.Text: java.lang.String readString(java.io.DataInput)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.String keyClassName> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = staticinvoke <oadd.org.apache.hadoop.io.Text: java.lang.String readString(java.io.DataInput)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.String valClassName> = v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version>;
if v <= 2 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: boolean readBoolean()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean decompress> = v;
goto label;
label:
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean decompress> = 0;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version>;
if v < 4 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: boolean readBoolean()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed> = v;
goto label;
label:
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed> = 0;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean decompress>;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version>;
if v < 5 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = staticinvoke <oadd.org.apache.hadoop.io.Text: java.lang.String readString(java.io.DataInput)>(v);
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
v = virtualinvoke v.<oadd.org.apache.hadoop.conf.Configuration: java.lang.Class getClassByName(java.lang.String)>(v);
v = virtualinvoke v.<java.lang.Class: java.lang.Class asSubclass(java.lang.Class)>(class "Loadd/org/apache/hadoop/io/compress/CompressionCodec;");
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
v = staticinvoke <oadd.org.apache.hadoop.util.ReflectionUtils: java.lang.Object newInstance(java.lang.Class,oadd.org.apache.hadoop.conf.Configuration)>(v, v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec> = v;
label:
goto label;
label:
v := @caughtexception;
v = new java.lang.IllegalArgumentException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Unknown codec: \u0001");
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String,java.lang.Throwable)>(v, v);
throw v;
label:
v = new oadd.org.apache.hadoop.io.compress.DefaultCodec;
specialinvoke v.<oadd.org.apache.hadoop.io.compress.DefaultCodec: void <init>()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
interfaceinvoke v.<oadd.org.apache.hadoop.conf.Configurable: void setConf(oadd.org.apache.hadoop.conf.Configuration)>(v);
label:
v = new oadd.org.apache.hadoop.io.SequenceFile$Metadata;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Metadata: void <init>()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.SequenceFile$Metadata metadata> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version>;
if v < 6 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.SequenceFile$Metadata metadata>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Metadata: void readFields(java.io.DataInput)>(v);
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version>;
if v <= 1 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] sync>;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void readFully(byte[])>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long headerEnd> = v;
label:
if v != 0 goto label;
v = new oadd.org.apache.hadoop.io.DataInputBuffer;
specialinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: void <init>()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean decompress>;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
v = staticinvoke <oadd.org.apache.hadoop.io.compress.CodecPool: oadd.org.apache.hadoop.io.compress.Decompressor getDecompressor(oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor valDecompressor> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor valDecompressor>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.io.compress.CompressionCodec: oadd.org.apache.hadoop.io.compress.CompressionInputStream createInputStream(java.io.InputStream,oadd.org.apache.hadoop.io.compress.Decompressor)>(v, v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valInFilter> = v;
v = new java.io.DataInputStream;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valInFilter>;
specialinvoke v.<java.io.DataInputStream: void <init>(java.io.InputStream)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn> = v;
goto label;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn> = v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v == 0 goto label;
v = new oadd.org.apache.hadoop.io.DataInputBuffer;
specialinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: void <init>()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer keyLenBuffer> = v;
v = new oadd.org.apache.hadoop.io.DataInputBuffer;
specialinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: void <init>()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer keyBuffer> = v;
v = new oadd.org.apache.hadoop.io.DataInputBuffer;
specialinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: void <init>()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valLenBuffer> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
v = staticinvoke <oadd.org.apache.hadoop.io.compress.CodecPool: oadd.org.apache.hadoop.io.compress.Decompressor getDecompressor(oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor keyLenDecompressor> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer keyLenBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor keyLenDecompressor>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.io.compress.CompressionCodec: oadd.org.apache.hadoop.io.compress.CompressionInputStream createInputStream(java.io.InputStream,oadd.org.apache.hadoop.io.compress.Decompressor)>(v, v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream keyLenInFilter> = v;
v = new java.io.DataInputStream;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream keyLenInFilter>;
specialinvoke v.<java.io.DataInputStream: void <init>(java.io.InputStream)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyLenIn> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
v = staticinvoke <oadd.org.apache.hadoop.io.compress.CodecPool: oadd.org.apache.hadoop.io.compress.Decompressor getDecompressor(oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor keyDecompressor> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer keyBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor keyDecompressor>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.io.compress.CompressionCodec: oadd.org.apache.hadoop.io.compress.CompressionInputStream createInputStream(java.io.InputStream,oadd.org.apache.hadoop.io.compress.Decompressor)>(v, v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream keyInFilter> = v;
v = new java.io.DataInputStream;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream keyInFilter>;
specialinvoke v.<java.io.DataInputStream: void <init>(java.io.InputStream)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyIn> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
v = staticinvoke <oadd.org.apache.hadoop.io.compress.CodecPool: oadd.org.apache.hadoop.io.compress.Decompressor getDecompressor(oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor valLenDecompressor> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valLenBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor valLenDecompressor>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.io.compress.CompressionCodec: oadd.org.apache.hadoop.io.compress.CompressionInputStream createInputStream(java.io.InputStream,oadd.org.apache.hadoop.io.compress.Decompressor)>(v, v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valLenInFilter> = v;
v = new java.io.DataInputStream;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valLenInFilter>;
specialinvoke v.<java.io.DataInputStream: void <init>(java.io.InputStream)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valLenIn> = v;
label:
v = new oadd.org.apache.hadoop.io.serializer.SerializationFactory;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
specialinvoke v.<oadd.org.apache.hadoop.io.serializer.SerializationFactory: void <init>(oadd.org.apache.hadoop.conf.Configuration)>(v);
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class getKeyClass()>();
v = specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer getDeserializer(oadd.org.apache.hadoop.io.serializer.SerializationFactory,java.lang.Class)>(v, v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer keyDeserializer> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer keyDeserializer>;
if v != null goto label;
v = new java.io.IOException;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class getKeyClass()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getCanonicalName()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Could not find a deserializer for the Key class: \'\u0001\'. Please ensure that the configuration \'oadd.io.serializations\' is properly configured, if you\'re using custom serialization.");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v != 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer keyDeserializer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
interfaceinvoke v.<oadd.org.apache.hadoop.io.serializer.Deserializer: void open(java.io.InputStream)>(v);
goto label;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer keyDeserializer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyIn>;
interfaceinvoke v.<oadd.org.apache.hadoop.io.serializer.Deserializer: void open(java.io.InputStream)>(v);
label:
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class getValueClass()>();
v = specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer getDeserializer(oadd.org.apache.hadoop.io.serializer.SerializationFactory,java.lang.Class)>(v, v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer valDeserializer> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer valDeserializer>;
if v != null goto label;
v = new java.io.IOException;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class getValueClass()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getCanonicalName()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Could not find a deserializer for the Value class: \'\u0001\'. Please ensure that the configuration \'oadd.io.serializations\' is properly configured, if you\'re using custom serialization.");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer valDeserializer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn>;
interfaceinvoke v.<oadd.org.apache.hadoop.io.serializer.Deserializer: void open(java.io.InputStream)>(v);
label:
return;
catch java.io.EOFException from label to label with label;
catch java.lang.ClassNotFoundException from label to label with label;
}
// Looks up a Deserializer for the given class via the supplied
// SerializationFactory. Pure delegation; no reader state is read or written.
private oadd.org.apache.hadoop.io.serializer.Deserializer getDeserializer(oadd.org.apache.hadoop.io.serializer.SerializationFactory, java.lang.Class)
{
oadd.org.apache.hadoop.io.SequenceFile$Reader self;
oadd.org.apache.hadoop.io.serializer.SerializationFactory factory;
java.lang.Class targetClass;
oadd.org.apache.hadoop.io.serializer.Deserializer result;
self := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
factory := @parameter: oadd.org.apache.hadoop.io.serializer.SerializationFactory;
targetClass := @parameter: java.lang.Class;
result = virtualinvoke factory.<oadd.org.apache.hadoop.io.serializer.SerializationFactory: oadd.org.apache.hadoop.io.serializer.Deserializer getDeserializer(java.lang.Class)>(targetClass);
return result;
}
// Closes this reader: returns the four pooled decompressors to CodecPool,
// clears the decompressor fields, closes the key/value deserializers when
// they are non-null, and finally closes the underlying FSDataInputStream.
// NOTE(review): decompiled Jimple — every local is `v` and both branch
// targets are named `label`; comments describe the apparent data flow only.
public synchronized void close() throws java.io.IOException
{
oadd.org.apache.hadoop.io.compress.Decompressor v, v, v, v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
oadd.org.apache.hadoop.io.serializer.Deserializer v, v, v, v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
// Return each pooled decompressor: key-length, key, value-length, value.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor keyLenDecompressor>;
staticinvoke <oadd.org.apache.hadoop.io.compress.CodecPool: void returnDecompressor(oadd.org.apache.hadoop.io.compress.Decompressor)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor keyDecompressor>;
staticinvoke <oadd.org.apache.hadoop.io.compress.CodecPool: void returnDecompressor(oadd.org.apache.hadoop.io.compress.Decompressor)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor valLenDecompressor>;
staticinvoke <oadd.org.apache.hadoop.io.compress.CodecPool: void returnDecompressor(oadd.org.apache.hadoop.io.compress.Decompressor)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor valDecompressor>;
staticinvoke <oadd.org.apache.hadoop.io.compress.CodecPool: void returnDecompressor(oadd.org.apache.hadoop.io.compress.Decompressor)>(v);
// Null the fields so returned decompressors cannot be reused after close().
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor keyDecompressor> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor keyLenDecompressor> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor valDecompressor> = null;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.Decompressor valLenDecompressor> = null;
// Close keyDeserializer only if it was ever initialized (null check below).
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer keyDeserializer>;
if v == null goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer keyDeserializer>;
interfaceinvoke v.<oadd.org.apache.hadoop.io.serializer.Deserializer: void close()>();
label:
// Same null-guarded close for valDeserializer.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer valDeserializer>;
if v == null goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer valDeserializer>;
interfaceinvoke v.<oadd.org.apache.hadoop.io.serializer.Deserializer: void close()>();
label:
// Close the underlying input stream last; IOException propagates to caller.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void close()>();
return;
}
// Returns the fully-qualified key class name stored in the keyClassName field.
public java.lang.String getKeyClassName()
{
oadd.org.apache.hadoop.io.SequenceFile$Reader self;
java.lang.String className;
self := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
className = self.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.String keyClassName>;
return className;
}
// Returns the key Class, resolving it lazily: on the first call the class
// name from getKeyClassName() is resolved through WritableName.getClass and
// cached in the keyClass field; an IOException raised in that protected
// region is wrapped in a RuntimeException and rethrown.
// NOTE(review): decompiled Jimple — the `if null != keyClass goto label`
// guard is intended to skip the lookup when the class is already cached, but
// all jump targets share the name `label`, so the exact target (and the
// catch range on the last line) should be confirmed against original source.
public synchronized java.lang.Class getKeyClass()
{
java.io.IOException v;
java.lang.RuntimeException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
java.lang.Class v, v, v;
oadd.org.apache.hadoop.conf.Configuration v;
java.lang.String v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
// Fast path: keyClass already resolved.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class keyClass>;
if null != v goto label;
label:
// Slow path: resolve the class name using this reader's Configuration.
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.String getKeyClassName()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableName: java.lang.Class getClass(java.lang.String,oadd.org.apache.hadoop.conf.Configuration)>(v, v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class keyClass> = v;
label:
goto label;
label:
// Handler: wrap the caught IOException in an unchecked RuntimeException.
v := @caughtexception;
v = new java.lang.RuntimeException;
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.Throwable)>(v);
throw v;
label:
// Return the (now cached) key class.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class keyClass>;
return v;
catch java.io.IOException from label to label with label;
}
// Returns the fully-qualified value class name stored in the valClassName field.
public java.lang.String getValueClassName()
{
oadd.org.apache.hadoop.io.SequenceFile$Reader self;
java.lang.String className;
self := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
className = self.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.String valClassName>;
return className;
}
// Returns the value Class, resolving it lazily: on the first call the class
// name from getValueClassName() is resolved through WritableName.getClass
// and cached in the valClass field; an IOException raised in the protected
// region is wrapped in a RuntimeException and rethrown. Mirrors getKeyClass.
// NOTE(review): decompiled Jimple — all jump targets share the name `label`;
// the guard is intended to skip the lookup when valClass is already cached.
public synchronized java.lang.Class getValueClass()
{
java.io.IOException v;
java.lang.RuntimeException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
java.lang.Class v, v, v;
oadd.org.apache.hadoop.conf.Configuration v;
java.lang.String v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
// Fast path: valClass already resolved.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class valClass>;
if null != v goto label;
label:
// Slow path: resolve the class name using this reader's Configuration.
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.String getValueClassName()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableName: java.lang.Class getClass(java.lang.String,oadd.org.apache.hadoop.conf.Configuration)>(v, v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class valClass> = v;
label:
goto label;
label:
// Handler: wrap the caught IOException in an unchecked RuntimeException.
v := @caughtexception;
v = new java.lang.RuntimeException;
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.Throwable)>(v);
throw v;
label:
// Return the (now cached) value class.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class valClass>;
return v;
catch java.io.IOException from label to label with label;
}
// Returns the `decompress` flag, i.e. whether this file's data is compressed.
public boolean isCompressed()
{
oadd.org.apache.hadoop.io.SequenceFile$Reader self;
boolean compressed;
self := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
compressed = self.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean decompress>;
return compressed;
}
// Returns the `blockCompressed` flag, i.e. whether block compression is used.
public boolean isBlockCompressed()
{
oadd.org.apache.hadoop.io.SequenceFile$Reader self;
boolean blockMode;
self := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
blockMode = self.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
return blockMode;
}
// Returns the CompressionCodec held in this reader's `codec` field.
public oadd.org.apache.hadoop.io.compress.CompressionCodec getCompressionCodec()
{
oadd.org.apache.hadoop.io.SequenceFile$Reader self;
oadd.org.apache.hadoop.io.compress.CompressionCodec currentCodec;
self := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
currentCodec = self.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
return currentCodec;
}
// Returns the raw sync-marker bytes held in this reader's `sync` field.
private byte[] getSync()
{
oadd.org.apache.hadoop.io.SequenceFile$Reader self;
byte[] syncBytes;
self := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
syncBytes = self.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] sync>;
return syncBytes;
}
// Returns the file-format version byte held in this reader's `version` field.
private byte getVersion()
{
oadd.org.apache.hadoop.io.SequenceFile$Reader self;
byte formatVersion;
self := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
formatVersion = self.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version>;
return formatVersion;
}
// Maps the two header flags to a CompressionType constant:
//   decompress == false             -> NONE
//   decompress && blockCompressed   -> BLOCK
//   decompress && !blockCompressed  -> RECORD
// NOTE(review): decompiled Jimple — every jump target is named `label`, so
// the branch mapping above is inferred from statement order; confirm against
// the original SequenceFile source.
public oadd.org.apache.hadoop.io.SequenceFile$CompressionType getCompressionType()
{
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v, v;
boolean v, v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
// Not compressed at all -> jump to the NONE return at the bottom.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean decompress>;
if v == 0 goto label;
// Compressed: pick BLOCK vs RECORD on the blockCompressed flag.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v == 0 goto label;
v = <oadd.org.apache.hadoop.io.SequenceFile$CompressionType: oadd.org.apache.hadoop.io.SequenceFile$CompressionType BLOCK>;
goto label;
label:
v = <oadd.org.apache.hadoop.io.SequenceFile$CompressionType: oadd.org.apache.hadoop.io.SequenceFile$CompressionType RECORD>;
label:
return v;
label:
v = <oadd.org.apache.hadoop.io.SequenceFile$CompressionType: oadd.org.apache.hadoop.io.SequenceFile$CompressionType NONE>;
return v;
}
// Returns the SequenceFile$Metadata object held in this reader's
// `metadata` field.
public oadd.org.apache.hadoop.io.SequenceFile$Metadata getMetadata()
{
oadd.org.apache.hadoop.io.SequenceFile$Reader self;
oadd.org.apache.hadoop.io.SequenceFile$Metadata meta;
self := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
meta = self.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.SequenceFile$Metadata metadata>;
return meta;
}
// Package-private accessor for this reader's Configuration.
oadd.org.apache.hadoop.conf.Configuration getConf()
{
oadd.org.apache.hadoop.io.SequenceFile$Reader self;
oadd.org.apache.hadoop.conf.Configuration configuration;
self := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
configuration = self.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
return configuration;
}
// Reads one vint-length-prefixed region from the underlying stream `in` into
// the DataInputBuffer parameter, then resets the CompressionInputStream
// parameter so it will decompress the fresh data. The bytes are staged
// through a temporary DataOutputBuffer; the Throwable handler re-closes it
// and rethrows — a hand-rolled try/finally around the copy.
// NOTE(review): decompiled Jimple — all locals are `v` and all jump targets
// are `label`; comments describe apparent intent only.
private synchronized void readBuffer(oadd.org.apache.hadoop.io.DataInputBuffer, oadd.org.apache.hadoop.io.compress.CompressionInputStream) throws java.io.IOException
{
java.lang.Throwable v;
byte[] v;
oadd.org.apache.hadoop.fs.FSDataInputStream v, v;
oadd.org.apache.hadoop.io.compress.CompressionInputStream v;
oadd.org.apache.hadoop.io.DataInputBuffer v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
oadd.org.apache.hadoop.io.DataOutputBuffer v;
int v, v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.io.DataInputBuffer;
v := @parameter: oadd.org.apache.hadoop.io.compress.CompressionInputStream;
// Temporary staging buffer for the raw (still-compressed) bytes.
v = new oadd.org.apache.hadoop.io.DataOutputBuffer;
specialinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: void <init>()>();
label:
// Read the region length as a vint, then copy exactly that many bytes
// from `in` into the staging buffer.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: void write(java.io.DataInput,int)>(v, v);
// Point the destination DataInputBuffer at the staged bytes [0, length).
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: byte[] getData()>();
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: int getLength()>();
virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: void reset(byte[],int,int)>(v, 0, v);
label:
// Normal path: close the staging buffer and fall through to resetState.
virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: void close()>();
goto label;
label:
// Exceptional path: close the staging buffer, then rethrow the Throwable.
v := @caughtexception;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: void close()>();
throw v;
label:
// Reset the decompression filter so it re-reads the refreshed buffer.
virtualinvoke v.<oadd.org.apache.hadoop.io.compress.CompressionInputStream: void resetState()>();
return;
catch java.lang.Throwable from label to label with label;
}
// Loads the next compressed block. If lazy decompression is on and the
// previous block's values were never decompressed, first skips the unread
// value-lengths and values regions (each a vint length followed by that many
// bytes, skipped via getPos+seek). Then resets the per-block counters,
// verifies the sync marker, reads the record count, and decompresses the
// key-lengths and keys buffers. Values are decompressed eagerly here only
// when lazyDecompress is off; otherwise seekToCurrentValue does it later.
// NOTE(review): decompiled Jimple — all locals are `v` and all jump targets
// are `label`; comments describe apparent intent only.
private synchronized void readBlock() throws java.io.IOException
{
byte[] v, v, v, v;
boolean v, v, v, v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
oadd.org.apache.hadoop.io.compress.CompressionInputStream v, v, v, v;
oadd.org.apache.hadoop.io.DataInputBuffer v, v, v, v;
long v, v, v, v;
int v, v, v, v, v;
oadd.org.apache.hadoop.fs.FSDataInputStream v, v, v, v, v, v, v, v, v;
java.io.IOException v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
// Skip the previous block's value regions only when they were deferred
// (lazyDecompress set) and never actually decompressed.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean lazyDecompress>;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean valuesDecompressed>;
if v != 0 goto label;
// Skip the value-lengths region: seek(getPos() + vint length).
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
v = v + v;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void seek(long)>(v);
// Skip the values region the same way.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
v = v + v;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void seek(long)>(v);
label:
// Reset per-block state before loading the new block.
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys> = 0;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues> = 0;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedRecords> = 0;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean valuesDecompressed> = 0;
// Verify the sync marker (skipped when no sync bytes are recorded):
// read the 4-byte sync escape, then the marker, and compare to `sync`.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] sync>;
if v == null goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: int readInt()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] syncCheck>;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void readFully(byte[])>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] sync>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] syncCheck>;
v = staticinvoke <java.util.Arrays: boolean equals(byte[],byte[])>(v, v);
if v != 0 goto label;
// Sync mismatch means the stream is not at a valid block boundary.
v = new java.io.IOException;
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>("File is corrupt!");
throw v;
label:
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean syncSeen> = 1;
// Number of records in this block (vint), then decompress key-lengths
// and keys into their respective buffers.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedRecords> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer keyLenBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream keyLenInFilter>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void readBuffer(oadd.org.apache.hadoop.io.DataInputBuffer,oadd.org.apache.hadoop.io.compress.CompressionInputStream)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer keyBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream keyInFilter>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void readBuffer(oadd.org.apache.hadoop.io.DataInputBuffer,oadd.org.apache.hadoop.io.compress.CompressionInputStream)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedRecords>;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys> = v;
// Eagerly decompress value-lengths and values only when not lazy.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean lazyDecompress>;
if v != 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valLenBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valLenInFilter>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void readBuffer(oadd.org.apache.hadoop.io.DataInputBuffer,oadd.org.apache.hadoop.io.compress.CompressionInputStream)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valInFilter>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void readBuffer(oadd.org.apache.hadoop.io.DataInputBuffer,oadd.org.apache.hadoop.io.compress.CompressionInputStream)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedRecords>;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues> = v;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean valuesDecompressed> = 1;
label:
return;
}
// Positions the value stream at the value of the current key.
// Non-block-compressed path: reset the decompression filter state (when
// record-compressed) and rewind valBuffer. Block-compressed path: lazily
// decompress the values buffers if not yet done, then skip forward past any
// values belonging to keys already consumed — summing vint value lengths
// from valLenIn and skipping that many bytes in valIn, throwing when
// skipBytes comes up short.
// NOTE(review): decompiled Jimple — all locals are `v` and all jump targets
// are `label`; the loop bound (noBufferedValues vs. noBufferedKeys + 1) is
// inferred from statement order; confirm against original source.
private synchronized void seekToCurrentValue() throws java.io.IOException
{
java.io.DataInputStream v, v;
oadd.org.apache.hadoop.io.compress.CompressionInputStream v, v, v;
oadd.org.apache.hadoop.io.DataInputBuffer v, v, v;
int v, v, v, v, v, v, v, v, v;
java.lang.String v;
boolean v, v, v, v;
java.io.IOException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
// Branch on block compression; the non-block path is below.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v != 0 goto label;
// Record-compressed only: reset the value decompression filter.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean decompress>;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valInFilter>;
virtualinvoke v.<oadd.org.apache.hadoop.io.compress.CompressionInputStream: void resetState()>();
label:
// Rewind the value buffer to its start.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: void reset()>();
goto label;
label:
// Block-compressed: decompress the deferred value buffers on first use.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean lazyDecompress>;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean valuesDecompressed>;
if v != 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valLenBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valLenInFilter>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void readBuffer(oadd.org.apache.hadoop.io.DataInputBuffer,oadd.org.apache.hadoop.io.compress.CompressionInputStream)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionInputStream valInFilter>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void readBuffer(oadd.org.apache.hadoop.io.DataInputBuffer,oadd.org.apache.hadoop.io.compress.CompressionInputStream)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedRecords>;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues> = v;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean valuesDecompressed> = 1;
label:
// Accumulate the byte count of values to skip: while more buffered
// values remain than the current key position, read each value length
// and decrement noBufferedValues.
v = 0;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys>;
v = v + 1;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues>;
label:
if v <= v goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valLenIn>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
v = v + v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues>;
v = v - 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues> = v;
v = v - 1;
goto label;
label:
// Skip the accumulated bytes; a short skip means we lost position.
if v <= 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn>;
v = virtualinvoke v.<java.io.DataInputStream: int skipBytes(int)>(v);
if v == v goto label;
v = new java.io.IOException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Failed to seek to \u0001(th) value!");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
return;
}
// Deserializes the current record's value into the supplied Writable.
// Injects `conf` first when the Writable is Configurable, then positions the
// stream via seekToCurrentValue. Non-block path: readFields from valIn, then
// verify the buffered value was fully consumed (valIn.read() must not return
// a positive byte) — otherwise log the remaining byte count and throw an
// IOException reporting bytes read vs. expected, both derived from
// valBuffer position/length minus keyLength. Block path: read the value
// length vint from valLenIn, readFields from valIn, decrement
// noBufferedValues, and debug-log zero-length values.
// NOTE(review): decompiled Jimple — all locals are `v` and all jump targets
// are `label`; comments describe apparent intent only. The format strings at
// the two makeConcatWithConstants sites contain truncated "\u" escapes —
// likely decompiler mangling of "\u0001" placeholders.
public synchronized void getCurrentValue(oadd.org.apache.hadoop.io.Writable) throws java.io.IOException
{
java.io.DataInputStream v, v, v, v, v;
oadd.org.apache.hadoop.io.DataInputBuffer v, v;
oadd.org.apache.hadoop.io.Writable v;
int v, v, v, v, v, v, v, v, v, v, v;
oadd.org.apache.hadoop.conf.Configuration v;
java.lang.String v, v, v;
boolean v, v, v;
org.slf4j.Logger v, v, v;
java.io.IOException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.io.Writable;
// Give Configurable values the reader's Configuration before reading.
v = v instanceof oadd.org.apache.hadoop.conf.Configurable;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
interfaceinvoke v.<oadd.org.apache.hadoop.conf.Configurable: void setConf(oadd.org.apache.hadoop.conf.Configuration)>(v);
label:
// Align the value stream with the current key.
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void seekToCurrentValue()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v != 0 goto label;
// Non-block path: deserialize, then sanity-check full consumption.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn>;
interfaceinvoke v.<oadd.org.apache.hadoop.io.Writable: void readFields(java.io.DataInput)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn>;
v = virtualinvoke v.<java.io.DataInputStream: int read()>();
if v <= 0 goto label;
// Bytes remain: log how many are still available, then fail loudly.
v = <oadd.org.apache.hadoop.io.SequenceFile: org.slf4j.Logger LOG>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn>;
v = virtualinvoke v.<java.io.DataInputStream: int available()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("available bytes: \u0001");
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>(v);
v = new java.io.IOException;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: int getPosition()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
v = v - v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: int getLength()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
v = v - v;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (oadd.org.apache.hadoop.io.Writable,int,int)>(v, v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u read \u bytes, should read \u0001");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
// Block path: read this value's length, deserialize, decrement count.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valLenIn>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn>;
interfaceinvoke v.<oadd.org.apache.hadoop.io.Writable: void readFields(java.io.DataInput)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues>;
v = v - 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues> = v;
if v >= 0 goto label;
// Zero/negative length: emit a debug note when debug logging is on.
v = <oadd.org.apache.hadoop.io.SequenceFile: org.slf4j.Logger LOG>;
v = interfaceinvoke v.<org.slf4j.Logger: boolean isDebugEnabled()>();
if v == 0 goto label;
v = <oadd.org.apache.hadoop.io.SequenceFile: org.slf4j.Logger LOG>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (oadd.org.apache.hadoop.io.Writable)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u is a zero-length value");
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String)>(v);
label:
return;
}
// Reads the value of the record most recently advanced to by next(key),
// deserializing into (or replacing) the supplied object, and returns it.
// Mirrors Hadoop's SequenceFile.Reader#getCurrentValue(Object) in the
// shaded ("oadd."-prefixed) namespace. NOTE(review): this is decompiled
// Jimple IR — all locals were collapsed to `v` and all branch targets to
// `label`, so per-statement dataflow below is reconstructed from bytecode
// order and should be confirmed against the upstream Hadoop source.
public synchronized java.lang.Object getCurrentValue(java.lang.Object) throws java.io.IOException
{
java.io.DataInputStream v, v, v;
oadd.org.apache.hadoop.io.DataInputBuffer v, v;
int v, v, v, v, v, v, v, v, v, v, v;
oadd.org.apache.hadoop.conf.Configuration v;
java.lang.String v, v, v;
boolean v, v, v;
org.slf4j.Logger v, v, v;
java.io.IOException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
java.lang.Object v, v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: java.lang.Object;
// If the value object is Configurable, hand it this reader's Configuration
// before deserialization.
v = v instanceof oadd.org.apache.hadoop.conf.Configurable;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
interfaceinvoke v.<oadd.org.apache.hadoop.conf.Configurable: void setConf(oadd.org.apache.hadoop.conf.Configuration)>(v);
label:
// Position the stream at the current record's value bytes.
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void seekToCurrentValue()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v != 0 goto label;
// --- Record-compressed / uncompressed path: deserialize, then verify the
// value stream is fully consumed (an extra readable byte means a length
// mismatch and triggers the IOException below).
v = specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Object deserializeValue(java.lang.Object)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn>;
v = virtualinvoke v.<java.io.DataInputStream: int read()>();
if v <= 0 goto label;
v = <oadd.org.apache.hadoop.io.SequenceFile: org.slf4j.Logger LOG>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn>;
v = virtualinvoke v.<java.io.DataInputStream: int available()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("available bytes: \u0001");
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>(v);
// Report how many bytes were actually read vs. expected
// (valBuffer position/length minus keyLength).
v = new java.io.IOException;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: int getPosition()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
v = v - v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: int getLength()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
v = v - v;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.Object,int,int)>(v, v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u read \u bytes, should read \u0001");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
// --- Block-compressed path: read the value length, deserialize from the
// buffered value stream, and decrement the buffered-value counter.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valLenIn>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
v = specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Object deserializeValue(java.lang.Object)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues>;
v = v - 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues> = v;
// presumably this guard tests the decoded value length for < 0
// (zero-length value debug log) — confirm against upstream source.
if v >= 0 goto label;
v = <oadd.org.apache.hadoop.io.SequenceFile: org.slf4j.Logger LOG>;
v = interfaceinvoke v.<org.slf4j.Logger: boolean isDebugEnabled()>();
if v == 0 goto label;
v = <oadd.org.apache.hadoop.io.SequenceFile: org.slf4j.Logger LOG>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.Object)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u is a zero-length value");
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String)>(v);
label:
// Return the (possibly newly allocated) deserialized value.
return v;
}
// Delegates to the reader's valDeserializer to decode the current value,
// reusing the supplied object when the Deserializer supports it.
// (valDeserializer is a field declared outside this chunk.)
private java.lang.Object deserializeValue(java.lang.Object) throws java.io.IOException
{
java.lang.Object v, v;
oadd.org.apache.hadoop.io.serializer.Deserializer v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: java.lang.Object;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer valDeserializer>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.io.serializer.Deserializer: java.lang.Object deserialize(java.lang.Object)>(v);
return v;
}
// Advances to the next record and reads its key into the supplied Writable.
// Returns 1 (true) on success, 0 (false) at end of file. Mirrors Hadoop's
// SequenceFile.Reader#next(Writable). NOTE(review): decompiled Jimple IR —
// all locals are `v` and all targets `label`; control flow reconstructed
// from bytecode order, confirm against upstream source.
public synchronized boolean next(oadd.org.apache.hadoop.io.Writable) throws java.io.IOException
{
byte[] v;
oadd.org.apache.hadoop.io.Writable v;
oadd.org.apache.hadoop.io.DataOutputBuffer v, v, v, v;
boolean v;
java.io.EOFException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
java.io.DataInputStream v, v;
oadd.org.apache.hadoop.io.DataInputBuffer v, v, v, v, v;
int v, v, v, v, v, v, v, v, v, v, v;
java.lang.String v, v, v;
java.io.IOException v, v;
java.lang.Class v, v, v, v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.io.Writable;
// Reject keys of the wrong runtime class up front.
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class getKeyClass()>();
if v == v goto label;
v = new java.io.IOException;
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class keyClass>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.Class)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("wrong key class: \u is not \u0001");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v != 0 goto label;
// --- Non-block-compressed path: read the whole record into outBuf via
// next(DataOutputBuffer); a negative keyLength means end of file.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataOutputBuffer outBuf>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: oadd.org.apache.hadoop.io.DataOutputBuffer reset()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataOutputBuffer outBuf>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int next(oadd.org.apache.hadoop.io.DataOutputBuffer)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
if v >= 0 goto label;
return 0;
label:
// Point valBuffer at the record bytes and read the key from its front.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataOutputBuffer outBuf>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: byte[] getData()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataOutputBuffer outBuf>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: int getLength()>();
virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: void reset(byte[],int)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
interfaceinvoke v.<oadd.org.apache.hadoop.io.Writable: void readFields(java.io.DataInput)>(v);
// Mark the key/value boundary so the value can be re-read later.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: void mark(int)>(0);
// Sanity check: the key's readFields must have consumed exactly keyLength
// bytes, otherwise the record is malformed.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: int getPosition()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
if v == v goto label;
v = new java.io.IOException;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: int getPosition()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (oadd.org.apache.hadoop.io.Writable,int,int)>(v, v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u read \u bytes, should read \u0001");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
// --- Block-compressed path: refill the key buffers via readBlock() while
// none are buffered; EOF during the refill (see catch below) means no
// more records.
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean syncSeen> = 0;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys>;
if v != 0 goto label;
label:
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void readBlock()>();
label:
goto label;
label:
// EOF while reading the next block: report no more records.
v := @caughtexception;
return 0;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyLenIn>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
if v >= 0 goto label;
return 0;
label:
// Read the key from the buffered key stream and consume one buffered key.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyIn>;
interfaceinvoke v.<oadd.org.apache.hadoop.io.Writable: void readFields(java.io.DataInput)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys>;
v = v - 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys> = v;
label:
return 1;
catch java.io.EOFException from label to label with label;
}
// Reads the next key/value pair: validates the value's runtime class,
// advances the key via next(Writable), and — only if a key was read —
// fills in the value via getCurrentValue(Writable). Returns the result of
// the key read (false at end of file).
public synchronized boolean next(oadd.org.apache.hadoop.io.Writable, oadd.org.apache.hadoop.io.Writable) throws java.io.IOException
{
oadd.org.apache.hadoop.io.Writable v, v;
java.io.IOException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
java.lang.Class v, v, v;
java.lang.String v;
boolean v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.io.Writable;
v := @parameter: oadd.org.apache.hadoop.io.Writable;
// Reject values of the wrong runtime class before touching the stream.
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class getValueClass()>();
if v == v goto label;
v = new java.io.IOException;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class valClass>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (oadd.org.apache.hadoop.io.Writable,java.lang.Class)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("wrong value class: \u is not \u0001");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean next(oadd.org.apache.hadoop.io.Writable)>(v);
if v == 0 goto label;
virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void getCurrentValue(oadd.org.apache.hadoop.io.Writable)>(v);
label:
return v;
}
// Reads the next record's length from the raw input stream, transparently
// consuming a sync marker if one is present. Returns -1 at (or past) the
// logical end position. Sets syncSeen accordingly. NOTE(review): decompiled
// Jimple — the readInt() value of -1 is the sync-escape sentinel; confirm
// against upstream Hadoop SequenceFile source.
private synchronized int readRecordLength() throws java.io.IOException
{
byte[] v, v, v, v;
long v, v, v, v;
byte v, v, v;
int v, v, v, v;
boolean v;
oadd.org.apache.hadoop.fs.FSDataInputStream v, v, v, v, v;
java.io.IOException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
// At or past the configured end offset: no more records.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long end>;
v = v cmp v;
if v < 0 goto label;
v = (int) -1;
return v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: int readInt()>();
// Sync markers only exist for file version > 1 and when sync hashes are
// enabled (sync != null; see ignoreSync()). A length of -1 escapes into
// a sync block.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte version>;
if v <= 1 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] sync>;
if v == null goto label;
v = (int) -1;
if v != v goto label;
// Verify the sync hash that follows the escape matches this file's sync.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] syncCheck>;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void readFully(byte[])>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] sync>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] syncCheck>;
v = staticinvoke <java.util.Arrays: boolean equals(byte[],byte[])>(v, v);
if v != 0 goto label;
v = new java.io.IOException;
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>("File is corrupt!");
throw v;
label:
// Sync consumed; re-check end-of-range, then read the real record length.
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean syncSeen> = 1;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long end>;
v = v cmp v;
if v < 0 goto label;
v = (int) -1;
return v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: int readInt()>();
goto label;
label:
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean syncSeen> = 0;
label:
return v;
}
// Reads the next raw record (key+value bytes) into the supplied buffer and
// returns the key length, or -1 at end of file. Not supported for
// block-compressed files. On a ChecksumException it skips past the bad
// region via handleChecksumException and retries recursively.
// NOTE(review): decompiled Jimple — exact variable bindings around the
// readInt()/write() pair are ambiguous; confirm against upstream source.
synchronized int next(oadd.org.apache.hadoop.io.DataOutputBuffer) throws java.io.IOException
{
oadd.org.apache.hadoop.fs.FSDataInputStream v, v;
java.io.IOException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
oadd.org.apache.hadoop.io.DataOutputBuffer v;
int v, v, v, v, v;
oadd.org.apache.hadoop.fs.ChecksumException v;
boolean v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.io.DataOutputBuffer;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v == 0 goto label;
v = new java.io.IOException;
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>("Unsupported call for block-compressed SequenceFiles - use SequenceFile.Reader.next(DataOutputStream, ValueBytes)");
throw v;
label:
// Record length of -1 means end of file / range.
v = specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int readRecordLength()>();
v = (int) -1;
if v != v goto label;
label:
v = (int) -1;
return v;
label:
// Read the key length, then copy the record bytes into the buffer.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: int readInt()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: void write(java.io.DataInput,int)>(v, v);
label:
return v;
label:
// Checksum failure: optionally skip (per configuration) and retry.
v := @caughtexception;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void handleChecksumException(oadd.org.apache.hadoop.fs.ChecksumException)>(v);
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int next(oadd.org.apache.hadoop.io.DataOutputBuffer)>(v);
return v;
catch oadd.org.apache.hadoop.fs.ChecksumException from label to label with label;
catch oadd.org.apache.hadoop.fs.ChecksumException from label to label with label;
}
// Factory for a ValueBytes holder appropriate to this file's compression:
// a CompressedBytes (wrapping this reader's codec) only when the file is
// record-compressed (decompress && !blockCompressed); otherwise an
// UncompressedBytes (block-compressed values are already decompressed by
// the buffered value stream).
public oadd.org.apache.hadoop.io.SequenceFile$ValueBytes createValueBytes()
{
oadd.org.apache.hadoop.io.SequenceFile$CompressedBytes v;
oadd.org.apache.hadoop.io.SequenceFile$ValueBytes v;
oadd.org.apache.hadoop.io.SequenceFile$UncompressedBytes v;
oadd.org.apache.hadoop.io.compress.CompressionCodec v;
boolean v, v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean decompress>;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v == 0 goto label;
label:
v = new oadd.org.apache.hadoop.io.SequenceFile$UncompressedBytes;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$UncompressedBytes: void <init>()>();
v = v;
goto label;
label:
v = new oadd.org.apache.hadoop.io.SequenceFile$CompressedBytes;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.compress.CompressionCodec codec>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$CompressedBytes: void <init>(oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v);
v = v;
label:
return v;
}
// Reads the next record's raw key bytes into `key` and positions `val` over
// the raw value bytes (without decoding either). Returns the combined byte
// count consumed, or -1 at end of file. NOTE(review): decompiled Jimple —
// all locals are `v`; the flow below is reconstructed from bytecode order,
// confirm against upstream SequenceFile.Reader#nextRaw.
public synchronized int nextRaw(oadd.org.apache.hadoop.io.DataOutputBuffer, oadd.org.apache.hadoop.io.SequenceFile$ValueBytes) throws java.io.IOException
{
byte v;
oadd.org.apache.hadoop.io.DataOutputBuffer v;
boolean v, v;
java.io.EOFException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
java.io.DataInputStream v, v, v, v;
long v, v;
int v, v, v, v, v, v, v, v, v, v, v, v, v, v, v;
oadd.org.apache.hadoop.fs.FSDataInputStream v, v, v, v, v;
java.io.IOException v;
oadd.org.apache.hadoop.io.SequenceFile$ValueBytes v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.io.DataOutputBuffer;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$ValueBytes;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v != 0 goto label;
// --- Non-block path: record length, then key length; copy the key bytes
// and hand the remaining (length - keyLength) value bytes to the
// ValueBytes holder, compressed or not.
v = specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int readRecordLength()>();
v = (int) -1;
if v != v goto label;
v = (int) -1;
return v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: int readInt()>();
v = v - v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: void write(java.io.DataInput,int)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean decompress>;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$CompressedBytes: void reset(java.io.DataInputStream,int)>(v, v);
goto label;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$UncompressedBytes: void reset(java.io.DataInputStream,int)>(v, v);
label:
return v;
label:
// --- Block-compressed path: refill key buffers while empty, bounded by
// the end offset; EOF during refill (catch below) returns -1.
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean syncSeen> = 0;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys>;
if v != 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long end>;
v = v cmp v;
if v < 0 goto label;
v = (int) -1;
return v;
label:
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void readBlock()>();
label:
goto label;
label:
v := @caughtexception;
v = (int) -1;
return v;
label:
// A non-positive key length in a block is corruption.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyLenIn>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
if v >= 0 goto label;
v = new java.io.IOException;
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>("zero length key found!");
throw v;
label:
// Copy key bytes, consume one buffered key, seek to the value section,
// read the value length, wire val to the buffered value stream, consume
// one buffered value, and return the total (key + value) length.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyIn>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: void write(java.io.DataInput,int)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys>;
v = v - 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys> = v;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void seekToCurrentValue()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valLenIn>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$UncompressedBytes: void reset(java.io.DataInputStream,int)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues>;
v = v - 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues> = v;
v = v + v;
return v;
catch java.io.EOFException from label to label with label;
}
// Reads only the next record's raw key bytes into the supplied buffer and
// returns the key length, or -1 at end of file; the matching value must be
// fetched with nextRawValue. Caches recordLength/keyLength fields for the
// non-block path. NOTE(review): decompiled Jimple — confirm details against
// upstream SequenceFile.Reader#nextRawKey.
public synchronized int nextRawKey(oadd.org.apache.hadoop.io.DataOutputBuffer) throws java.io.IOException
{
java.io.DataInputStream v, v;
long v, v;
byte v;
oadd.org.apache.hadoop.io.DataOutputBuffer v;
int v, v, v, v, v, v, v, v, v, v, v, v, v;
boolean v;
oadd.org.apache.hadoop.fs.FSDataInputStream v, v, v;
java.io.IOException v;
java.io.EOFException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.io.DataOutputBuffer;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v != 0 goto label;
// --- Non-block path: remember the record length and key length so
// nextRawValue can compute the value length later.
v = specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int readRecordLength()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int recordLength> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int recordLength>;
v = (int) -1;
if v != v goto label;
v = (int) -1;
return v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: int readInt()>();
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: void write(java.io.DataInput,int)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
return v;
label:
// --- Block-compressed path: refill key buffers while empty, bounded by
// the end offset; EOF during the refill (catch below) returns -1.
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean syncSeen> = 0;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys>;
if v != 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long end>;
v = v cmp v;
if v < 0 goto label;
v = (int) -1;
return v;
label:
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void readBlock()>();
label:
goto label;
label:
v := @caughtexception;
v = (int) -1;
return v;
label:
// A non-positive key length in a block is corruption.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyLenIn>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
if v >= 0 goto label;
v = new java.io.IOException;
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>("zero length key found!");
throw v;
label:
// Copy the key bytes, consume one buffered key, return the key length.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyIn>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: void write(java.io.DataInput,int)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys>;
v = v - 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys> = v;
return v;
catch java.io.EOFException from label to label with label;
}
// Serialization-framework variant of next(): advances to the next record
// and deserializes its key via the keyDeserializer, reusing the supplied
// object when non-null. Returns the key, or null at end of file. Unlike
// next(Writable), a null key argument is allowed (the class check is
// skipped). NOTE(review): decompiled Jimple — control flow reconstructed
// from bytecode order; confirm against upstream source.
public synchronized java.lang.Object next(java.lang.Object) throws java.io.IOException
{
byte[] v;
oadd.org.apache.hadoop.io.DataOutputBuffer v, v, v, v;
boolean v;
java.io.EOFException v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
java.io.DataInputStream v;
oadd.org.apache.hadoop.io.DataInputBuffer v, v, v, v;
int v, v, v, v, v, v, v, v, v, v, v;
java.lang.String v, v, v;
java.io.IOException v, v;
java.lang.Class v, v, v, v;
java.lang.Object v, v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: java.lang.Object;
// Class check only applies when a reusable key object was supplied.
if v == null goto label;
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class getKeyClass()>();
if v == v goto label;
v = new java.io.IOException;
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Class keyClass>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.Class)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("wrong key class: \u is not \u0001");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v != 0 goto label;
// --- Non-block path: read the full record into outBuf; negative
// keyLength means end of file (return null).
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataOutputBuffer outBuf>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: oadd.org.apache.hadoop.io.DataOutputBuffer reset()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataOutputBuffer outBuf>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int next(oadd.org.apache.hadoop.io.DataOutputBuffer)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength> = v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
if v >= 0 goto label;
return null;
label:
// Deserialize the key from the front of the record buffer and mark the
// key/value boundary for getCurrentValue.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataOutputBuffer outBuf>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: byte[] getData()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataOutputBuffer outBuf>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataOutputBuffer: int getLength()>();
virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: void reset(byte[],int)>(v, v);
v = specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Object deserializeKey(java.lang.Object)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: void mark(int)>(0);
// Sanity check: deserialization must consume exactly keyLength bytes.
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: int getPosition()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
if v == v goto label;
v = new java.io.IOException;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.DataInputBuffer valBuffer>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.DataInputBuffer: int getPosition()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.Object,int,int)>(v, v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u read \u bytes, should read \u0001");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
// --- Block-compressed path: refill key buffers via readBlock() while
// empty; EOF during the refill (catch below) means no more records.
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean syncSeen> = 0;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys>;
if v != 0 goto label;
label:
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void readBlock()>();
label:
goto label;
label:
v := @caughtexception;
return null;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream keyLenIn>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
if v >= 0 goto label;
return null;
label:
// Deserialize from the buffered key stream and consume one buffered key.
v = specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.Object deserializeKey(java.lang.Object)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys>;
v = v - 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys> = v;
label:
return v;
catch java.io.EOFException from label to label with label;
}
// Delegates to the reader's keyDeserializer to decode the current key,
// reusing the supplied object when the Deserializer supports it.
// (keyDeserializer is a field declared outside this chunk.)
private java.lang.Object deserializeKey(java.lang.Object) throws java.io.IOException
{
java.lang.Object v, v;
oadd.org.apache.hadoop.io.serializer.Deserializer v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: java.lang.Object;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.io.serializer.Deserializer keyDeserializer>;
v = interfaceinvoke v.<oadd.org.apache.hadoop.io.serializer.Deserializer: java.lang.Object deserialize(java.lang.Object)>(v);
return v;
}
// Positions the supplied ValueBytes over the raw value bytes of the record
// whose key was just read by nextRawKey/nextRaw, and returns the value
// length. Non-block path derives the length as recordLength - keyLength
// (cached by nextRawKey) and wires the holder to the raw stream,
// compressed or not; block path reads a vint length from the buffered
// value-length stream and consumes one buffered value.
public synchronized int nextRawValue(oadd.org.apache.hadoop.io.SequenceFile$ValueBytes) throws java.io.IOException
{
java.io.DataInputStream v, v;
oadd.org.apache.hadoop.fs.FSDataInputStream v, v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
oadd.org.apache.hadoop.io.SequenceFile$ValueBytes v;
int v, v, v, v, v, v;
boolean v, v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$ValueBytes;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void seekToCurrentValue()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v != 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int recordLength>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int keyLength>;
v = v - v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean decompress>;
if v == 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$CompressedBytes: void reset(java.io.DataInputStream,int)>(v, v);
goto label;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$UncompressedBytes: void reset(java.io.DataInputStream,int)>(v, v);
label:
return v;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valLenIn>;
v = staticinvoke <oadd.org.apache.hadoop.io.WritableUtils: int readVInt(java.io.DataInput)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.io.DataInputStream valIn>;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$UncompressedBytes: void reset(java.io.DataInputStream,int)>(v, v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues>;
v = v - 1;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedValues> = v;
return v;
}
// Policy for checksum failures: if the configuration enables skipping
// (key "oadd.io.skip.checksum.errors", default false), log a warning and
// sync past the bad region (current position + bytes-per-checksum);
// otherwise rethrow the original ChecksumException.
// NOTE(review): the "oadd."-prefixed config keys appear to be artifacts of
// the package-shading pass applied to Hadoop's "io.skip.checksum.errors" /
// "io.bytes.per.checksum" — verify users set the shaded key, or this
// option is effectively always its default.
private void handleChecksumException(oadd.org.apache.hadoop.fs.ChecksumException) throws java.io.IOException
{
org.slf4j.Logger v;
long v, v, v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
int v;
oadd.org.apache.hadoop.fs.ChecksumException v;
oadd.org.apache.hadoop.conf.Configuration v, v;
java.lang.String v;
boolean v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: oadd.org.apache.hadoop.fs.ChecksumException;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
v = virtualinvoke v.<oadd.org.apache.hadoop.conf.Configuration: boolean getBoolean(java.lang.String,boolean)>("oadd.io.skip.checksum.errors", 0);
if v == 0 goto label;
v = <oadd.org.apache.hadoop.io.SequenceFile: org.slf4j.Logger LOG>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long getPosition()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (long)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Bad checksum at \u. Skipping entries.");
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String)>(v);
// Skip ahead one checksum chunk (default 512 bytes) and resync.
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long getPosition()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.conf.Configuration conf>;
v = virtualinvoke v.<oadd.org.apache.hadoop.conf.Configuration: int getInt(java.lang.String,int)>("oadd.io.bytes.per.checksum", 512);
v = v + v;
virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void sync(long)>(v);
goto label;
label:
// Skipping disabled: propagate the failure to the caller.
throw v;
label:
return;
}
synchronized void ignoreSync()
{
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] sync> = null;
return;
}
public synchronized void seek(long) throws java.io.IOException
{
oadd.org.apache.hadoop.fs.FSDataInputStream v;
long v;
boolean v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: long;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void seek(long)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean blockCompressed>;
if v == 0 goto label;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: int noBufferedKeys> = 0;
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean valuesDecompressed> = 1;
label:
return;
}
public synchronized void sync(long) throws java.io.IOException
{
byte[] v, v, v, v, v;
byte v, v, v, v, v, v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
long v, v, v, v, v, v, v, v, v, v, v;
int v, v, v, v, v, v;
oadd.org.apache.hadoop.fs.FSDataInputStream v, v, v, v, v, v;
oadd.org.apache.hadoop.fs.ChecksumException v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v := @parameter: long;
v = v + 20L;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long end>;
v = v cmp v;
if v < 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long end>;
virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void seek(long)>(v);
return;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long headerEnd>;
v = v cmp v;
if v >= 0 goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long headerEnd>;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void seek(long)>(v);
v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean syncSeen> = 1;
return;
label:
v = v + 4L;
virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void seek(long)>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] syncCheck>;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void readFully(byte[])>(v);
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] sync>;
v = lengthof v;
v = 0;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: long end>;
v = v cmp v;
if v >= 0 goto label;
v = 0;
label:
if v >= v goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] sync>;
v = v[v];
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] syncCheck>;
v = v + v;
v = v % v;
v = v[v];
if v != v goto label;
v = v + 1;
goto label;
label:
if v != v goto label;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
v = v - 20L;
virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: void seek(long)>(v);
label:
return;
label:
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: byte[] syncCheck>;
v = v % v;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: byte readByte()>();
v[v] = v;
v = v + 1;
goto label;
label:
v := @caughtexception;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: void handleChecksumException(oadd.org.apache.hadoop.fs.ChecksumException)>(v);
label:
return;
catch oadd.org.apache.hadoop.fs.ChecksumException from label to label with label;
catch oadd.org.apache.hadoop.fs.ChecksumException from label to label with label;
}
public synchronized boolean syncSeen()
{
boolean v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: boolean syncSeen>;
return v;
}
public synchronized long getPosition() throws java.io.IOException
{
long v;
oadd.org.apache.hadoop.fs.FSDataInputStream v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: oadd.org.apache.hadoop.fs.FSDataInputStream in>;
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FSDataInputStream: long getPos()>();
return v;
}
public java.lang.String toString()
{
java.lang.String v;
oadd.org.apache.hadoop.io.SequenceFile$Reader v;
v := @this: oadd.org.apache.hadoop.io.SequenceFile$Reader;
v = v.<oadd.org.apache.hadoop.io.SequenceFile$Reader: java.lang.String filename>;
return v;
}
}