public class oadd.org.apache.hadoop.io.SequenceFile extends java.lang.Object
{
// Static fields of the (shaded) SequenceFile utility class. In Jimple, field
// initializers are not shown here; they are assigned in <clinit> at the
// bottom of this dump. Only LOG and VERSION are visibly initialized there --
// the values of the remaining constants are not recoverable from this view.
private static final org.slf4j.Logger LOG;
// Format-version marker bytes. NOTE(review): values not visible in this dump.
private static final byte BLOCK_COMPRESS_VERSION;
private static final byte CUSTOM_COMPRESS_VERSION;
private static final byte VERSION_WITH_METADATA;
// 4-byte file header magic; set in <clinit> to {'S','E','Q', 6}.
private static byte[] VERSION;
// Sync-marker layout constants. NOTE(review): values not visible in this dump.
private static final int SYNC_ESCAPE;
private static final int SYNC_HASH_SIZE;
private static final int SYNC_SIZE;
public static final int SYNC_INTERVAL;
// Private no-arg constructor: the class is used purely through its static
// factory methods, so instantiation is blocked. Body only chains to
// java.lang.Object.<init>.
private void <init>()
{
oadd.org.apache.hadoop.io.SequenceFile v;
// v = this (the receiver).
v := @this: oadd.org.apache.hadoop.io.SequenceFile;
specialinvoke v.<java.lang.Object: void <init>()>();
return;
}
// Returns the default CompressionType configured under the (relocated) key
// "oadd.io.seqfile.compression.type". When the key is unset (null), the
// result is CompressionType.RECORD; otherwise the string is parsed with
// CompressionType.valueOf(String), which throws IllegalArgumentException for
// unknown names.
// NOTE(review): this dump collapses distinct locals to the single name `v`
// and distinct branch targets to the literal token `label`; read the two
// `label:` lines below positionally (if-taken target is the second one).
public static oadd.org.apache.hadoop.io.SequenceFile$CompressionType getDefaultCompressionType(oadd.org.apache.hadoop.conf.Configuration)
{
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.conf.Configuration v;
java.lang.String v;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
// Read the configured compression type name (may be null).
v = virtualinvoke v.<oadd.org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("oadd.io.seqfile.compression.type");
if v != null goto label;
// Key absent: fall back to RECORD compression.
v = <oadd.org.apache.hadoop.io.SequenceFile$CompressionType: oadd.org.apache.hadoop.io.SequenceFile$CompressionType RECORD>;
goto label;
label:
// Key present: parse the enum constant by name.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$CompressionType: oadd.org.apache.hadoop.io.SequenceFile$CompressionType valueOf(java.lang.String)>(v);
label:
return v;
}
// Stores the given CompressionType into the Configuration under the
// (relocated) key "oadd.io.seqfile.compression.type", as its toString()
// form, so that getDefaultCompressionType above can read it back.
public static void setDefaultCompressionType(oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.io.SequenceFile$CompressionType)
{
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.conf.Configuration v;
java.lang.String v;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
// Convert the enum to its string name, then write it to the config.
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$CompressionType: java.lang.String toString()>();
virtualinvoke v.<oadd.org.apache.hadoop.conf.Configuration: void set(java.lang.String,java.lang.String)>("oadd.io.seqfile.compression.type", v);
return;
}
// Core varargs factory ("transient" here is the bytecode ACC_VARARGS flag):
// creates a SequenceFile.Writer from a Configuration and Writer options.
// 1. Looks for an explicit CompressionOption among the options.
// 2. If none is given, prepends one built from getDefaultCompressionType.
// 3. Dispatches on the CompressionType via the synthetic inner-class switch
//    table SequenceFile$1.$SwitchMap... and constructs the matching writer:
//    plain Writer, RecordCompressWriter, or BlockCompressWriter.
// NOTE(review): the switch-case-to-enum mapping goes through the synthetic
// $SwitchMap table, so case numbers are NOT the enum ordinals directly;
// presumably case 1/default -> Writer (NONE), 2 -> RecordCompressWriter,
// 3 -> BlockCompressWriter -- confirm against SequenceFile$1.
// NOTE(review): locals are collapsed to `v` and branch targets to `label`
// in this dump; read the label lines positionally.
public static transient oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[]) throws java.io.IOException
{
java.lang.Object[] v;
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option v;
oadd.org.apache.hadoop.io.SequenceFile$BlockCompressWriter v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.io.SequenceFile$RecordCompressWriter v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[] v, v;
int[] v;
int v, v;
java.lang.Object v;
oadd.org.apache.hadoop.conf.Configuration v;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[];
// Search the options array for an explicit CompressionOption.
v = staticinvoke <oadd.org.apache.hadoop.util.Options: java.lang.Object getOption(java.lang.Class,java.lang.Object[])>(class "Loadd/org/apache/hadoop/io/SequenceFile$Writer$CompressionOption;", v);
if v == null goto label;
// Explicit option found: use its CompressionType.
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Writer$CompressionOption: oadd.org.apache.hadoop.io.SequenceFile$CompressionType getValue()>();
goto label;
label:
// No explicit option: take the configured default and prepend a
// compression option so downstream writers see it.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$CompressionType getDefaultCompressionType(oadd.org.apache.hadoop.conf.Configuration)>(v);
v = newarray (oadd.org.apache.hadoop.io.SequenceFile$Writer$Option)[1];
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option compression(oadd.org.apache.hadoop.io.SequenceFile$CompressionType)>(v);
v[0] = v;
v = staticinvoke <oadd.org.apache.hadoop.util.Options: java.lang.Object[] prependOptions(java.lang.Object[],java.lang.Object[])>(v, v);
label:
// Dispatch on the compression type via the synthetic switch-map table.
v = <oadd.org.apache.hadoop.io.SequenceFile$1: int[] $SwitchMap$org$apache$hadoop$io$SequenceFile$CompressionType>;
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$CompressionType: int ordinal()>();
v = v[v];
tableswitch(v)
{
case 1: goto label;
case 2: goto label;
case 3: goto label;
default: goto label;
};
label:
// Uncompressed (and default) path: plain Writer.
v = new oadd.org.apache.hadoop.io.SequenceFile$Writer;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Writer: void <init>(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
label:
// Per-record compression path.
v = new oadd.org.apache.hadoop.io.SequenceFile$RecordCompressWriter;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$RecordCompressWriter: void <init>(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
label:
// Block compression path.
v = new oadd.org.apache.hadoop.io.SequenceFile$BlockCompressWriter;
specialinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$BlockCompressWriter: void <init>(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
}
// Convenience overload: (FileSystem, Configuration, Path, keyClass,
// valueClass). Packs the arguments into a 4-element Writer.Option array
// (filesystem, file, keyClass, valueClass) and delegates to the core
// options-based createWriter above.
public static oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.fs.FileSystem, oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class) throws java.io.IOException
{
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option v, v, v, v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[] v;
java.lang.Class v, v;
oadd.org.apache.hadoop.conf.Configuration v;
oadd.org.apache.hadoop.fs.Path v;
oadd.org.apache.hadoop.fs.FileSystem v;
v := @parameter: oadd.org.apache.hadoop.fs.FileSystem;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.Path;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
// Build the option array: [filesystem, file, keyClass, valueClass].
v = newarray (oadd.org.apache.hadoop.io.SequenceFile$Writer$Option)[4];
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option filesystem(oadd.org.apache.hadoop.fs.FileSystem)>(v);
v[0] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option file(oadd.org.apache.hadoop.fs.Path)>(v);
v[1] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option keyClass(java.lang.Class)>(v);
v[2] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option valueClass(java.lang.Class)>(v);
v[3] = v;
// Delegate to the options-based factory.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
}
// Convenience overload adding a CompressionType. Packs a 5-element option
// array [filesystem, file, keyClass, valueClass, compression] and delegates
// to the core options-based createWriter.
public static oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.fs.FileSystem, oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, oadd.org.apache.hadoop.io.SequenceFile$CompressionType) throws java.io.IOException
{
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option v, v, v, v, v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[] v;
java.lang.Class v, v;
oadd.org.apache.hadoop.conf.Configuration v;
oadd.org.apache.hadoop.fs.Path v;
oadd.org.apache.hadoop.fs.FileSystem v;
v := @parameter: oadd.org.apache.hadoop.fs.FileSystem;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.Path;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
// Build [filesystem, file, keyClass, valueClass, compression].
v = newarray (oadd.org.apache.hadoop.io.SequenceFile$Writer$Option)[5];
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option filesystem(oadd.org.apache.hadoop.fs.FileSystem)>(v);
v[0] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option file(oadd.org.apache.hadoop.fs.Path)>(v);
v[1] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option keyClass(java.lang.Class)>(v);
v[2] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option valueClass(java.lang.Class)>(v);
v[3] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option compression(oadd.org.apache.hadoop.io.SequenceFile$CompressionType)>(v);
v[4] = v;
// Delegate to the options-based factory.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
}
// Convenience overload adding CompressionType and a Progressable callback.
// Packs a 6-element option array [file, filesystem, keyClass, valueClass,
// compression, progressable] and delegates to the core factory. (Note the
// file/filesystem slot order differs from the 5-arg overload above; option
// order is not significant to Options.getOption lookup.)
public static oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.fs.FileSystem, oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, oadd.org.apache.hadoop.io.SequenceFile$CompressionType, oadd.org.apache.hadoop.util.Progressable) throws java.io.IOException
{
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option v, v, v, v, v, v;
oadd.org.apache.hadoop.util.Progressable v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[] v;
java.lang.Class v, v;
oadd.org.apache.hadoop.conf.Configuration v;
oadd.org.apache.hadoop.fs.Path v;
oadd.org.apache.hadoop.fs.FileSystem v;
v := @parameter: oadd.org.apache.hadoop.fs.FileSystem;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.Path;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
v := @parameter: oadd.org.apache.hadoop.util.Progressable;
// Build [file, filesystem, keyClass, valueClass, compression, progressable].
v = newarray (oadd.org.apache.hadoop.io.SequenceFile$Writer$Option)[6];
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option file(oadd.org.apache.hadoop.fs.Path)>(v);
v[0] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option filesystem(oadd.org.apache.hadoop.fs.FileSystem)>(v);
v[1] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option keyClass(java.lang.Class)>(v);
v[2] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option valueClass(java.lang.Class)>(v);
v[3] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option compression(oadd.org.apache.hadoop.io.SequenceFile$CompressionType)>(v);
v[4] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option progressable(oadd.org.apache.hadoop.util.Progressable)>(v);
v[5] = v;
// Delegate to the options-based factory.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
}
// Convenience overload adding CompressionType plus an explicit
// CompressionCodec. Packs a 5-element option array [file, filesystem,
// keyClass, valueClass, compression(type,codec)] and delegates to the core
// factory.
public static oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.fs.FileSystem, oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, oadd.org.apache.hadoop.io.SequenceFile$CompressionType, oadd.org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException
{
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option v, v, v, v, v;
oadd.org.apache.hadoop.io.compress.CompressionCodec v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[] v;
java.lang.Class v, v;
oadd.org.apache.hadoop.conf.Configuration v;
oadd.org.apache.hadoop.fs.Path v;
oadd.org.apache.hadoop.fs.FileSystem v;
v := @parameter: oadd.org.apache.hadoop.fs.FileSystem;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.Path;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
v := @parameter: oadd.org.apache.hadoop.io.compress.CompressionCodec;
// Build [file, filesystem, keyClass, valueClass, compression(type,codec)].
v = newarray (oadd.org.apache.hadoop.io.SequenceFile$Writer$Option)[5];
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option file(oadd.org.apache.hadoop.fs.Path)>(v);
v[0] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option filesystem(oadd.org.apache.hadoop.fs.FileSystem)>(v);
v[1] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option keyClass(java.lang.Class)>(v);
v[2] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option valueClass(java.lang.Class)>(v);
v[3] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option compression(oadd.org.apache.hadoop.io.SequenceFile$CompressionType,oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v, v);
v[4] = v;
// Delegate to the options-based factory.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
}
// Convenience overload adding CompressionType, CompressionCodec,
// Progressable, and file Metadata. Packs a 7-element option array [file,
// filesystem, keyClass, valueClass, compression(type,codec), progressable,
// metadata] and delegates to the core factory.
public static oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.fs.FileSystem, oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, oadd.org.apache.hadoop.io.SequenceFile$CompressionType, oadd.org.apache.hadoop.io.compress.CompressionCodec, oadd.org.apache.hadoop.util.Progressable, oadd.org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException
{
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.conf.Configuration v;
oadd.org.apache.hadoop.io.SequenceFile$Metadata v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option v, v, v, v, v, v, v;
oadd.org.apache.hadoop.util.Progressable v;
oadd.org.apache.hadoop.io.compress.CompressionCodec v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[] v;
java.lang.Class v, v;
oadd.org.apache.hadoop.fs.Path v;
oadd.org.apache.hadoop.fs.FileSystem v;
v := @parameter: oadd.org.apache.hadoop.fs.FileSystem;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.Path;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
v := @parameter: oadd.org.apache.hadoop.io.compress.CompressionCodec;
v := @parameter: oadd.org.apache.hadoop.util.Progressable;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$Metadata;
// Build the 7-slot option array described above.
v = newarray (oadd.org.apache.hadoop.io.SequenceFile$Writer$Option)[7];
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option file(oadd.org.apache.hadoop.fs.Path)>(v);
v[0] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option filesystem(oadd.org.apache.hadoop.fs.FileSystem)>(v);
v[1] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option keyClass(java.lang.Class)>(v);
v[2] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option valueClass(java.lang.Class)>(v);
v[3] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option compression(oadd.org.apache.hadoop.io.SequenceFile$CompressionType,oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v, v);
v[4] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option progressable(oadd.org.apache.hadoop.util.Progressable)>(v);
v[5] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option metadata(oadd.org.apache.hadoop.io.SequenceFile$Metadata)>(v);
v[6] = v;
// Delegate to the options-based factory.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
}
// Fullest FileSystem-based overload: also carries bufferSize (int),
// replication (short), and blockSize (long). Packs a 10-element option array
// [file, filesystem, keyClass, valueClass, bufferSize, replication,
// blockSize, compression(type,codec), progressable, metadata] and delegates
// to the core factory.
public static oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.fs.FileSystem, oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, oadd.org.apache.hadoop.io.SequenceFile$CompressionType, oadd.org.apache.hadoop.io.compress.CompressionCodec, oadd.org.apache.hadoop.util.Progressable, oadd.org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException
{
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
long v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
short v;
int v;
oadd.org.apache.hadoop.conf.Configuration v;
oadd.org.apache.hadoop.io.SequenceFile$Metadata v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option v, v, v, v, v, v, v, v, v, v;
oadd.org.apache.hadoop.util.Progressable v;
oadd.org.apache.hadoop.io.compress.CompressionCodec v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[] v;
java.lang.Class v, v;
oadd.org.apache.hadoop.fs.Path v;
oadd.org.apache.hadoop.fs.FileSystem v;
v := @parameter: oadd.org.apache.hadoop.fs.FileSystem;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.Path;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
v := @parameter: int;
v := @parameter: short;
v := @parameter: long;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
v := @parameter: oadd.org.apache.hadoop.io.compress.CompressionCodec;
v := @parameter: oadd.org.apache.hadoop.util.Progressable;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$Metadata;
// Build the 10-slot option array described above.
v = newarray (oadd.org.apache.hadoop.io.SequenceFile$Writer$Option)[10];
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option file(oadd.org.apache.hadoop.fs.Path)>(v);
v[0] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option filesystem(oadd.org.apache.hadoop.fs.FileSystem)>(v);
v[1] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option keyClass(java.lang.Class)>(v);
v[2] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option valueClass(java.lang.Class)>(v);
v[3] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option bufferSize(int)>(v);
v[4] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option replication(short)>(v);
v[5] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option blockSize(long)>(v);
v[6] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option compression(oadd.org.apache.hadoop.io.SequenceFile$CompressionType,oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v, v);
v[7] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option progressable(oadd.org.apache.hadoop.util.Progressable)>(v);
v[8] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option metadata(oadd.org.apache.hadoop.io.SequenceFile$Metadata)>(v);
v[9] = v;
// Delegate to the options-based factory.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
}
// FileSystem overload with a boolean flag, routed through FileContext rather
// than Writer.Option: builds a FileContext from the FileSystem's URI, opens
// with CreateFlag {CREATE, OVERWRITE}, and passes CreateOpts [bufferSize,
// createParent-or-not, replication, blockSize] to the FileContext-based
// createWriter below.
// NOTE(review): the boolean parameter selects createParent() vs
// donotCreateParent(), so it presumably means "create missing parent
// directories" -- confirm against the public Hadoop API for this overload.
// NOTE(review): locals are collapsed to `v` and branch targets to `label`
// in this dump; read the label lines positionally.
public static oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.fs.FileSystem, oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, boolean, oadd.org.apache.hadoop.io.SequenceFile$CompressionType, oadd.org.apache.hadoop.io.compress.CompressionCodec, oadd.org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException
{
oadd.org.apache.hadoop.fs.FileContext v;
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
oadd.org.apache.hadoop.fs.Options$CreateOpts$BufferSize v;
long v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
short v;
oadd.org.apache.hadoop.fs.CreateFlag v, v;
int v;
oadd.org.apache.hadoop.fs.Options$CreateOpts$ReplicationFactor v;
oadd.org.apache.hadoop.conf.Configuration v;
oadd.org.apache.hadoop.fs.Options$CreateOpts[] v;
java.net.URI v;
java.util.EnumSet v;
boolean v;
oadd.org.apache.hadoop.io.SequenceFile$Metadata v;
oadd.org.apache.hadoop.io.compress.CompressionCodec v;
oadd.org.apache.hadoop.fs.Options$CreateOpts$BlockSize v;
java.lang.Class v, v;
oadd.org.apache.hadoop.fs.Path v;
oadd.org.apache.hadoop.fs.FileSystem v;
oadd.org.apache.hadoop.fs.Options$CreateOpts$CreateParent v;
v := @parameter: oadd.org.apache.hadoop.fs.FileSystem;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.Path;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
v := @parameter: int;
v := @parameter: short;
v := @parameter: long;
v := @parameter: boolean;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
v := @parameter: oadd.org.apache.hadoop.io.compress.CompressionCodec;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$Metadata;
// Resolve a FileContext for the FileSystem's scheme/authority.
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FileSystem: java.net.URI getUri()>();
v = staticinvoke <oadd.org.apache.hadoop.fs.FileContext: oadd.org.apache.hadoop.fs.FileContext getFileContext(java.net.URI,oadd.org.apache.hadoop.conf.Configuration)>(v, v);
// Open flags: create the file, overwriting any existing one.
v = <oadd.org.apache.hadoop.fs.CreateFlag: oadd.org.apache.hadoop.fs.CreateFlag CREATE>;
v = <oadd.org.apache.hadoop.fs.CreateFlag: oadd.org.apache.hadoop.fs.CreateFlag OVERWRITE>;
v = staticinvoke <java.util.EnumSet: java.util.EnumSet of(java.lang.Enum,java.lang.Enum)>(v, v);
// CreateOpts: [bufferSize, (do-not-)createParent, replication, blockSize].
v = newarray (oadd.org.apache.hadoop.fs.Options$CreateOpts)[4];
v = staticinvoke <oadd.org.apache.hadoop.fs.Options$CreateOpts: oadd.org.apache.hadoop.fs.Options$CreateOpts$BufferSize bufferSize(int)>(v);
v[0] = v;
// Boolean flag selects parent-directory creation behavior.
if v == 0 goto label;
v = staticinvoke <oadd.org.apache.hadoop.fs.Options$CreateOpts: oadd.org.apache.hadoop.fs.Options$CreateOpts$CreateParent createParent()>();
goto label;
label:
v = staticinvoke <oadd.org.apache.hadoop.fs.Options$CreateOpts: oadd.org.apache.hadoop.fs.Options$CreateOpts$CreateParent donotCreateParent()>();
label:
v[1] = v;
v = staticinvoke <oadd.org.apache.hadoop.fs.Options$CreateOpts: oadd.org.apache.hadoop.fs.Options$CreateOpts$ReplicationFactor repFac(short)>(v);
v[2] = v;
v = staticinvoke <oadd.org.apache.hadoop.fs.Options$CreateOpts: oadd.org.apache.hadoop.fs.Options$CreateOpts$BlockSize blockSize(long)>(v);
v[3] = v;
// Delegate to the FileContext-based factory.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.fs.FileContext,oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.fs.Path,java.lang.Class,java.lang.Class,oadd.org.apache.hadoop.io.SequenceFile$CompressionType,oadd.org.apache.hadoop.io.compress.CompressionCodec,oadd.org.apache.hadoop.io.SequenceFile$Metadata,java.util.EnumSet,oadd.org.apache.hadoop.fs.Options$CreateOpts[])>(v, v, v, v, v, v, v, v, v, v);
return v;
}
// FileContext-based factory ("transient" = bytecode ACC_VARARGS flag on the
// trailing CreateOpts[] parameter): creates the output stream via
// FileContext.create with the given CreateFlag set and CreateOpts, builds a
// writer over that stream, and marks the writer as owning the stream
// (ownStream()) so closing the writer also closes the stream.
public static transient oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.fs.FileContext, oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, oadd.org.apache.hadoop.io.SequenceFile$CompressionType, oadd.org.apache.hadoop.io.compress.CompressionCodec, oadd.org.apache.hadoop.io.SequenceFile$Metadata, java.util.EnumSet, oadd.org.apache.hadoop.fs.Options$CreateOpts[]) throws java.io.IOException
{
oadd.org.apache.hadoop.fs.FileContext v;
oadd.org.apache.hadoop.io.SequenceFile$Metadata v;
oadd.org.apache.hadoop.io.SequenceFile$Writer v, v;
oadd.org.apache.hadoop.io.compress.CompressionCodec v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.fs.FSDataOutputStream v;
java.lang.Class v, v;
oadd.org.apache.hadoop.conf.Configuration v;
oadd.org.apache.hadoop.fs.Options$CreateOpts[] v;
oadd.org.apache.hadoop.fs.Path v;
java.util.EnumSet v;
v := @parameter: oadd.org.apache.hadoop.fs.FileContext;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.Path;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
v := @parameter: oadd.org.apache.hadoop.io.compress.CompressionCodec;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$Metadata;
v := @parameter: java.util.EnumSet;
v := @parameter: oadd.org.apache.hadoop.fs.Options$CreateOpts[];
// Create the underlying output stream.
v = virtualinvoke v.<oadd.org.apache.hadoop.fs.FileContext: oadd.org.apache.hadoop.fs.FSDataOutputStream create(oadd.org.apache.hadoop.fs.Path,java.util.EnumSet,oadd.org.apache.hadoop.fs.Options$CreateOpts[])>(v, v, v);
// Build a writer over the stream.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.fs.FSDataOutputStream,java.lang.Class,java.lang.Class,oadd.org.apache.hadoop.io.SequenceFile$CompressionType,oadd.org.apache.hadoop.io.compress.CompressionCodec,oadd.org.apache.hadoop.io.SequenceFile$Metadata)>(v, v, v, v, v, v, v);
// Transfer stream ownership to the writer (writer.close() closes it).
v = virtualinvoke v.<oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer ownStream()>();
return v;
}
// Convenience overload adding CompressionType, CompressionCodec, and a
// Progressable. Packs a 6-element option array [file, filesystem, keyClass,
// valueClass, compression(type,codec), progressable] and delegates to the
// core options-based factory.
public static oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.fs.FileSystem, oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, oadd.org.apache.hadoop.io.SequenceFile$CompressionType, oadd.org.apache.hadoop.io.compress.CompressionCodec, oadd.org.apache.hadoop.util.Progressable) throws java.io.IOException
{
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.conf.Configuration v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option v, v, v, v, v, v;
oadd.org.apache.hadoop.util.Progressable v;
oadd.org.apache.hadoop.io.compress.CompressionCodec v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[] v;
java.lang.Class v, v;
oadd.org.apache.hadoop.fs.Path v;
oadd.org.apache.hadoop.fs.FileSystem v;
v := @parameter: oadd.org.apache.hadoop.fs.FileSystem;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.Path;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
v := @parameter: oadd.org.apache.hadoop.io.compress.CompressionCodec;
v := @parameter: oadd.org.apache.hadoop.util.Progressable;
// Build [file, filesystem, keyClass, valueClass, compression, progressable].
v = newarray (oadd.org.apache.hadoop.io.SequenceFile$Writer$Option)[6];
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option file(oadd.org.apache.hadoop.fs.Path)>(v);
v[0] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option filesystem(oadd.org.apache.hadoop.fs.FileSystem)>(v);
v[1] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option keyClass(java.lang.Class)>(v);
v[2] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option valueClass(java.lang.Class)>(v);
v[3] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option compression(oadd.org.apache.hadoop.io.SequenceFile$CompressionType,oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v, v);
v[4] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option progressable(oadd.org.apache.hadoop.util.Progressable)>(v);
v[5] = v;
// Delegate to the options-based factory.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
}
// Stream-based overload with Metadata: the caller supplies an already-open
// FSDataOutputStream (the writer does NOT take ownership here -- contrast
// the FileContext overload above, which calls ownStream()). Packs a
// 5-element option array [stream, keyClass, valueClass,
// compression(type,codec), metadata] and delegates to the core factory.
public static oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, oadd.org.apache.hadoop.io.SequenceFile$CompressionType, oadd.org.apache.hadoop.io.compress.CompressionCodec, oadd.org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException
{
oadd.org.apache.hadoop.io.SequenceFile$Metadata v;
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option v, v, v, v, v;
oadd.org.apache.hadoop.io.compress.CompressionCodec v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[] v;
oadd.org.apache.hadoop.fs.FSDataOutputStream v;
java.lang.Class v, v;
oadd.org.apache.hadoop.conf.Configuration v;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.FSDataOutputStream;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
v := @parameter: oadd.org.apache.hadoop.io.compress.CompressionCodec;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$Metadata;
// Build [stream, keyClass, valueClass, compression, metadata].
v = newarray (oadd.org.apache.hadoop.io.SequenceFile$Writer$Option)[5];
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option stream(oadd.org.apache.hadoop.fs.FSDataOutputStream)>(v);
v[0] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option keyClass(java.lang.Class)>(v);
v[1] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option valueClass(java.lang.Class)>(v);
v[2] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option compression(oadd.org.apache.hadoop.io.SequenceFile$CompressionType,oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v, v);
v[3] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option metadata(oadd.org.apache.hadoop.io.SequenceFile$Metadata)>(v);
v[4] = v;
// Delegate to the options-based factory.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
}
// Stream-based overload without Metadata. Packs a 4-element option array
// [stream, keyClass, valueClass, compression(type,codec)] and delegates to
// the core options-based factory. As with the Metadata variant above, the
// caller retains ownership of the supplied stream.
public static oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration, oadd.org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, oadd.org.apache.hadoop.io.SequenceFile$CompressionType, oadd.org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException
{
oadd.org.apache.hadoop.io.SequenceFile$Writer v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option v, v, v, v;
oadd.org.apache.hadoop.io.compress.CompressionCodec v;
oadd.org.apache.hadoop.io.SequenceFile$CompressionType v;
oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[] v;
oadd.org.apache.hadoop.fs.FSDataOutputStream v;
java.lang.Class v, v;
oadd.org.apache.hadoop.conf.Configuration v;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v := @parameter: oadd.org.apache.hadoop.fs.FSDataOutputStream;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.Class;
v := @parameter: oadd.org.apache.hadoop.io.SequenceFile$CompressionType;
v := @parameter: oadd.org.apache.hadoop.io.compress.CompressionCodec;
// Build [stream, keyClass, valueClass, compression].
v = newarray (oadd.org.apache.hadoop.io.SequenceFile$Writer$Option)[4];
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option stream(oadd.org.apache.hadoop.fs.FSDataOutputStream)>(v);
v[0] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option keyClass(java.lang.Class)>(v);
v[1] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option valueClass(java.lang.Class)>(v);
v[2] = v;
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile$Writer: oadd.org.apache.hadoop.io.SequenceFile$Writer$Option compression(oadd.org.apache.hadoop.io.SequenceFile$CompressionType,oadd.org.apache.hadoop.io.compress.CompressionCodec)>(v, v);
v[3] = v;
// Delegate to the options-based factory.
v = staticinvoke <oadd.org.apache.hadoop.io.SequenceFile: oadd.org.apache.hadoop.io.SequenceFile$Writer createWriter(oadd.org.apache.hadoop.conf.Configuration,oadd.org.apache.hadoop.io.SequenceFile$Writer$Option[])>(v, v);
return v;
}
// Reads the I/O buffer size from the (relocated) config key
// "oadd.io.file.buffer.size", defaulting to 4096 bytes when unset.
private static int getBufferSize(oadd.org.apache.hadoop.conf.Configuration)
{
int v;
oadd.org.apache.hadoop.conf.Configuration v;
v := @parameter: oadd.org.apache.hadoop.conf.Configuration;
v = virtualinvoke v.<oadd.org.apache.hadoop.conf.Configuration: int getInt(java.lang.String,int)>("oadd.io.file.buffer.size", 4096);
return v;
}
// Static initializer: sets up the SLF4J logger and the 4-byte file header
// magic VERSION = {'S','E','Q', 6} (83/69/81 are ASCII 'S','E','Q'; the
// fourth byte is the on-disk format version number, 6).
// NOTE(review): the other static final constants declared at the top of the
// class are not assigned in this dump.
static void <clinit>()
{
byte[] v;
org.slf4j.Logger v;
v = staticinvoke <org.slf4j.LoggerFactory: org.slf4j.Logger getLogger(java.lang.Class)>(class "Loadd/org/apache/hadoop/io/SequenceFile;");
<oadd.org.apache.hadoop.io.SequenceFile: org.slf4j.Logger LOG> = v;
// VERSION = 'S', 'E', 'Q', 6
v = newarray (byte)[4];
v[0] = 83;
v[1] = 69;
v[2] = 81;
v[3] = 6;
<oadd.org.apache.hadoop.io.SequenceFile: byte[] VERSION> = v;
return;
}
}