// Soot Jimple IR (decompiled JVM bytecode) for the nested enum
// org.apache.parquet.hadoop.ParquetFileWriter$STATE — the writer's
// file-lifecycle state machine (NOT_STARTED -> STARTED -> BLOCK -> COLUMN ... -> ENDED).
// NOTE(review): every local was flattened to the name "v" by the decompiler;
// the repeated declarations of "v" within one method are an artifact of that
// flattening, not distinct source-level variables.
enum class org.apache.parquet.hadoop.ParquetFileWriter$STATE extends java.lang.Enum
{
// The five enum constants. NOT_STARTED/STARTED/BLOCK/COLUMN are instances of
// anonymous subclasses STATE$1..STATE$4 (see <clinit> below); ENDED is a plain
// STATE instance, so every transition on ENDED falls through to error().
public static final enum org.apache.parquet.hadoop.ParquetFileWriter$STATE NOT_STARTED;
public static final enum org.apache.parquet.hadoop.ParquetFileWriter$STATE STARTED;
public static final enum org.apache.parquet.hadoop.ParquetFileWriter$STATE BLOCK;
public static final enum org.apache.parquet.hadoop.ParquetFileWriter$STATE COLUMN;
public static final enum org.apache.parquet.hadoop.ParquetFileWriter$STATE ENDED;
// javac-generated backing array for values(); populated once in <clinit> via $values().
private static final org.apache.parquet.hadoop.ParquetFileWriter$STATE[] $VALUES;
// Standard compiler-generated values(): returns a defensive clone of $VALUES
// so callers cannot mutate the shared array.
public static org.apache.parquet.hadoop.ParquetFileWriter$STATE[] values()
{
java.lang.Object v;
org.apache.parquet.hadoop.ParquetFileWriter$STATE[] v;
v = <org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE[] $VALUES>;
v = virtualinvoke v.<java.lang.Object: java.lang.Object clone()>();
return v;
}
// Standard compiler-generated valueOf(String): delegates to Enum.valueOf with
// this enum's Class literal; throws IllegalArgumentException for unknown names.
public static org.apache.parquet.hadoop.ParquetFileWriter$STATE valueOf(java.lang.String)
{
java.lang.String v;
java.lang.Enum v;
v := @parameter: java.lang.String;
v = staticinvoke <java.lang.Enum: java.lang.Enum valueOf(java.lang.Class,java.lang.String)>(class "Lorg/apache/parquet/hadoop/ParquetFileWriter$STATE;", v);
return v;
}
// Enum constructor: forwards (name, ordinal) to java.lang.Enum.<init>.
private void <init>(java.lang.String, int)
{
int v;
java.lang.String v;
org.apache.parquet.hadoop.ParquetFileWriter$STATE v;
v := @this: org.apache.parquet.hadoop.ParquetFileWriter$STATE;
v := @parameter: java.lang.String;
v := @parameter: int;
specialinvoke v.<java.lang.Enum: void <init>(java.lang.String,int)>(v, v);
return;
}
// Transition methods. In this base class every transition is illegal and
// delegates to error(), which throws IOException. NOTE(review): the anonymous
// subclasses STATE$1..STATE$4 presumably override the transitions that are
// legal from their respective states (e.g. NOT_STARTED.start() -> STARTED) —
// their bodies are in separate class files not visible here; confirm against
// ParquetFileWriter source.
org.apache.parquet.hadoop.ParquetFileWriter$STATE start() throws java.io.IOException
{
org.apache.parquet.hadoop.ParquetFileWriter$STATE v, v;
v := @this: org.apache.parquet.hadoop.ParquetFileWriter$STATE;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE error()>();
return v;
}
org.apache.parquet.hadoop.ParquetFileWriter$STATE startBlock() throws java.io.IOException
{
org.apache.parquet.hadoop.ParquetFileWriter$STATE v, v;
v := @this: org.apache.parquet.hadoop.ParquetFileWriter$STATE;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE error()>();
return v;
}
org.apache.parquet.hadoop.ParquetFileWriter$STATE startColumn() throws java.io.IOException
{
org.apache.parquet.hadoop.ParquetFileWriter$STATE v, v;
v := @this: org.apache.parquet.hadoop.ParquetFileWriter$STATE;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE error()>();
return v;
}
org.apache.parquet.hadoop.ParquetFileWriter$STATE write() throws java.io.IOException
{
org.apache.parquet.hadoop.ParquetFileWriter$STATE v, v;
v := @this: org.apache.parquet.hadoop.ParquetFileWriter$STATE;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE error()>();
return v;
}
org.apache.parquet.hadoop.ParquetFileWriter$STATE endColumn() throws java.io.IOException
{
org.apache.parquet.hadoop.ParquetFileWriter$STATE v, v;
v := @this: org.apache.parquet.hadoop.ParquetFileWriter$STATE;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE error()>();
return v;
}
org.apache.parquet.hadoop.ParquetFileWriter$STATE endBlock() throws java.io.IOException
{
org.apache.parquet.hadoop.ParquetFileWriter$STATE v, v;
v := @this: org.apache.parquet.hadoop.ParquetFileWriter$STATE;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE error()>();
return v;
}
org.apache.parquet.hadoop.ParquetFileWriter$STATE end() throws java.io.IOException
{
org.apache.parquet.hadoop.ParquetFileWriter$STATE v, v;
v := @this: org.apache.parquet.hadoop.ParquetFileWriter$STATE;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE error()>();
return v;
}
// Shared failure path: builds "The file being written is in an invalid state.
// ... Current state: <name()>" and throws it as an IOException. The declared
// STATE return type is never reached.
private final org.apache.parquet.hadoop.ParquetFileWriter$STATE error() throws java.io.IOException
{
java.io.IOException v;
java.lang.String v, v;
org.apache.parquet.hadoop.ParquetFileWriter$STATE v;
v := @this: org.apache.parquet.hadoop.ParquetFileWriter$STATE;
v = new java.io.IOException;
v = virtualinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE: java.lang.String name()>();
// dynamicinvoke = javac's indified string concatenation (JEP 280): the
// StringConcatFactory bootstrap splices name() into the recipe at the
// \u0001 placeholder.
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("The file being written is in an invalid state. Probably caused by an error thrown previously. Current state: \u0001");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
}
// Compiler-generated (JDK 15+ javac pattern): assembles the $VALUES array in
// ordinal order from the already-initialized constants.
private static org.apache.parquet.hadoop.ParquetFileWriter$STATE[] $values()
{
org.apache.parquet.hadoop.ParquetFileWriter$STATE[] v;
org.apache.parquet.hadoop.ParquetFileWriter$STATE v, v, v, v, v;
v = newarray (org.apache.parquet.hadoop.ParquetFileWriter$STATE)[5];
v = <org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE NOT_STARTED>;
v[0] = v;
v = <org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE STARTED>;
v[1] = v;
v = <org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE BLOCK>;
v[2] = v;
v = <org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE COLUMN>;
v[3] = v;
v = <org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE ENDED>;
v[4] = v;
return v;
}
// Static initializer: instantiates each constant — the first four as
// anonymous-subclass instances (STATE$1..STATE$4, declared elsewhere), ENDED
// as a base STATE — then populates $VALUES via $values().
static void <clinit>()
{
org.apache.parquet.hadoop.ParquetFileWriter$STATE$2 v;
org.apache.parquet.hadoop.ParquetFileWriter$STATE$1 v;
org.apache.parquet.hadoop.ParquetFileWriter$STATE$4 v;
org.apache.parquet.hadoop.ParquetFileWriter$STATE$3 v;
org.apache.parquet.hadoop.ParquetFileWriter$STATE[] v;
org.apache.parquet.hadoop.ParquetFileWriter$STATE v;
v = new org.apache.parquet.hadoop.ParquetFileWriter$STATE$1;
specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE$1: void <init>(java.lang.String,int)>("NOT_STARTED", 0);
<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE NOT_STARTED> = v;
v = new org.apache.parquet.hadoop.ParquetFileWriter$STATE$2;
specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE$2: void <init>(java.lang.String,int)>("STARTED", 1);
<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE STARTED> = v;
v = new org.apache.parquet.hadoop.ParquetFileWriter$STATE$3;
specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE$3: void <init>(java.lang.String,int)>("BLOCK", 2);
<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE BLOCK> = v;
v = new org.apache.parquet.hadoop.ParquetFileWriter$STATE$4;
specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE$4: void <init>(java.lang.String,int)>("COLUMN", 3);
<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE COLUMN> = v;
v = new org.apache.parquet.hadoop.ParquetFileWriter$STATE;
specialinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter$STATE: void <init>(java.lang.String,int)>("ENDED", 4);
<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE ENDED> = v;
v = staticinvoke <org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE[] $values()>();
<org.apache.parquet.hadoop.ParquetFileWriter$STATE: org.apache.parquet.hadoop.ParquetFileWriter$STATE[] $VALUES> = v;
return;
}
}