// Decompiled Jimple (Soot IR) dump of the Hive Kafka metadata-column enum.
// NOTE(review): the decompiler flattened every distinct local into the single
// name `v`, so declarations like `java.lang.String v, v;` and the repeated
// reuse of `v` below are dump artifacts, not meaningful identifiers; follow
// the typed assignments to see the actual dataflow.
// NOTE(review): packages read `...hive.serde.typeinfo` / `...hive.serde.objectinspector`
// here; upstream Hive uses `serde2` for these — presumably shortened by the
// dump tooling. TODO confirm against the originating jar.
final enum class org.apache.hadoop.hive.kafka.MetadataColumn extends java.lang.Enum
{
// The four enum constants: one per Kafka metadata column surfaced to Hive.
public static final enum org.apache.hadoop.hive.kafka.MetadataColumn OFFSET;
public static final enum org.apache.hadoop.hive.kafka.MetadataColumn PARTITION;
public static final enum org.apache.hadoop.hive.kafka.MetadataColumn KEY;
public static final enum org.apache.hadoop.hive.kafka.MetadataColumn TIMESTAMP;
// Fixed presentation order [KEY, PARTITION, OFFSET, TIMESTAMP] built in <clinit>;
// note this differs from the enum declaration order above.
private static final java.util.List KAFKA_METADATA_COLUMNS;
// Parallel lists derived from KAFKA_METADATA_COLUMNS in <clinit>:
// one ObjectInspector per column, and one column-name string per column.
static final java.util.List KAFKA_METADATA_INSPECTORS;
static final java.util.List KAFKA_METADATA_COLUMN_NAMES;
// Hive-facing column name (e.g. "__offset") and its Hive type for this constant.
private final java.lang.String name;
private final org.apache.hadoop.hive.serde.typeinfo.TypeInfo typeInfo;
// Lookup table: column name -> enum constant, populated in <clinit> over values().
private static final java.util.Map NAMES_MAP;
// Compiler-generated backing array for values().
private static final org.apache.hadoop.hive.kafka.MetadataColumn[] $VALUES;
// Standard compiler-generated enum values(): returns a defensive clone of $VALUES
// so callers cannot mutate the shared backing array.
public static org.apache.hadoop.hive.kafka.MetadataColumn[] values()
{
java.lang.Object v;
org.apache.hadoop.hive.kafka.MetadataColumn[] v;
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn[] $VALUES>;
v = virtualinvoke v.<java.lang.Object: java.lang.Object clone()>();
return v;
}
// Standard compiler-generated valueOf(String): delegates to Enum.valueOf, which
// throws IllegalArgumentException for an unknown constant name.
public static org.apache.hadoop.hive.kafka.MetadataColumn valueOf(java.lang.String)
{
java.lang.String v;
java.lang.Enum v;
v := @parameter: java.lang.String;
v = staticinvoke <java.lang.Enum: java.lang.Enum valueOf(java.lang.Class,java.lang.String)>(class "Lorg/apache/hadoop/hive/kafka/MetadataColumn;", v);
return v;
}
// Private enum constructor: (enumName, ordinal, hiveColumnName, hiveTypeInfo).
// Chains to Enum(String,int), then stores the column name and TypeInfo fields.
private void <init>(java.lang.String, int, java.lang.String, org.apache.hadoop.hive.serde.typeinfo.TypeInfo)
{
org.apache.hadoop.hive.serde.typeinfo.TypeInfo v;
org.apache.hadoop.hive.kafka.MetadataColumn v;
int v;
java.lang.String v, v;
v := @this: org.apache.hadoop.hive.kafka.MetadataColumn;
v := @parameter: java.lang.String;
v := @parameter: int;
v := @parameter: java.lang.String;
v := @parameter: org.apache.hadoop.hive.serde.typeinfo.TypeInfo;
specialinvoke v.<java.lang.Enum: void <init>(java.lang.String,int)>(v, v);
v.<org.apache.hadoop.hive.kafka.MetadataColumn: java.lang.String name> = v;
v.<org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.serde.typeinfo.TypeInfo typeInfo> = v;
return;
}
// Returns this column's Hive-facing name (e.g. "__partition").
public java.lang.String getName()
{
java.lang.String v;
org.apache.hadoop.hive.kafka.MetadataColumn v;
v := @this: org.apache.hadoop.hive.kafka.MetadataColumn;
v = v.<org.apache.hadoop.hive.kafka.MetadataColumn: java.lang.String name>;
return v;
}
// Builds the writable ObjectInspector for this column by round-tripping the
// stored TypeInfo through its type-name string: TypeInfo -> getTypeName()
// -> TypeInfoFactory.getPrimitiveTypeInfo -> PrimitiveObjectInspectorFactory.
// NOTE(review): this assumes typeInfo is always primitive (true for the four
// constants built in <clinit>: long/int/binary/long).
public org.apache.hadoop.hive.serde.objectinspector.primitive.AbstractPrimitiveWritableObjectInspector getObjectInspector()
{
org.apache.hadoop.hive.serde.typeinfo.TypeInfo v;
org.apache.hadoop.hive.kafka.MetadataColumn v;
java.lang.String v;
org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo v;
org.apache.hadoop.hive.serde.objectinspector.primitive.AbstractPrimitiveWritableObjectInspector v;
v := @this: org.apache.hadoop.hive.kafka.MetadataColumn;
v = v.<org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.serde.typeinfo.TypeInfo typeInfo>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.typeinfo.TypeInfo: java.lang.String getTypeName()>();
v = staticinvoke <org.apache.hadoop.hive.serde.typeinfo.TypeInfoFactory: org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo getPrimitiveTypeInfo(java.lang.String)>(v);
v = staticinvoke <org.apache.hadoop.hive.serde.objectinspector.primitive.PrimitiveObjectInspectorFactory: org.apache.hadoop.hive.serde.objectinspector.primitive.AbstractPrimitiveWritableObjectInspector getPrimitiveWritableObjectInspector(org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo)>(v);
return v;
}
// Looks up a constant by its Hive column name via NAMES_MAP; per Map.get
// semantics this yields null (not an exception) for an unknown name, unlike
// valueOf(String) above.
static org.apache.hadoop.hive.kafka.MetadataColumn forName(java.lang.String)
{
java.lang.Object v;
java.lang.String v;
java.util.Map v;
v := @parameter: java.lang.String;
v = <org.apache.hadoop.hive.kafka.MetadataColumn: java.util.Map NAMES_MAP>;
v = interfaceinvoke v.<java.util.Map: java.lang.Object get(java.lang.Object)>(v);
return v;
}
// Compiler-generated helper: assembles the $VALUES backing array in declaration
// (ordinal) order: OFFSET, PARTITION, KEY, TIMESTAMP.
private static org.apache.hadoop.hive.kafka.MetadataColumn[] $values()
{
org.apache.hadoop.hive.kafka.MetadataColumn[] v;
org.apache.hadoop.hive.kafka.MetadataColumn v, v, v, v;
v = newarray (org.apache.hadoop.hive.kafka.MetadataColumn)[4];
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn OFFSET>;
v[0] = v;
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn PARTITION>;
v[1] = v;
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn KEY>;
v[2] = v;
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn TIMESTAMP>;
v[3] = v;
return v;
}
// Static initializer. Order matters: constants first, then $VALUES, then the
// derived collections (each depends on the previous step).
static void <clinit>()
{
org.apache.hadoop.hive.kafka.MetadataColumn[] v, v, v;
java.util.stream.Collector v, v, v;
org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo v, v, v, v;
java.util.List v, v, v;
java.util.stream.Stream v, v, v, v, v;
org.apache.hadoop.hive.kafka.MetadataColumn v, v, v, v, v, v, v, v;
java.util.function.Function v, v, v, v;
java.lang.Object v, v, v;
// OFFSET = ("__offset", bigint/long), ordinal 0.
v = new org.apache.hadoop.hive.kafka.MetadataColumn;
v = <org.apache.hadoop.hive.serde.typeinfo.TypeInfoFactory: org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo longTypeInfo>;
specialinvoke v.<org.apache.hadoop.hive.kafka.MetadataColumn: void <init>(java.lang.String,int,java.lang.String,org.apache.hadoop.hive.serde.typeinfo.TypeInfo)>("OFFSET", 0, "__offset", v);
<org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn OFFSET> = v;
// PARTITION = ("__partition", int), ordinal 1.
v = new org.apache.hadoop.hive.kafka.MetadataColumn;
v = <org.apache.hadoop.hive.serde.typeinfo.TypeInfoFactory: org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo intTypeInfo>;
specialinvoke v.<org.apache.hadoop.hive.kafka.MetadataColumn: void <init>(java.lang.String,int,java.lang.String,org.apache.hadoop.hive.serde.typeinfo.TypeInfo)>("PARTITION", 1, "__partition", v);
<org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn PARTITION> = v;
// KEY = ("__key", binary), ordinal 2.
v = new org.apache.hadoop.hive.kafka.MetadataColumn;
v = <org.apache.hadoop.hive.serde.typeinfo.TypeInfoFactory: org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo binaryTypeInfo>;
specialinvoke v.<org.apache.hadoop.hive.kafka.MetadataColumn: void <init>(java.lang.String,int,java.lang.String,org.apache.hadoop.hive.serde.typeinfo.TypeInfo)>("KEY", 2, "__key", v);
<org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn KEY> = v;
// TIMESTAMP = ("__timestamp", bigint/long), ordinal 3.
v = new org.apache.hadoop.hive.kafka.MetadataColumn;
v = <org.apache.hadoop.hive.serde.typeinfo.TypeInfoFactory: org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo longTypeInfo>;
specialinvoke v.<org.apache.hadoop.hive.kafka.MetadataColumn: void <init>(java.lang.String,int,java.lang.String,org.apache.hadoop.hive.serde.typeinfo.TypeInfo)>("TIMESTAMP", 3, "__timestamp", v);
<org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn TIMESTAMP> = v;
// Materialize the backing array for values().
v = staticinvoke <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn[] $values()>();
<org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn[] $VALUES> = v;
// KAFKA_METADATA_COLUMNS = Arrays.asList(KEY, PARTITION, OFFSET, TIMESTAMP)
// — deliberately NOT ordinal order; this is the column ordering Hive sees.
v = newarray (org.apache.hadoop.hive.kafka.MetadataColumn)[4];
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn KEY>;
v[0] = v;
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn PARTITION>;
v[1] = v;
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn OFFSET>;
v[2] = v;
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn TIMESTAMP>;
v[3] = v;
v = staticinvoke <java.util.Arrays: java.util.List asList(java.lang.Object[])>(v);
<org.apache.hadoop.hive.kafka.MetadataColumn: java.util.List KAFKA_METADATA_COLUMNS> = v;
// KAFKA_METADATA_INSPECTORS = columns.stream().map(MetadataColumn::getObjectInspector)
// .collect(toList()); the $getObjectInspector__59 class is the compiled lambda.
v = <org.apache.hadoop.hive.kafka.MetadataColumn: java.util.List KAFKA_METADATA_COLUMNS>;
v = interfaceinvoke v.<java.util.List: java.util.stream.Stream stream()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.MetadataColumn$getObjectInspector__59: java.util.function.Function bootstrap$()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.util.stream.Stream map(java.util.function.Function)>(v);
v = staticinvoke <java.util.stream.Collectors: java.util.stream.Collector toList()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.lang.Object collect(java.util.stream.Collector)>(v);
<org.apache.hadoop.hive.kafka.MetadataColumn: java.util.List KAFKA_METADATA_INSPECTORS> = v;
// KAFKA_METADATA_COLUMN_NAMES = columns.stream().map(MetadataColumn::getName)
// .collect(toList()) — same order as KAFKA_METADATA_COLUMNS.
v = <org.apache.hadoop.hive.kafka.MetadataColumn: java.util.List KAFKA_METADATA_COLUMNS>;
v = interfaceinvoke v.<java.util.List: java.util.stream.Stream stream()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.MetadataColumn$getName__60: java.util.function.Function bootstrap$()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.util.stream.Stream map(java.util.function.Function)>(v);
v = staticinvoke <java.util.stream.Collectors: java.util.stream.Collector toList()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.lang.Object collect(java.util.stream.Collector)>(v);
<org.apache.hadoop.hive.kafka.MetadataColumn: java.util.List KAFKA_METADATA_COLUMN_NAMES> = v;
// NAMES_MAP = Arrays.stream(values()).collect(toMap(MetadataColumn::getName,
// Function.identity())); Collectors.toMap without a merge function would throw
// on duplicate column names, so names are implicitly asserted unique here.
v = staticinvoke <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn[] values()>();
v = staticinvoke <java.util.Arrays: java.util.stream.Stream stream(java.lang.Object[])>(v);
v = staticinvoke <org.apache.hadoop.hive.kafka.MetadataColumn$getName__61: java.util.function.Function bootstrap$()>();
v = staticinvoke <java.util.function.Function: java.util.function.Function identity()>();
v = staticinvoke <java.util.stream.Collectors: java.util.stream.Collector toMap(java.util.function.Function,java.util.function.Function)>(v, v);
v = interfaceinvoke v.<java.util.stream.Stream: java.lang.Object collect(java.util.stream.Collector)>(v);
<org.apache.hadoop.hive.kafka.MetadataColumn: java.util.Map NAMES_MAP> = v;
return;
}
}