public class org.apache.hadoop.hive.kafka.KafkaSerDe extends org.apache.hadoop.hive.serde.AbstractSerDe
{
private org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe;
private org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector delegateDeserializerOI;
private org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector delegateSerializerOI;
private org.apache.hadoop.hive.serde.objectinspector.ObjectInspector objectInspector;
private final java.util.List columnNames;
private org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter bytesConverter;
private int metadataStartIndex;
// Constructor: chains to AbstractSerDe's constructor and initializes the
// columnNames field to a fresh empty ArrayList (populated later by initialize()).
// NOTE(review): decompiled Jimple -- every local was flattened to the single
// name `v`, so one identifier denotes several distinct variables; follow
// assignment order to track data flow.
public void <init>()
{
org.apache.hadoop.hive.kafka.KafkaSerDe v;
java.util.ArrayList v;
// v (first decl) = this
v := @this: org.apache.hadoop.hive.kafka.KafkaSerDe;
// super()
specialinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: void <init>()>();
// this.columnNames = new ArrayList()
v = new java.util.ArrayList;
specialinvoke v.<java.util.ArrayList: void <init>()>();
v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.List columnNames> = v;
return;
}
// Initializes the SerDe:
//   1. super.initialize(conf, tableProps, partitionProps)
//   2. resolves the delegate SerDe class from the table property named by
//      KafkaTableProperties.SERDE_CLASS_NAME (with its default), instantiates
//      it via KafkaUtils.createDelegate and initializes it
//   3. requires the delegate's ObjectInspector to be a StructObjectInspector
//   4. builds the combined row: columnNames = delegate field names +
//      MetadataColumn.KAFKA_METADATA_COLUMN_NAMES, inspectors = delegate field
//      OIs + MetadataColumn.KAFKA_METADATA_INSPECTORS, wrapped in a standard
//      struct ObjectInspector
//   5. metadataStartIndex = columnNames.size() - MetadataColumn.values().length
//   6. picks bytesConverter by the delegate's serialized class:
//      Text -> TextBytesConverter; AvroGenericRecordWritable -> converter built
//      from the (required, non-empty) avro schema literal property; otherwise
//      BytesWritableConverter.
// NOTE(review): decompiled Jimple -- all locals print as `v` and all branch
// targets print as the bare `label:`; jump destinations below are inferred
// from control-flow order, not from distinct label names.
public void initialize(org.apache.hadoop.conf.Configuration, java.util.Properties, java.util.Properties) throws org.apache.hadoop.hive.serde.SerDeException
{
org.apache.hadoop.hive.kafka.KafkaSerDe$BytesWritableConverter v;
org.apache.hadoop.hive.serde.objectinspector.StandardStructObjectInspector v;
org.apache.hadoop.hive.serde.AbstractSerDe v, v, v, v, v, v, v;
org.apache.hadoop.conf.Configuration v;
java.util.stream.Collector v, v;
org.apache.hadoop.hive.serde.objectinspector.ObjectInspector v, v, v;
org.apache.avro.Schema v;
java.util.stream.Stream v, v, v, v;
org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter v;
org.apache.hadoop.hive.kafka.KafkaSerDe v;
java.util.ArrayList v;
java.lang.String v, v, v, v, v, v, v;
java.util.Properties v, v, v, v, v;
org.apache.hadoop.hive.serde.SerDeException v;
org.apache.hadoop.hive.kafka.MetadataColumn[] v;
boolean v, v, v;
org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector v, v;
java.util.List v, v, v, v, v, v, v, v, v;
org.apache.hadoop.hive.kafka.KafkaSerDe$TextBytesConverter v;
java.util.function.Function v, v;
org.apache.hadoop.hive.kafka.KafkaTableProperties v, v;
int v, v, v, v;
org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties v;
org.slf4j.Logger v;
java.lang.Class v, v, v;
java.lang.Object v, v;
v := @this: org.apache.hadoop.hive.kafka.KafkaSerDe;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.util.Properties;
v := @parameter: java.util.Properties;
// super.initialize(configuration, tableProperties, partitionProperties)
specialinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: void initialize(org.apache.hadoop.conf.Configuration,java.util.Properties,java.util.Properties)>(v, v, v);
// Look up the delegate SerDe class name, falling back to its default value.
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.Properties properties>;
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties SERDE_CLASS_NAME>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties SERDE_CLASS_NAME>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getDefaultValue()>();
v = virtualinvoke v.<java.util.Properties: java.lang.String getProperty(java.lang.String,java.lang.String)>(v, v);
// Instantiate and initialize the delegate with the same conf/properties.
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: org.apache.hadoop.hive.serde.AbstractSerDe createDelegate(java.lang.String)>(v);
v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe> = v;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe>;
virtualinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: void initialize(org.apache.hadoop.conf.Configuration,java.util.Properties,java.util.Properties)>(v, v, v);
// Fail fast unless the delegate exposes a StructObjectInspector.
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector getObjectInspector()>();
v = v instanceof org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector;
if v != 0 goto label;
v = new org.apache.hadoop.hive.serde.SerDeException;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector getObjectInspector()>();
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
// \u0001 is the StringConcatFactory argument placeholder (indy string concat).
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Was expecting Struct Object Inspector but have \u0001");
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeException: void <init>(java.lang.String)>(v);
throw v;
label:
// Cache the delegate's struct OI for deserialization.
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector getObjectInspector()>();
v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector delegateDeserializerOI> = v;
// columnNames += delegate field names (mapped via StructField::getFieldName).
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.List columnNames>;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector delegateDeserializerOI>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector: java.util.List getAllStructFieldRefs()>();
v = interfaceinvoke v.<java.util.List: java.util.stream.Stream stream()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaSerDe$getFieldName__34: java.util.function.Function bootstrap$()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.util.stream.Stream map(java.util.function.Function)>(v);
v = staticinvoke <java.util.stream.Collectors: java.util.stream.Collector toList()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.lang.Object collect(java.util.stream.Collector)>(v);
interfaceinvoke v.<java.util.List: boolean addAll(java.util.Collection)>(v);
// columnNames += Kafka metadata column names (appended after delegate columns).
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.List columnNames>;
v = <org.apache.hadoop.hive.kafka.MetadataColumn: java.util.List KAFKA_METADATA_COLUMN_NAMES>;
interfaceinvoke v.<java.util.List: boolean addAll(java.util.Collection)>(v);
// Build the parallel inspector list, presized to columnNames.size().
v = new java.util.ArrayList;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.List columnNames>;
v = interfaceinvoke v.<java.util.List: int size()>();
specialinvoke v.<java.util.ArrayList: void <init>(int)>(v);
// inspectors += delegate field OIs (mapped via StructField::getFieldObjectInspector).
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector delegateDeserializerOI>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector: java.util.List getAllStructFieldRefs()>();
v = interfaceinvoke v.<java.util.List: java.util.stream.Stream stream()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaSerDe$getFieldObjectInspector__35: java.util.function.Function bootstrap$()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.util.stream.Stream map(java.util.function.Function)>(v);
v = staticinvoke <java.util.stream.Collectors: java.util.stream.Collector toList()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.lang.Object collect(java.util.stream.Collector)>(v);
interfaceinvoke v.<java.util.List: boolean addAll(java.util.Collection)>(v);
// inspectors += metadata column OIs.
v = <org.apache.hadoop.hive.kafka.MetadataColumn: java.util.List KAFKA_METADATA_INSPECTORS>;
interfaceinvoke v.<java.util.List: boolean addAll(java.util.Collection)>(v);
// objectInspector = standard struct OI over (columnNames, inspectors).
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.List columnNames>;
v = staticinvoke <org.apache.hadoop.hive.serde.objectinspector.ObjectInspectorFactory: org.apache.hadoop.hive.serde.objectinspector.StandardStructObjectInspector getStandardStructObjectInspector(java.util.List,java.util.List)>(v, v);
v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector objectInspector> = v;
// metadataStartIndex = columnNames.size() - MetadataColumn.values().length.
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.List columnNames>;
v = interfaceinvoke v.<java.util.List: int size()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn[] values()>();
v = lengthof v;
v = v - v;
v.<org.apache.hadoop.hive.kafka.KafkaSerDe: int metadataStartIndex> = v;
// Select the bytes converter from the delegate's serialized class.
// Case 1: Text -> TextBytesConverter.
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: java.lang.Class getSerializedClass()>();
if v != class "Lorg/apache/hadoop/io/Text;" goto label;
v = new org.apache.hadoop.hive.kafka.KafkaSerDe$TextBytesConverter;
specialinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe$TextBytesConverter: void <init>()>();
v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter bytesConverter> = v;
goto label;
label:
// Case 2: AvroGenericRecordWritable -> Avro converter built from the schema literal.
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: java.lang.Class getSerializedClass()>();
if v != class "Lorg/apache/hadoop/hive/serde2/avro/AvroGenericRecordWritable;" goto label;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.Properties properties>;
v = <org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties: org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties SCHEMA_LITERAL>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties: java.lang.String getPropName()>();
v = virtualinvoke v.<java.util.Properties: java.lang.String getProperty(java.lang.String,java.lang.String)>(v, "");
// Precondition: the schema literal property must be non-empty.
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v != 0 goto label;
v = 1;
goto label;
label:
v = 0;
label:
staticinvoke <com.google.common.base.Preconditions: void checkArgument(boolean,java.lang.Object)>(v, "Avro Schema is empty Can not go further");
v = staticinvoke <org.apache.hadoop.hive.serde.avro.AvroSerdeUtils: org.apache.avro.Schema getSchemaFor(java.lang.String)>(v);
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.slf4j.Logger log>;
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Object)>("Building Avro Reader with schema {}", v);
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.Properties properties>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter getByteConverterForAvroDelegate(org.apache.avro.Schema,java.util.Properties)>(v, v);
v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter bytesConverter> = v;
goto label;
label:
// Default: generic BytesWritableConverter.
v = new org.apache.hadoop.hive.kafka.KafkaSerDe$BytesWritableConverter;
specialinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe$BytesWritableConverter: void <init>()>();
v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter bytesConverter> = v;
label:
return;
}
// Chooses the Avro bytes converter based on the AVRO_SERDE_TYPE table property
// (defaulting to BytesConverterType.NONE):
//   - switch case 1: read AVRO_SERDE_SKIP_BYTES, parse it as an int, and return
//     an AvroSkipBytesConverter(schema, skipBytes); a NumberFormatException is
//     wrapped in a SerDeException (cause preserved)
//   - switch case 2: return a plain AvroBytesConverter(schema)
//   - default: throw SerDeException ("... was invalid").
// NOTE(review): decompiled Jimple -- locals all print as `v` and branch targets
// as the bare `label:`; the switch-case -> label mapping is inferred from order.
// FIX: the two StringConcatFactory recipes below were extracted with a bare,
// ill-formed `\u` escape; restored to `\u0001`, the indy-concat argument
// placeholder (the argument is the property name in both messages).
org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter getByteConverterForAvroDelegate(org.apache.avro.Schema, java.util.Properties) throws org.apache.hadoop.hive.serde.SerDeException
{
java.lang.Integer v;
org.apache.hadoop.hive.kafka.KafkaSerDe v;
int[] v;
int v, v, v, v;
java.lang.String v, v, v, v, v, v, v;
java.util.Properties v;
org.apache.avro.Schema v;
org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties v, v;
java.lang.NumberFormatException v;
org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverterType v, v;
org.apache.hadoop.hive.serde.SerDeException v, v;
org.apache.hadoop.hive.kafka.KafkaSerDe$AvroBytesConverter v;
org.apache.hadoop.hive.kafka.KafkaSerDe$AvroSkipBytesConverter v;
v := @this: org.apache.hadoop.hive.kafka.KafkaSerDe;
v := @parameter: org.apache.avro.Schema;
v := @parameter: java.util.Properties;
// converterType = BytesConverterType.fromString(props.getProperty(AVRO_SERDE_TYPE, NONE.toString()))
v = <org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties: org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties AVRO_SERDE_TYPE>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties: java.lang.String getPropName()>();
v = <org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverterType: org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverterType NONE>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverterType: java.lang.String toString()>();
v = virtualinvoke v.<java.util.Properties: java.lang.String getProperty(java.lang.String,java.lang.String)>(v, v);
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverterType: org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverterType fromString(java.lang.String)>(v);
// javac's synthetic $SwitchMap table translates the enum ordinal to a case key.
v = <org.apache.hadoop.hive.kafka.KafkaSerDe$1: int[] $SwitchMap$org$apache$hadoop$hive$kafka$KafkaSerDe$BytesConverterType>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverterType: int ordinal()>();
v = v[v];
lookupswitch(v)
{
case 1: goto label;
case 2: goto label;
default: goto label;
};
label:
// case 1: skipBytes = Integer.valueOf(props.getProperty(AVRO_SERDE_SKIP_BYTES))
// (the stray Integer.valueOf(0) is a decompiler artifact of the try-region split)
v = <org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties: org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties AVRO_SERDE_SKIP_BYTES>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.avro.AvroSerdeUtils$AvroTableProperties: java.lang.String getPropName()>();
staticinvoke <java.lang.Integer: java.lang.Integer valueOf(int)>(0);
label:
v = virtualinvoke v.<java.util.Properties: java.lang.String getProperty(java.lang.String)>(v);
v = staticinvoke <java.lang.Integer: int parseInt(java.lang.String)>(v);
v = staticinvoke <java.lang.Integer: java.lang.Integer valueOf(int)>(v);
label:
goto label;
label:
// NumberFormatException handler: rethrow as SerDeException, preserving the cause.
v := @caughtexception;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Value of \u0001 could not be parsed into an integer properly.");
v = new org.apache.hadoop.hive.serde.SerDeException;
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeException: void <init>(java.lang.String,java.lang.Throwable)>(v, v);
throw v;
label:
// case 1 continued: return new AvroSkipBytesConverter(schema, skipBytes)
v = new org.apache.hadoop.hive.kafka.KafkaSerDe$AvroSkipBytesConverter;
v = virtualinvoke v.<java.lang.Integer: int intValue()>();
specialinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe$AvroSkipBytesConverter: void <init>(org.apache.avro.Schema,int)>(v, v);
return v;
label:
// case 2: return new AvroBytesConverter(schema)
v = new org.apache.hadoop.hive.kafka.KafkaSerDe$AvroBytesConverter;
specialinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe$AvroBytesConverter: void <init>(org.apache.avro.Schema)>(v);
return v;
label:
// default: unrecognized converter type -> SerDeException
v = new org.apache.hadoop.hive.serde.SerDeException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Value of \u0001 was invalid.");
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeException: void <init>(java.lang.String)>(v);
throw v;
catch java.lang.NumberFormatException from label to label with label;
}
// Returns the delegate SerDe's serialized Writable class (pure delegation).
public java.lang.Class getSerializedClass()
{
java.lang.Class v;
org.apache.hadoop.hive.kafka.KafkaSerDe v;
org.apache.hadoop.hive.serde.AbstractSerDe v;
v := @this: org.apache.hadoop.hive.kafka.KafkaSerDe;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: java.lang.Class getSerializedClass()>();
return v;
}
// Serializes a full row (delegate columns + trailing Kafka metadata columns)
// into a KafkaWritable:
//   - requires the given ObjectInspector to be a StructObjectInspector
//   - lazily builds delegateSerializerOI as a SubStructObjectInspector over the
//     first metadataStartIndex fields (metadataStartIndex = row size minus
//     MetadataColumn.values().length)
//   - reads the four trailing metadata values at indexes metadataStartIndex..+3
//     (presumably key / partition / offset / timestamp per MetadataColumn order
//     -- TODO confirm against MetadataColumn); the offset value must be -1 or a
//     SerDeException is thrown; null key -> null bytes, null timestamp -> -1L,
//     null partition -> -1
//   - value bytes = bytesConverter.getBytes(delegateSerDe.serialize(subRow, subOI)).
// NOTE(review): decompiled Jimple -- locals all print as `v`, branch targets as
// the bare `label:`; data flow follows assignment order.
// FIX: the StringConcatFactory recipe below was extracted with an ill-formed
// bare `\u` escape for its first placeholder; restored to `\u0001` (the recipe
// takes two String arguments: expected class name and actual class name).
public org.apache.hadoop.io.Writable serialize(java.lang.Object, org.apache.hadoop.hive.serde.objectinspector.ObjectInspector) throws org.apache.hadoop.hive.serde.SerDeException
{
byte[] v, v, v;
org.apache.hadoop.hive.kafka.MetadataColumn[] v;
org.apache.hadoop.io.Writable v;
org.apache.hadoop.hive.serde.AbstractSerDe v;
byte v;
org.apache.hadoop.hive.kafka.KafkaWritable v;
org.apache.hadoop.hive.serde.objectinspector.ObjectInspector v;
boolean v;
org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector v, v;
java.util.List v, v;
org.apache.hadoop.hive.kafka.MetadataColumn v, v, v;
long v, v, v;
org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter v;
org.apache.hadoop.hive.serde.objectinspector.primitive.AbstractPrimitiveWritableObjectInspector v, v, v;
org.apache.hadoop.hive.kafka.KafkaSerDe v;
org.apache.hadoop.hive.serde.objectinspector.primitive.WritableBinaryObjectInspector v;
int v, v, v, v, v, v, v, v;
java.lang.String v, v, v;
java.lang.Class v, v;
java.lang.Object v, v, v, v, v;
org.apache.hadoop.hive.serde.SerDeException v, v;
org.apache.hadoop.hive.kafka.KafkaSerDe$SubStructObjectInspector v;
v := @this: org.apache.hadoop.hive.kafka.KafkaSerDe;
v := @parameter: java.lang.Object;
v := @parameter: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector;
// Reject any OI that is not a StructObjectInspector.
v = v instanceof org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector;
if v != 0 goto label;
v = new org.apache.hadoop.hive.serde.SerDeException;
v = class "Lorg/apache/hadoop/hive/serde2/objectinspector/StructObjectInspector;";
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.String)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Object inspector has to be \u0001 but got \u0001");
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeException: void <init>(java.lang.String)>(v);
throw v;
label:
// data = structOI.getStructFieldsDataAsList(obj);
// firstMetadataIndex = data.size() - MetadataColumn.values().length
v = virtualinvoke v.<org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector: java.util.List getStructFieldsDataAsList(java.lang.Object)>(v);
v = interfaceinvoke v.<java.util.List: int size()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn[] values()>();
v = lengthof v;
v = v - v;
// Lazily build the sub-struct OI covering only the delegate's columns.
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector delegateSerializerOI>;
if v != null goto label;
v = new org.apache.hadoop.hive.kafka.KafkaSerDe$SubStructObjectInspector;
specialinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe$SubStructObjectInspector: void <init>(org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector,int)>(v, v);
v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector delegateSerializerOI> = v;
label:
// subRow = data.subList(0, firstMetadataIndex); then pull the 4 metadata objects.
v = interfaceinvoke v.<java.util.List: java.util.List subList(int,int)>(0, v);
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
v = v + 1;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
v = v + 2;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
v = v + 3;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
// __offset must be -1 on insert; anything else is rejected.
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn OFFSET>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.serde.objectinspector.primitive.AbstractPrimitiveWritableObjectInspector getObjectInspector()>();
v = staticinvoke <org.apache.hadoop.hive.serde.objectinspector.primitive.PrimitiveObjectInspectorUtils: long getLong(java.lang.Object,org.apache.hadoop.hive.serde.objectinspector.PrimitiveObjectInspector)>(v, v);
v = v cmp -1L;
if v == 0 goto label;
v = new org.apache.hadoop.hive.serde.SerDeException;
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeException: void <init>(java.lang.String)>("Can not insert values into `__offset` column, has to be [-1]");
throw v;
label:
// key bytes: null stays null, otherwise extracted via the binary OI.
if v != null goto label;
v = null;
goto label;
label:
v = <org.apache.hadoop.hive.serde.objectinspector.primitive.PrimitiveObjectInspectorFactory: org.apache.hadoop.hive.serde.objectinspector.primitive.WritableBinaryObjectInspector writableBinaryObjectInspector>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.objectinspector.primitive.WritableBinaryObjectInspector: byte[] getPrimitiveJavaObject(java.lang.Object)>(v);
label:
// timestamp: null -> -1L, otherwise read as long via the TIMESTAMP column OI.
v = v;
if v != null goto label;
v = -1L;
goto label;
label:
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn TIMESTAMP>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.serde.objectinspector.primitive.AbstractPrimitiveWritableObjectInspector getObjectInspector()>();
v = staticinvoke <org.apache.hadoop.hive.serde.objectinspector.primitive.PrimitiveObjectInspectorUtils: long getLong(java.lang.Object,org.apache.hadoop.hive.serde.objectinspector.PrimitiveObjectInspector)>(v, v);
label:
// partition: null -> -1, otherwise read as int via the PARTITION column OI.
v = v;
if v != null goto label;
v = (int) -1;
v = v;
goto label;
label:
v = <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn PARTITION>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.serde.objectinspector.primitive.AbstractPrimitiveWritableObjectInspector getObjectInspector()>();
v = staticinvoke <org.apache.hadoop.hive.serde.objectinspector.primitive.PrimitiveObjectInspectorUtils: int getInt(java.lang.Object,org.apache.hadoop.hive.serde.objectinspector.PrimitiveObjectInspector)>(v, v);
label:
// return new KafkaWritable(partition, timestamp,
//     bytesConverter.getBytes(delegateSerDe.serialize(subRow, delegateSerializerOI)), keyBytes)
v = new org.apache.hadoop.hive.kafka.KafkaWritable;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter bytesConverter>;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe>;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector delegateSerializerOI>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: org.apache.hadoop.io.Writable serialize(java.lang.Object,org.apache.hadoop.hive.serde.objectinspector.ObjectInspector)>(v, v);
v = interfaceinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter: byte[] getBytes(org.apache.hadoop.io.Writable)>(v);
specialinvoke v.<org.apache.hadoop.hive.kafka.KafkaWritable: void <init>(int,long,byte[],byte[])>(v, v, v, v);
return v;
}
// Returns the delegate SerDe's statistics (pure delegation).
public org.apache.hadoop.hive.serde.SerDeStats getSerDeStats()
{
org.apache.hadoop.hive.serde.SerDeStats v;
org.apache.hadoop.hive.kafka.KafkaSerDe v;
org.apache.hadoop.hive.serde.AbstractSerDe v;
v := @this: org.apache.hadoop.hive.kafka.KafkaSerDe;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: org.apache.hadoop.hive.serde.SerDeStats getSerDeStats()>();
return v;
}
// Deserializes a KafkaWritable into an Object[] row sized columnNames.size(),
// delegating the actual field extraction to deserializeKWritable.
// NOTE(review): the Writable parameter is passed straight into a call typed
// KafkaWritable -- the original bytecode presumably carried a checkcast that
// the decompiler elided; a non-KafkaWritable argument would fail at runtime.
public java.lang.Object deserialize(org.apache.hadoop.io.Writable) throws org.apache.hadoop.hive.serde.SerDeException
{
java.util.List v;
java.lang.Object[] v;
int v;
org.apache.hadoop.io.Writable v;
org.apache.hadoop.hive.kafka.KafkaSerDe v;
v := @this: org.apache.hadoop.hive.kafka.KafkaSerDe;
v := @parameter: org.apache.hadoop.io.Writable;
// row = new Object[columnNames.size()]
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.List columnNames>;
v = interfaceinvoke v.<java.util.List: int size()>();
v = newarray (java.lang.Object)[v];
virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe: void deserializeKWritable(org.apache.hadoop.hive.kafka.KafkaWritable,java.lang.Object[])>(v, v);
return v;
}
// Fills `row` from a KafkaWritable:
//   - converts the record's value bytes to a Writable via bytesConverter and
//     deserializes it with the delegate SerDe
//   - loop 1 (i in [0, metadataStartIndex)): row[i] = the delegate struct field
//     named columnNames.get(i), extracted from the deserialized value
//   - loop 2 (i in [metadataStartIndex, columnNames.size())): row[i] =
//     kafkaWritable.getHiveWritable(MetadataColumn.forName(columnNames.get(i))).
// NOTE(review): decompiled Jimple -- locals all print as `v`, branch targets as
// the bare `label:`; the two loops are delimited by the goto/label pairs below.
void deserializeKWritable(org.apache.hadoop.hive.kafka.KafkaWritable, java.lang.Object[]) throws org.apache.hadoop.hive.serde.SerDeException
{
byte[] v;
java.lang.Object[] v;
org.apache.hadoop.io.Writable v, v;
org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter v;
org.apache.hadoop.hive.kafka.KafkaSerDe v;
org.apache.hadoop.hive.serde.AbstractSerDe v;
org.apache.hadoop.hive.kafka.KafkaWritable v;
int v, v, v, v;
org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector v, v;
java.util.List v, v, v;
org.apache.hadoop.hive.kafka.MetadataColumn v;
java.lang.Object v, v, v, v;
org.apache.hadoop.hive.serde.objectinspector.StructField v;
v := @this: org.apache.hadoop.hive.kafka.KafkaSerDe;
v := @parameter: org.apache.hadoop.hive.kafka.KafkaWritable;
v := @parameter: java.lang.Object[];
// deserialized = delegateSerDe.deserialize(bytesConverter.getWritable(kw.getValue()))
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.AbstractSerDe delegateSerDe>;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter bytesConverter>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] getValue()>();
v = interfaceinvoke v.<org.apache.hadoop.hive.kafka.KafkaSerDe$BytesConverter: org.apache.hadoop.io.Writable getWritable(byte[])>(v);
v = virtualinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: java.lang.Object deserialize(org.apache.hadoop.io.Writable)>(v);
// Loop 1: copy the delegate's struct fields into row[0 .. metadataStartIndex).
// (getStructFieldRef is resolved by name on every iteration.)
v = 0;
label:
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: int metadataStartIndex>;
if v >= v goto label;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector delegateDeserializerOI>;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector delegateDeserializerOI>;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.List columnNames>;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
v = virtualinvoke v.<org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector: org.apache.hadoop.hive.serde.objectinspector.StructField getStructFieldRef(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.hive.serde.objectinspector.StructObjectInspector: java.lang.Object getStructFieldData(java.lang.Object,org.apache.hadoop.hive.serde.objectinspector.StructField)>(v, v);
v[v] = v;
v = v + 1;
goto label;
label:
// Loop 2: fill the trailing metadata columns from the KafkaWritable itself.
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: int metadataStartIndex>;
label:
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.List columnNames>;
v = interfaceinvoke v.<java.util.List: int size()>();
if v >= v goto label;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: java.util.List columnNames>;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
v = staticinvoke <org.apache.hadoop.hive.kafka.MetadataColumn: org.apache.hadoop.hive.kafka.MetadataColumn forName(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaWritable: org.apache.hadoop.io.Writable getHiveWritable(org.apache.hadoop.hive.kafka.MetadataColumn)>(v);
v[v] = v;
v = v + 1;
goto label;
label:
return;
}
// Returns the combined (delegate + metadata columns) row ObjectInspector
// built during initialize().
public org.apache.hadoop.hive.serde.objectinspector.ObjectInspector getObjectInspector()
{
org.apache.hadoop.hive.serde.objectinspector.ObjectInspector v;
org.apache.hadoop.hive.kafka.KafkaSerDe v;
v := @this: org.apache.hadoop.hive.kafka.KafkaSerDe;
v = v.<org.apache.hadoop.hive.kafka.KafkaSerDe: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector objectInspector>;
return v;
}
}