// NOTE(review): This is Soot Jimple IR dumped from compiled bytecode, not Java source.
// The dumper collapsed all local-variable names to `v` and all branch targets to bare
// `label:`, so individual statements below cannot be safely renamed or reordered.
// Package names appear mangled: fields/methods reference `org.apache.hadoop.hive.serde.*`
// while the synthetic switch-map constants reference the real `...serde2...` packages —
// presumably the true classes are the Hive `serde2` ones; verify against the original jar.
//
// Purpose (as evidenced by the code): a Hive SerDe that deserializes Kafka JSON payloads
// (BytesWritable) into rows of Hive primitive Writables, tracking row count and raw data
// size for SerDe statistics. Serialization is explicitly unimplemented.
class org.apache.hadoop.hive.kafka.KafkaJsonSerDe extends org.apache.hadoop.hive.serde.AbstractSerDe
{
// Per-thread Joda-Time DateTimeFormatter (see createAutoParser); ThreadLocal because the
// parser instance is shared across deserialize calls. Raw type — generics erased in bytecode.
private static final java.lang.ThreadLocal TS_PARSER;
// TypeInfo -> ObjectInspector mapping function used by initialize(); bound in <clinit>.
private static final java.util.function.Function TYPEINFO_TO_OI;
// Struct inspector built in initialize(); null until then (getObjectInspector checks this).
private org.apache.hadoop.hive.serde.objectinspector.ObjectInspector inspector;
// Jackson mapper used by parseAsJson; one instance per SerDe.
private final com.fasterxml.jackson.databind.ObjectMapper mapper;
// Statistics accumulated in deserialize() and reported by getSerDeStats().
private long rowCount;
private long rawDataSize;
// Constructor: super(), allocate the ObjectMapper, zero both counters.
void <init>()
{
com.fasterxml.jackson.databind.ObjectMapper v;
org.apache.hadoop.hive.kafka.KafkaJsonSerDe v;
v := @this: org.apache.hadoop.hive.kafka.KafkaJsonSerDe;
specialinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: void <init>()>();
v = new com.fasterxml.jackson.databind.ObjectMapper;
specialinvoke v.<com.fasterxml.jackson.databind.ObjectMapper: void <init>()>();
v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: com.fasterxml.jackson.databind.ObjectMapper mapper> = v;
v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: long rowCount> = 0L;
v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: long rawDataSize> = 0L;
return;
}
// initialize(conf, tableProps, partProps): delegates to the superclass, then builds a
// StandardStructObjectInspector by mapping each column TypeInfo through TYPEINFO_TO_OI
// and pairing the resulting inspectors with getColumnNames().
public void initialize(org.apache.hadoop.conf.Configuration, java.util.Properties, java.util.Properties) throws org.apache.hadoop.hive.serde.SerDeException
{
org.apache.hadoop.hive.kafka.KafkaJsonSerDe v;
java.util.Properties v, v;
org.apache.hadoop.hive.serde.objectinspector.StandardStructObjectInspector v;
java.util.function.Function v;
java.util.List v, v;
java.util.stream.Stream v, v;
org.apache.hadoop.conf.Configuration v;
java.lang.Object v;
java.util.stream.Collector v;
v := @this: org.apache.hadoop.hive.kafka.KafkaJsonSerDe;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.util.Properties;
v := @parameter: java.util.Properties;
specialinvoke v.<org.apache.hadoop.hive.serde.AbstractSerDe: void initialize(org.apache.hadoop.conf.Configuration,java.util.Properties,java.util.Properties)>(v, v, v);
// getColumnTypes().stream().map(TYPEINFO_TO_OI).collect(toList())
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.util.List getColumnTypes()>();
v = interfaceinvoke v.<java.util.List: java.util.stream.Stream stream()>();
v = <org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.util.function.Function TYPEINFO_TO_OI>;
v = interfaceinvoke v.<java.util.stream.Stream: java.util.stream.Stream map(java.util.function.Function)>(v);
v = staticinvoke <java.util.stream.Collectors: java.util.stream.Collector toList()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.lang.Object collect(java.util.stream.Collector)>(v);
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.util.List getColumnNames()>();
v = staticinvoke <org.apache.hadoop.hive.serde.objectinspector.ObjectInspectorFactory: org.apache.hadoop.hive.serde.objectinspector.StandardStructObjectInspector getStandardStructObjectInspector(java.util.List,java.util.List)>(v, v);
v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector inspector> = v;
return;
}
// Declares the serialized form as BytesRefWritable (class literal below), even though
// serialize() itself is unimplemented.
public java.lang.Class getSerializedClass()
{
org.apache.hadoop.hive.kafka.KafkaJsonSerDe v;
v := @this: org.apache.hadoop.hive.kafka.KafkaJsonSerDe;
return class "Lorg/apache/hadoop/hive/serde2/columnar/BytesRefWritable;";
}
// serialize: write path not supported — unconditionally throws SerDeException("unimplemented").
public org.apache.hadoop.io.Writable serialize(java.lang.Object, org.apache.hadoop.hive.serde.objectinspector.ObjectInspector) throws org.apache.hadoop.hive.serde.SerDeException
{
org.apache.hadoop.hive.kafka.KafkaJsonSerDe v;
java.lang.Object v;
org.apache.hadoop.hive.serde.objectinspector.ObjectInspector v;
org.apache.hadoop.hive.serde.SerDeException v;
v := @this: org.apache.hadoop.hive.kafka.KafkaJsonSerDe;
v := @parameter: java.lang.Object;
v := @parameter: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector;
v = new org.apache.hadoop.hive.serde.SerDeException;
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeException: void <init>(java.lang.String)>("unimplemented");
throw v;
}
// Returns a fresh SerDeStats populated from the rawDataSize / rowCount counters that
// deserialize() accumulates.
public org.apache.hadoop.hive.serde.SerDeStats getSerDeStats()
{
org.apache.hadoop.hive.kafka.KafkaJsonSerDe v;
org.apache.hadoop.hive.serde.SerDeStats v;
long v, v;
v := @this: org.apache.hadoop.hive.kafka.KafkaJsonSerDe;
v = new org.apache.hadoop.hive.serde.SerDeStats;
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeStats: void <init>()>();
v = v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: long rawDataSize>;
virtualinvoke v.<org.apache.hadoop.hive.serde.SerDeStats: void setRawDataSize(long)>(v);
v = v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: long rowCount>;
virtualinvoke v.<org.apache.hadoop.hive.serde.SerDeStats: void setRowCount(long)>(v);
return v;
}
// deserialize(Writable):
//  1. Treat the Writable as a BytesWritable, parse its bytes via parseAsJson into a
//     case-insensitive field map (IOException is caught and rethrown as SerDeException —
//     see the `catch` clause at the bottom of this method).
//  2. Bump rowCount by 1 and rawDataSize by the payload length.
//  3. For each declared column: look the name up in the JSON map; null -> add null;
//     otherwise switch on the column TypeInfo's Category (via the synthetic $SwitchMap
//     int[]) — the primitive branch calls parseAsPrimitive; all other matched categories
//     fall to the "not supported yet" SerDeException.
//  4. Return the ArrayList of column values.
// NOTE(review): label targets are ambiguous in this dump; branch pairing above is inferred
// from statement order — confirm against the original bytecode.
public java.lang.Object deserialize(org.apache.hadoop.io.Writable) throws org.apache.hadoop.hive.serde.SerDeException
{
byte[] v;
org.apache.hadoop.hive.kafka.KafkaJsonSerDe v;
long v, v, v, v;
org.apache.hadoop.io.Writable v;
org.apache.hadoop.hive.serde.objectinspector.ObjectInspector$Category v;
java.util.ArrayList v;
int[] v;
java.util.Map v;
int v, v, v, v, v, v;
java.io.IOException v;
java.util.List v, v, v, v, v;
java.lang.Object v, v, v, v, v;
org.apache.hadoop.hive.serde.SerDeException v, v;
v := @this: org.apache.hadoop.hive.kafka.KafkaJsonSerDe;
v := @parameter: org.apache.hadoop.io.Writable;
label:
// Start of the try region (see `catch java.io.IOException` at method end).
v = virtualinvoke v.<org.apache.hadoop.io.BytesWritable: byte[] getBytes()>();
v = specialinvoke v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.util.Map parseAsJson(byte[])>(v);
// rowCount++ ; rawDataSize += payload length
v = v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: long rowCount>;
v = v + 1L;
v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: long rowCount> = v;
v = v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: long rawDataSize>;
v = virtualinvoke v.<org.apache.hadoop.io.BytesWritable: int getLength()>();
v = v + v;
v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: long rawDataSize> = v;
label:
goto label;
label:
// IOException handler: wrap and rethrow as SerDeException (cause preserved).
v := @caughtexception;
v = new org.apache.hadoop.hive.serde.SerDeException;
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeException: void <init>(java.lang.Throwable)>(v);
throw v;
label:
// Result row, presized to the column count.
v = new java.util.ArrayList;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.util.List getColumnNames()>();
v = interfaceinvoke v.<java.util.List: int size()>();
specialinvoke v.<java.util.ArrayList: void <init>(int)>(v);
v = 0;
label:
// Loop over columns: for (i = 0; i < getColumnNames().size(); i++)
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.util.List getColumnNames()>();
v = interfaceinvoke v.<java.util.List: int size()>();
if v >= v goto label;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.util.List getColumnNames()>();
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.util.List getColumnTypes()>();
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
// Field lookup in the (case-insensitive) JSON map; missing field -> null column.
v = interfaceinvoke v.<java.util.Map: java.lang.Object get(java.lang.Object)>(v);
if v != null goto label;
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(null);
goto label;
label:
// Dispatch on ObjectInspector.Category via the synthetic switch-map array.
v = <org.apache.hadoop.hive.kafka.KafkaJsonSerDe$1: int[] $SwitchMap$org$apache$hadoop$hive$serde2$objectinspector$ObjectInspector$Category>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.util.List getColumnTypes()>();
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
v = virtualinvoke v.<org.apache.hadoop.hive.serde.typeinfo.TypeInfo: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector$Category getCategory()>();
v = virtualinvoke v.<org.apache.hadoop.hive.serde.objectinspector.ObjectInspector$Category: int ordinal()>();
v = v[v];
tableswitch(v)
{
case 1: goto label;
case 2: goto label;
case 3: goto label;
case 4: goto label;
case 5: goto label;
default: goto label;
};
label:
// PRIMITIVE category: convert the JsonNode via parseAsPrimitive and add to the row.
v = specialinvoke v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.lang.Object parseAsPrimitive(com.fasterxml.jackson.databind.JsonNode,org.apache.hadoop.hive.serde.typeinfo.TypeInfo)>(v, v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
goto label;
label:
// Non-primitive categories (list/map/struct/union) are rejected.
v = new org.apache.hadoop.hive.serde.SerDeException;
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeException: void <init>(java.lang.String)>("not supported yet");
throw v;
label:
v = v + 1;
goto label;
label:
return v;
catch java.io.IOException from label to label with label;
}
// parseAsPrimitive(JsonNode, TypeInfo): big tableswitch over PrimitiveCategory ordinal
// (via the synthetic $SwitchMap array). Each case wraps the JsonNode's value in the
// matching Hadoop/Hive Writable. Branch bodies, in dump order:
//   TIMESTAMP         -> TimestampWritable via the thread-local Joda parser (parseMillis)
//   TIMESTAMPLOCALTZ  -> TimestampLocalTZWritable: parseMillis -> Instant -> ZonedDateTime
//                        in the TypeInfo's timeZone() -> TimestampTZ
//   BYTE / SHORT / INT / LONG / FLOAT / DOUBLE -> corresponding numeric Writable
//   DECIMAL           -> HiveDecimalWritable via HiveDecimal.create(decimalValue())
//   CHAR / VARCHAR    -> HiveChar(Varchar)Writable with CharTypeInfo.getLength()
//   STRING            -> Text(textValue())
//   BOOLEAN           -> BooleanWritable; uses booleanValue() when the node is a JSON
//                        boolean, otherwise Boolean.valueOf(textValue())
//   default           -> SerDeException("Unknown type: <typeName>")
// NOTE(review): case-number-to-category pairing follows the synthetic switch map, which
// is not visible here — the order above is inferred from the branch bodies.
private java.lang.Object parseAsPrimitive(com.fasterxml.jackson.databind.JsonNode, org.apache.hadoop.hive.serde.typeinfo.TypeInfo) throws org.apache.hadoop.hive.serde.SerDeException
{
org.apache.hadoop.hive.kafka.KafkaJsonSerDe v;
java.time.ZonedDateTime v;
org.apache.hadoop.io.Text v;
org.apache.hadoop.io.LongWritable v;
java.math.BigDecimal v;
org.apache.hadoop.hive.common.type.HiveChar v;
int[] v;
com.fasterxml.jackson.databind.JsonNode v;
org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo v;
org.apache.hadoop.hive.serde.io.ShortWritable v;
org.apache.hadoop.io.IntWritable v;
boolean v, v;
java.lang.ThreadLocal v, v;
org.apache.hadoop.hive.serde.io.HiveVarcharWritable v;
org.apache.hadoop.hive.serde.io.TimestampLocalTZWritable v;
java.time.Instant v;
java.time.ZoneId v;
org.apache.hadoop.io.BooleanWritable v;
float v;
org.apache.hadoop.hive.serde.objectinspector.PrimitiveObjectInspector$PrimitiveCategory v;
org.apache.hadoop.hive.serde.io.ByteWritable v;
long v, v, v;
org.apache.hadoop.hive.serde.io.HiveCharWritable v;
org.apache.hadoop.hive.common.type.HiveVarchar v;
short v;
int v, v, v, v, v, v;
java.lang.Boolean v;
java.lang.String v, v, v, v, v, v, v, v, v;
org.apache.hadoop.hive.serde.io.TimestampWritable v;
org.apache.hadoop.hive.serde.io.DoubleWritable v;
double v;
org.apache.hadoop.hive.common.type.TimestampTZ v;
org.apache.hadoop.hive.serde.typeinfo.TypeInfo v;
org.apache.hadoop.hive.common.type.HiveDecimal v;
java.lang.Object v, v;
org.apache.hadoop.hive.serde.SerDeException v;
org.apache.hadoop.hive.serde.io.HiveDecimalWritable v;
org.apache.hadoop.io.FloatWritable v;
v := @this: org.apache.hadoop.hive.kafka.KafkaJsonSerDe;
v := @parameter: com.fasterxml.jackson.databind.JsonNode;
v := @parameter: org.apache.hadoop.hive.serde.typeinfo.TypeInfo;
v = <org.apache.hadoop.hive.kafka.KafkaJsonSerDe$1: int[] $SwitchMap$org$apache$hadoop$hive$serde2$objectinspector$PrimitiveObjectInspector$PrimitiveCategory>;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.typeinfo.TypeInfo: java.lang.String getTypeName()>();
v = staticinvoke <org.apache.hadoop.hive.serde.typeinfo.TypeInfoFactory: org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo getPrimitiveTypeInfo(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.hive.serde.typeinfo.PrimitiveTypeInfo: org.apache.hadoop.hive.serde.objectinspector.PrimitiveObjectInspector$PrimitiveCategory getPrimitiveCategory()>();
v = virtualinvoke v.<org.apache.hadoop.hive.serde.objectinspector.PrimitiveObjectInspector$PrimitiveCategory: int ordinal()>();
v = v[v];
tableswitch(v)
{
case 1: goto label;
case 2: goto label;
case 3: goto label;
case 4: goto label;
case 5: goto label;
case 6: goto label;
case 7: goto label;
case 8: goto label;
case 9: goto label;
case 10: goto label;
case 11: goto label;
case 12: goto label;
case 13: goto label;
default: goto label;
};
label:
// TIMESTAMP: parse the text value with the thread-local Joda formatter to epoch millis.
v = new org.apache.hadoop.hive.serde.io.TimestampWritable;
specialinvoke v.<org.apache.hadoop.hive.serde.io.TimestampWritable: void <init>()>();
v = <org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.lang.ThreadLocal TS_PARSER>;
v = virtualinvoke v.<java.lang.ThreadLocal: java.lang.Object get()>();
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: java.lang.String textValue()>();
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatter: long parseMillis(java.lang.String)>(v);
virtualinvoke v.<org.apache.hadoop.hive.serde.io.TimestampWritable: void setTime(long)>(v);
return v;
label:
// TIMESTAMPLOCALTZ: millis -> Instant -> ZonedDateTime in the column's zone -> TimestampTZ.
v = <org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.lang.ThreadLocal TS_PARSER>;
v = virtualinvoke v.<java.lang.ThreadLocal: java.lang.Object get()>();
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: java.lang.String textValue()>();
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatter: long parseMillis(java.lang.String)>(v);
v = new org.apache.hadoop.hive.serde.io.TimestampLocalTZWritable;
v = new org.apache.hadoop.hive.common.type.TimestampTZ;
v = staticinvoke <java.time.Instant: java.time.Instant ofEpochMilli(long)>(v);
v = virtualinvoke v.<org.apache.hadoop.hive.serde.typeinfo.TimestampLocalTZTypeInfo: java.time.ZoneId timeZone()>();
v = staticinvoke <java.time.ZonedDateTime: java.time.ZonedDateTime ofInstant(java.time.Instant,java.time.ZoneId)>(v, v);
specialinvoke v.<org.apache.hadoop.hive.common.type.TimestampTZ: void <init>(java.time.ZonedDateTime)>(v);
specialinvoke v.<org.apache.hadoop.hive.serde.io.TimestampLocalTZWritable: void <init>(org.apache.hadoop.hive.common.type.TimestampTZ)>(v);
return v;
label:
// BYTE: note intValue() is narrowed to byte by the constructor's parameter type.
v = new org.apache.hadoop.hive.serde.io.ByteWritable;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: int intValue()>();
specialinvoke v.<org.apache.hadoop.hive.serde.io.ByteWritable: void <init>(byte)>(v);
return v;
label:
// SHORT
v = new org.apache.hadoop.hive.serde.io.ShortWritable;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: short shortValue()>();
specialinvoke v.<org.apache.hadoop.hive.serde.io.ShortWritable: void <init>(short)>(v);
return v;
label:
// INT
v = new org.apache.hadoop.io.IntWritable;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: int intValue()>();
specialinvoke v.<org.apache.hadoop.io.IntWritable: void <init>(int)>(v);
return v;
label:
// LONG
v = new org.apache.hadoop.io.LongWritable;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: long longValue()>();
specialinvoke v.<org.apache.hadoop.io.LongWritable: void <init>(long)>(v);
return v;
label:
// FLOAT
v = new org.apache.hadoop.io.FloatWritable;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: float floatValue()>();
specialinvoke v.<org.apache.hadoop.io.FloatWritable: void <init>(float)>(v);
return v;
label:
// DOUBLE
v = new org.apache.hadoop.hive.serde.io.DoubleWritable;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: double doubleValue()>();
specialinvoke v.<org.apache.hadoop.hive.serde.io.DoubleWritable: void <init>(double)>(v);
return v;
label:
// DECIMAL
v = new org.apache.hadoop.hive.serde.io.HiveDecimalWritable;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: java.math.BigDecimal decimalValue()>();
v = staticinvoke <org.apache.hadoop.hive.common.type.HiveDecimal: org.apache.hadoop.hive.common.type.HiveDecimal create(java.math.BigDecimal)>(v);
specialinvoke v.<org.apache.hadoop.hive.serde.io.HiveDecimalWritable: void <init>(org.apache.hadoop.hive.common.type.HiveDecimal)>(v);
return v;
label:
// CHAR: length taken from the column's CharTypeInfo.
v = new org.apache.hadoop.hive.serde.io.HiveCharWritable;
v = new org.apache.hadoop.hive.common.type.HiveChar;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: java.lang.String textValue()>();
v = virtualinvoke v.<org.apache.hadoop.hive.serde.typeinfo.CharTypeInfo: int getLength()>();
specialinvoke v.<org.apache.hadoop.hive.common.type.HiveChar: void <init>(java.lang.String,int)>(v, v);
specialinvoke v.<org.apache.hadoop.hive.serde.io.HiveCharWritable: void <init>(org.apache.hadoop.hive.common.type.HiveChar)>(v);
return v;
label:
// VARCHAR: same pattern as CHAR.
v = new org.apache.hadoop.hive.serde.io.HiveVarcharWritable;
v = new org.apache.hadoop.hive.common.type.HiveVarchar;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: java.lang.String textValue()>();
v = virtualinvoke v.<org.apache.hadoop.hive.serde.typeinfo.CharTypeInfo: int getLength()>();
specialinvoke v.<org.apache.hadoop.hive.common.type.HiveVarchar: void <init>(java.lang.String,int)>(v, v);
specialinvoke v.<org.apache.hadoop.hive.serde.io.HiveVarcharWritable: void <init>(org.apache.hadoop.hive.common.type.HiveVarchar)>(v);
return v;
label:
// STRING
v = new org.apache.hadoop.io.Text;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: java.lang.String textValue()>();
specialinvoke v.<org.apache.hadoop.io.Text: void <init>(java.lang.String)>(v);
return v;
label:
// BOOLEAN: JSON boolean node -> booleanValue(); otherwise parse the text with
// Boolean.valueOf (non-"true" text, including null, yields false).
v = new org.apache.hadoop.io.BooleanWritable;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: boolean isBoolean()>();
if v == 0 goto label;
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: boolean booleanValue()>();
goto label;
label:
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: java.lang.String textValue()>();
v = staticinvoke <java.lang.Boolean: java.lang.Boolean valueOf(java.lang.String)>(v);
v = virtualinvoke v.<java.lang.Boolean: boolean booleanValue()>();
label:
specialinvoke v.<org.apache.hadoop.io.BooleanWritable: void <init>(boolean)>(v);
return v;
label:
// default: unsupported primitive category. The dynamicinvoke is javac's indy string
// concatenation ("Unknown type: " + typeName).
v = new org.apache.hadoop.hive.serde.SerDeException;
v = virtualinvoke v.<org.apache.hadoop.hive.serde.typeinfo.TypeInfo: java.lang.String getTypeName()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Unknown type: \u0001");
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeException: void <init>(java.lang.String)>(v);
throw v;
}
// parseAsJson(byte[]): Jackson-parse the payload into a JsonNode, then copy its fields
// into a TreeMap ordered by String.CASE_INSENSITIVE_ORDER (so deserialize()'s column
// lookups are case-insensitive). The copy is done by a synthetic lambda class
// (KafkaJsonSerDe$lambda_parseAsJson_1__15) bound over the map and applied via
// Iterator.forEachRemaining on node.fields().
private java.util.Map parseAsJson(byte[]) throws java.io.IOException
{
byte[] v;
org.apache.hadoop.hive.kafka.KafkaJsonSerDe v;
java.util.Iterator v;
com.fasterxml.jackson.databind.ObjectMapper v;
java.util.function.Consumer v;
java.util.TreeMap v;
java.lang.Object v;
java.util.Comparator v;
v := @this: org.apache.hadoop.hive.kafka.KafkaJsonSerDe;
v := @parameter: byte[];
v = v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: com.fasterxml.jackson.databind.ObjectMapper mapper>;
v = virtualinvoke v.<com.fasterxml.jackson.databind.ObjectMapper: java.lang.Object readValue(byte[],java.lang.Class)>(v, class "Lcom/fasterxml/jackson/databind/JsonNode;");
v = <java.lang.String: java.util.Comparator CASE_INSENSITIVE_ORDER>;
v = staticinvoke <com.google.common.collect.Maps: java.util.TreeMap newTreeMap(java.util.Comparator)>(v);
v = virtualinvoke v.<com.fasterxml.jackson.databind.JsonNode: java.util.Iterator fields()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaJsonSerDe$lambda_parseAsJson_1__15: java.util.function.Consumer bootstrap$(java.util.Map)>(v);
interfaceinvoke v.<java.util.Iterator: void forEachRemaining(java.util.function.Consumer)>(v);
return v;
}
// getObjectInspector: fail fast if initialize() has not populated the inspector.
public org.apache.hadoop.hive.serde.objectinspector.ObjectInspector getObjectInspector() throws org.apache.hadoop.hive.serde.SerDeException
{
org.apache.hadoop.hive.kafka.KafkaJsonSerDe v;
org.apache.hadoop.hive.serde.objectinspector.ObjectInspector v, v;
org.apache.hadoop.hive.serde.SerDeException v;
v := @this: org.apache.hadoop.hive.kafka.KafkaJsonSerDe;
v = v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector inspector>;
if v != null goto label;
v = new org.apache.hadoop.hive.serde.SerDeException;
specialinvoke v.<org.apache.hadoop.hive.serde.SerDeException: void <init>(java.lang.String)>("null inspector ??");
throw v;
label:
v = v.<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: org.apache.hadoop.hive.serde.objectinspector.ObjectInspector inspector>;
return v;
}
// createAutoParser: builds a lenient Joda-Time timestamp parser composed of:
//   - a time-zone-offset formatter ("Z" for UTC, 2..4 offset fields),
//   - an optional date/time separator that accepts either 'T' (char 84) or ' ' (char 32),
//   - an optional ISO time element, then the optional zone offset,
//   - an ISO date element, followed by the optional time part above.
// i.e. dates like "2020-01-02", "2020-01-02T03:04:05", and "2020-01-02 03:04:05+01:00"
// should all parse. Backing parser for the TS_PARSER ThreadLocal (see <clinit>).
public static org.joda.time.format.DateTimeFormatter createAutoParser()
{
org.joda.time.format.DateTimeFormatterBuilder v, v, v, v, v, v, v, v, v, v, v, v, v;
org.joda.time.format.DateTimeParser v, v, v, v, v;
org.joda.time.format.DateTimeParser[] v;
org.joda.time.format.DateTimeFormatter v, v, v, v;
v = new org.joda.time.format.DateTimeFormatterBuilder;
specialinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: void <init>()>();
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeFormatterBuilder appendTimeZoneOffset(java.lang.String,boolean,int,int)>("Z", 1, 2, 4);
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeFormatter toFormatter()>();
v = new org.joda.time.format.DateTimeFormatterBuilder;
specialinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: void <init>()>();
// Two alternative separator parsers: literal 'T' or literal ' '.
v = newarray (org.joda.time.format.DateTimeParser)[2];
v = new org.joda.time.format.DateTimeFormatterBuilder;
specialinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: void <init>()>();
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeFormatterBuilder appendLiteral(char)>(84);
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeParser toParser()>();
v[0] = v;
v = new org.joda.time.format.DateTimeFormatterBuilder;
specialinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: void <init>()>();
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeFormatterBuilder appendLiteral(char)>(32);
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeParser toParser()>();
v[1] = v;
// append(null printer, parsers[]) = parse-only alternative between the two separators.
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeFormatterBuilder append(org.joda.time.format.DateTimePrinter,org.joda.time.format.DateTimeParser[])>(null, v);
v = staticinvoke <org.joda.time.format.ISODateTimeFormat: org.joda.time.format.DateTimeFormatter timeElementParser()>();
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatter: org.joda.time.format.DateTimeParser getParser()>();
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeFormatterBuilder appendOptional(org.joda.time.format.DateTimeParser)>(v);
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatter: org.joda.time.format.DateTimeParser getParser()>();
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeFormatterBuilder appendOptional(org.joda.time.format.DateTimeParser)>(v);
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeParser toParser()>();
v = new org.joda.time.format.DateTimeFormatterBuilder;
specialinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: void <init>()>();
v = staticinvoke <org.joda.time.format.ISODateTimeFormat: org.joda.time.format.DateTimeFormatter dateElementParser()>();
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeFormatterBuilder append(org.joda.time.format.DateTimeFormatter)>(v);
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeFormatterBuilder appendOptional(org.joda.time.format.DateTimeParser)>(v);
v = virtualinvoke v.<org.joda.time.format.DateTimeFormatterBuilder: org.joda.time.format.DateTimeFormatter toFormatter()>();
return v;
}
// Static initializer: TS_PARSER = ThreadLocal.withInitial(KafkaJsonSerDe::createAutoParser)
// and TYPEINFO_TO_OI = (static lambda). The $...__16 / $...__17 classes are
// decompiler-materialized lambda stubs (invokedynamic lowered to named classes).
static void <clinit>()
{
java.lang.ThreadLocal v;
java.util.function.Function v;
java.util.function.Supplier v;
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaJsonSerDe$createAutoParser__16: java.util.function.Supplier bootstrap$()>();
v = staticinvoke <java.lang.ThreadLocal: java.lang.ThreadLocal withInitial(java.util.function.Supplier)>(v);
<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.lang.ThreadLocal TS_PARSER> = v;
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaJsonSerDe$lambda_static_0__17: java.util.function.Function bootstrap$()>();
<org.apache.hadoop.hive.kafka.KafkaJsonSerDe: java.util.function.Function TYPEINFO_TO_OI> = v;
return;
}
}