public class org.apache.hadoop.hive.kafka.KafkaWritable extends java.lang.Object implements org.apache.hadoop.io.Writable
{
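// One Kafka consumer record in Hadoop Writable form: the record's coordinates
// (partition, offset, timestamp) plus its raw key and value payloads.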
private int partition;
private long offset;
private long timestamp;
private byte[] value;
private byte[] recordKey;
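// Copies the coordinates and payloads out of a ConsumerRecord. The key and
// value come back as java.lang.Object and are cast to byte[], which presumes
// the consumer was configured with a byte-array deserializer.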
void set(org.apache.kafka.clients.consumer.ConsumerRecord)
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
org.apache.kafka.clients.consumer.ConsumerRecord r1;
int i0;
long l0, l1;
java.lang.Object o0, o1;
byte[] b0, b1;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
r1 := @parameter0: org.apache.kafka.clients.consumer.ConsumerRecord;
i0 = virtualinvoke r1.<org.apache.kafka.clients.consumer.ConsumerRecord: int partition()>();
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: int partition> = i0;
l0 = virtualinvoke r1.<org.apache.kafka.clients.consumer.ConsumerRecord: long timestamp()>();
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long timestamp> = l0;
l1 = virtualinvoke r1.<org.apache.kafka.clients.consumer.ConsumerRecord: long offset()>();
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long offset> = l1;
o0 = virtualinvoke r1.<org.apache.kafka.clients.consumer.ConsumerRecord: java.lang.Object value()>();
b0 = (byte[]) o0;
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] value> = b0;
o1 = virtualinvoke r1.<org.apache.kafka.clients.consumer.ConsumerRecord: java.lang.Object key()>();
b1 = (byte[]) o1;
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] recordKey> = b1;
return;
}
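// Full constructor: sets all five fields directly.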
void <init>(int, long, long, byte[], byte[])
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
int i0;
long l0, l1;
byte[] b0, b1;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
i0 := @parameter0: int;
l0 := @parameter1: long;
l1 := @parameter2: long;
b0 := @parameter3: byte[];
b1 := @parameter4: byte[];
specialinvoke r0.<java.lang.Object: void <init>()>();
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: int partition> = i0;
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long offset> = l0;
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long timestamp> = l1;
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] value> = b0;
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] recordKey> = b1;
return;
}
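// Delegates to the full constructor with the offset fixed at -1L; the single
// long parameter here is therefore the timestamp.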
void <init>(int, long, byte[], byte[])
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
int i0;
long l0;
byte[] b0, b1;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
i0 := @parameter0: int;
l0 := @parameter1: long;
b0 := @parameter2: byte[];
b1 := @parameter3: byte[];
specialinvoke r0.<org.apache.hadoop.hive.kafka.KafkaWritable: void <init>(int,long,long,byte[],byte[])>(i0, -1L, l0, b0, b1);
return;
}
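// No-arg constructor required by the Writable contract, so Hadoop can
// instantiate the class reflectively before calling readFields().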
public void <init>()
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
specialinvoke r0.<java.lang.Object: void <init>()>();
return;
}
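// Serializes the record in a fixed order: timestamp (long), partition (int),
// offset (long), value length (int) + value bytes, then either recordKey
// length (int) + key bytes, or a single -1 (int) when the key is null.
// A rough Java-source equivalent (a reconstruction, not the original source):
/*
public void write(DataOutput out) throws IOException {
  out.writeLong(timestamp);
  out.writeInt(partition);
  out.writeLong(offset);
  out.writeInt(value.length);
  out.write(value);
  if (recordKey != null) {
    out.writeInt(recordKey.length);
    out.write(recordKey);
  } else {
    out.writeInt(-1);           // sentinel for a null key
  }
}
*/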
public void write(java.io.DataOutput) throws java.io.IOException
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
java.io.DataOutput r1;
long l0, l1;
int i0, i1, i2;
byte[] b0, b1;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
r1 := @parameter0: java.io.DataOutput;
l0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long timestamp>;
interfaceinvoke r1.<java.io.DataOutput: void writeLong(long)>(l0);
i0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: int partition>;
interfaceinvoke r1.<java.io.DataOutput: void writeInt(int)>(i0);
l1 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long offset>;
interfaceinvoke r1.<java.io.DataOutput: void writeLong(long)>(l1);
b0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] value>;
i1 = lengthof b0;
interfaceinvoke r1.<java.io.DataOutput: void writeInt(int)>(i1);
interfaceinvoke r1.<java.io.DataOutput: void write(byte[])>(b0);
b1 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] recordKey>;
if b1 == null goto label1;
i2 = lengthof b1;
interfaceinvoke r1.<java.io.DataOutput: void writeInt(int)>(i2);
interfaceinvoke r1.<java.io.DataOutput: void write(byte[])>(b1);
goto label2;
label1:
interfaceinvoke r1.<java.io.DataOutput: void writeInt(int)>(-1);
label2:
return;
}
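// Mirrors write(): a value length <= 0 restores an empty (never null) value
// array, while a key length of -1 restores a null recordKey. Roughly
// (again a reconstruction, with hypothetical local names):
/*
public void readFields(DataInput in) throws IOException {
  timestamp = in.readLong();
  partition = in.readInt();
  offset = in.readLong();
  int valueLen = in.readInt();
  if (valueLen > 0) {
    value = new byte[valueLen];
    in.readFully(value);
  } else {
    value = new byte[0];
  }
  int keyLen = in.readInt();
  if (keyLen > -1) {
    recordKey = new byte[keyLen];
    in.readFully(recordKey);
  } else {
    recordKey = null;           // -1 sentinel written by write()
  }
}
*/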
public void readFields(java.io.DataInput) throws java.io.IOException
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
java.io.DataInput r1;
long l0, l1;
int i0, i1, i2;
byte[] b0, b1, b2;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
r1 := @parameter0: java.io.DataInput;
l0 = interfaceinvoke r1.<java.io.DataInput: long readLong()>();
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long timestamp> = l0;
i0 = interfaceinvoke r1.<java.io.DataInput: int readInt()>();
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: int partition> = i0;
l1 = interfaceinvoke r1.<java.io.DataInput: long readLong()>();
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long offset> = l1;
i1 = interfaceinvoke r1.<java.io.DataInput: int readInt()>();
if i1 <= 0 goto label1;
b0 = newarray (byte)[i1];
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] value> = b0;
interfaceinvoke r1.<java.io.DataInput: void readFully(byte[])>(b0);
goto label2;
label1:
b1 = newarray (byte)[0];
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] value> = b1;
label2:
i2 = interfaceinvoke r1.<java.io.DataInput: int readInt()>();
if i2 <= -1 goto label3;
b2 = newarray (byte)[i2];
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] recordKey> = b2;
interfaceinvoke r1.<java.io.DataInput: void readFully(byte[])>(b2);
goto label4;
label3:
r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] recordKey> = null;
label4:
return;
}
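// Package-private accessors for the record coordinates and payloads.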
int getPartition()
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
int i0;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
i0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: int partition>;
return i0;
}
long getOffset()
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
long l0;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
l0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long offset>;
return l0;
}
long getTimestamp()
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
long l0;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
l0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long timestamp>;
return l0;
}
byte[] getValue()
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
byte[] b0;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
b0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] value>;
return b0;
}
byte[] getRecordKey()
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
byte[] b0;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
b0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] recordKey>;
return b0;
}
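// Value equality: compares the three metadata fields, then both payload
// arrays via Arrays.equals, which tolerates a null recordKey on either side.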
public boolean equals(java.lang.Object)
{
org.apache.hadoop.hive.kafka.KafkaWritable r0, r2;
java.lang.Object r1;
boolean z0, z1, z2, z3;
int i0, i1;
long l0, l1, l2, l3;
byte b0, b1;
byte[] a0, a1, a2, a3;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
r1 := @parameter0: java.lang.Object;
if r0 != r1 goto label1;
return 1;
label1:
z0 = r1 instanceof org.apache.hadoop.hive.kafka.KafkaWritable;
if z0 != 0 goto label2;
return 0;
label2:
r2 = (org.apache.hadoop.hive.kafka.KafkaWritable) r1;
i0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: int partition>;
i1 = r2.<org.apache.hadoop.hive.kafka.KafkaWritable: int partition>;
if i0 != i1 goto label3;
l0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long offset>;
l1 = r2.<org.apache.hadoop.hive.kafka.KafkaWritable: long offset>;
b0 = l0 cmp l1;
if b0 != 0 goto label3;
l2 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long timestamp>;
l3 = r2.<org.apache.hadoop.hive.kafka.KafkaWritable: long timestamp>;
b1 = l2 cmp l3;
if b1 != 0 goto label3;
a0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] value>;
a1 = r2.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] value>;
z1 = staticinvoke <java.util.Arrays: boolean equals(byte[],byte[])>(a0, a1);
if z1 == 0 goto label3;
a2 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] recordKey>;
a3 = r2.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] recordKey>;
z2 = staticinvoke <java.util.Arrays: boolean equals(byte[],byte[])>(a2, a3);
if z2 == 0 goto label3;
z3 = 1;
goto label4;
label3:
z3 = 0;
label4:
return z3;
}
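// Combines Objects.hash(partition, offset, timestamp) with Arrays.hashCode of
// the two payload arrays, folding each array in with the usual 31 multiplier:
// 31 * (31 * Objects.hash(...) + Arrays.hashCode(value)) + Arrays.hashCode(recordKey).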
public int hashCode()
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
java.lang.Object[] r1;
java.lang.Integer r2;
java.lang.Long r3, r4;
int i0, i1, i2, i3, i4, i5;
long l0, l1;
byte[] a0, a1;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
r1 = newarray (java.lang.Object)[3];
i0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: int partition>;
r2 = staticinvoke <java.lang.Integer: java.lang.Integer valueOf(int)>(i0);
r1[0] = r2;
l0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long offset>;
r3 = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(l0);
r1[1] = r3;
l1 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long timestamp>;
r4 = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(l1);
r1[2] = r4;
i1 = staticinvoke <java.util.Objects: int hash(java.lang.Object[])>(r1);
i2 = 31 * i1;
a0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] value>;
i3 = staticinvoke <java.util.Arrays: int hashCode(byte[])>(a0);
i2 = i2 + i3;
i4 = 31 * i2;
a1 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] recordKey>;
i5 = staticinvoke <java.util.Arrays: int hashCode(byte[])>(a1);
i4 = i4 + i5;
return i4;
}
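// The \u0001 characters in the recipe string below are StringConcatFactory
// argument placeholders, one per value spliced into "KafkaWritable{...}".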
public java.lang.String toString()
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
int i0;
long l0, l1;
byte[] a0, a1;
java.lang.String s0, s1, s2;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
i0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: int partition>;
l0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long offset>;
l1 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long timestamp>;
a0 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] value>;
s0 = staticinvoke <java.util.Arrays: java.lang.String toString(byte[])>(a0);
a1 = r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] recordKey>;
s1 = staticinvoke <java.util.Arrays: java.lang.String toString(byte[])>(a1);
s2 = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int,long,long,java.lang.String,java.lang.String)>(i0, l0, l1, s0, s1) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("KafkaWritable{partition=\u0001, offset=\u0001, timestamp=\u0001, value=\u0001, recordKey=\u0001}");
return s2;
}
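// Maps a requested metadata column to the matching Hive Writable, driven by
// the synthetic $SwitchMap ordinal table from the KafkaWritable$1 inner class.
// A null recordKey yields a null Writable rather than an empty BytesWritable;
// an unmapped column ordinal falls through to an IllegalArgumentException.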
org.apache.hadoop.io.Writable getHiveWritable(org.apache.hadoop.hive.kafka.MetadataColumn)
{
org.apache.hadoop.hive.kafka.KafkaWritable r0;
org.apache.hadoop.hive.kafka.MetadataColumn r1;
int[] a0;
int i0, i1, i2;
long l0, l1;
byte[] b0, b1;
org.apache.hadoop.io.LongWritable r2, r3;
org.apache.hadoop.io.IntWritable r4;
org.apache.hadoop.io.BytesWritable r5;
java.lang.String s0, s1;
java.lang.IllegalArgumentException r6;
r0 := @this: org.apache.hadoop.hive.kafka.KafkaWritable;
r1 := @parameter0: org.apache.hadoop.hive.kafka.MetadataColumn;
a0 = <org.apache.hadoop.hive.kafka.KafkaWritable$1: int[] $SwitchMap$org$apache$hadoop$hive$kafka$MetadataColumn>;
i0 = virtualinvoke r1.<org.apache.hadoop.hive.kafka.MetadataColumn: int ordinal()>();
i1 = a0[i0];
tableswitch(i1)
{
case 1: goto label1;
case 2: goto label2;
case 3: goto label3;
case 4: goto label4;
default: goto label7;
};
label1:
r2 = new org.apache.hadoop.io.LongWritable;
l0 = virtualinvoke r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long getOffset()>();
specialinvoke r2.<org.apache.hadoop.io.LongWritable: void <init>(long)>(l0);
return r2;
label2:
r4 = new org.apache.hadoop.io.IntWritable;
i2 = virtualinvoke r0.<org.apache.hadoop.hive.kafka.KafkaWritable: int getPartition()>();
specialinvoke r4.<org.apache.hadoop.io.IntWritable: void <init>(int)>(i2);
return r4;
label3:
r3 = new org.apache.hadoop.io.LongWritable;
l1 = virtualinvoke r0.<org.apache.hadoop.hive.kafka.KafkaWritable: long getTimestamp()>();
specialinvoke r3.<org.apache.hadoop.io.LongWritable: void <init>(long)>(l1);
return r3;
label4:
b0 = virtualinvoke r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] getRecordKey()>();
if b0 != null goto label5;
r5 = null;
goto label6;
label5:
r5 = new org.apache.hadoop.io.BytesWritable;
b1 = virtualinvoke r0.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] getRecordKey()>();
specialinvoke r5.<org.apache.hadoop.io.BytesWritable: void <init>(byte[])>(b1);
label6:
return r5;
label7:
r6 = new java.lang.IllegalArgumentException;
s0 = virtualinvoke r1.<org.apache.hadoop.hive.kafka.MetadataColumn: java.lang.String getName()>();
s1 = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(s0) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Unknown metadata column [\u0001]");
specialinvoke r6.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>(s1);
throw r6;
}
}