// Decompiled Soot/Jimple IR (not Java source) of Hive's transactional Kafka
// producer wrapper. It delegates the whole Producer API to a wrapped
// KafkaProducer ("kafkaesque" is a relocated/shaded org.apache.kafka package)
// and adds reflection-based access to KafkaProducer internals so a transaction
// can be resumed from a saved (producerId, epoch) pair across processes.
// NOTE(review): the decompiler collapsed all distinct locals to the single
// name `v` and all branch targets to the bare word `label`; comments below
// reconstruct intent from the instruction sequence and should be confirmed
// against the original Java source.
class org.apache.hadoop.hive.kafka.HiveKafkaProducer extends java.lang.Object implements org.apache.kafkaesque.clients.producer.Producer
{
// Class logger, initialized in <clinit>.
private static final org.slf4j.Logger LOG;
// The real producer every public method delegates to.
private final org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer;
// Value of the "transactional.id" producer property; null when the producer
// is non-transactional (checked in flush()).
private final java.lang.String transactionalId;
// Constructor: caches the "transactional.id" property and builds the wrapped
// KafkaProducer from the full Properties object.
void <init>(java.util.Properties)
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
java.util.Properties v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
java.lang.String v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v := @parameter: java.util.Properties;
specialinvoke v.<java.lang.Object: void <init>()>();
// transactionalId = properties.getProperty("transactional.id")
v = virtualinvoke v.<java.util.Properties: java.lang.String getProperty(java.lang.String)>("transactional.id");
v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.String transactionalId> = v;
// kafkaProducer = new KafkaProducer(properties)
v = new org.apache.kafkaesque.clients.producer.KafkaProducer;
specialinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: void <init>(java.util.Properties)>(v);
v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer> = v;
return;
}
// Plain delegation: kafkaProducer.initTransactions().
public void initTransactions()
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: void initTransactions()>();
return;
}
// Plain delegation: kafkaProducer.beginTransaction().
public void beginTransaction() throws org.apache.kafkaesque.common.errors.ProducerFencedException
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: void beginTransaction()>();
return;
}
// Plain delegation: kafkaProducer.commitTransaction().
public void commitTransaction() throws org.apache.kafkaesque.common.errors.ProducerFencedException
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: void commitTransaction()>();
return;
}
// Plain delegation: kafkaProducer.abortTransaction().
public void abortTransaction() throws org.apache.kafkaesque.common.errors.ProducerFencedException
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: void abortTransaction()>();
return;
}
// Plain delegation: sendOffsetsToTransaction(offsets, consumerGroupId)
// (deprecated String-group-id overload of the Producer interface).
public void sendOffsetsToTransaction(java.util.Map, java.lang.String) throws org.apache.kafkaesque.common.errors.ProducerFencedException
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
java.util.Map v;
java.lang.String v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v := @parameter: java.util.Map;
v := @parameter: java.lang.String;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: void sendOffsetsToTransaction(java.util.Map,java.lang.String)>(v, v);
return;
}
// Plain delegation: sendOffsetsToTransaction(offsets, groupMetadata)
// (ConsumerGroupMetadata overload).
public void sendOffsetsToTransaction(java.util.Map, org.apache.kafkaesque.clients.consumer.ConsumerGroupMetadata) throws org.apache.kafkaesque.common.errors.ProducerFencedException
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.consumer.ConsumerGroupMetadata v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
java.util.Map v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v := @parameter: java.util.Map;
v := @parameter: org.apache.kafkaesque.clients.consumer.ConsumerGroupMetadata;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: void sendOffsetsToTransaction(java.util.Map,org.apache.kafkaesque.clients.consumer.ConsumerGroupMetadata)>(v, v);
return;
}
// Plain delegation: return kafkaProducer.send(record).
public java.util.concurrent.Future send(org.apache.kafkaesque.clients.producer.ProducerRecord)
{
java.util.concurrent.Future v;
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
org.apache.kafkaesque.clients.producer.ProducerRecord v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v := @parameter: org.apache.kafkaesque.clients.producer.ProducerRecord;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
v = virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: java.util.concurrent.Future send(org.apache.kafkaesque.clients.producer.ProducerRecord)>(v);
return v;
}
// Plain delegation: return kafkaProducer.send(record, callback).
public java.util.concurrent.Future send(org.apache.kafkaesque.clients.producer.ProducerRecord, org.apache.kafkaesque.clients.producer.Callback)
{
java.util.concurrent.Future v;
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
org.apache.kafkaesque.clients.producer.ProducerRecord v;
org.apache.kafkaesque.clients.producer.Callback v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v := @parameter: org.apache.kafkaesque.clients.producer.ProducerRecord;
v := @parameter: org.apache.kafkaesque.clients.producer.Callback;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
v = virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: java.util.concurrent.Future send(org.apache.kafkaesque.clients.producer.ProducerRecord,org.apache.kafkaesque.clients.producer.Callback)>(v, v);
return v;
}
// Plain delegation: return kafkaProducer.partitionsFor(topic).
public java.util.List partitionsFor(java.lang.String)
{
java.util.List v;
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
java.lang.String v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v := @parameter: java.lang.String;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
v = virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: java.util.List partitionsFor(java.lang.String)>(v);
return v;
}
// Plain delegation: return kafkaProducer.metrics().
public java.util.Map metrics()
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
java.util.Map v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
v = virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: java.util.Map metrics()>();
return v;
}
// Plain delegation: kafkaProducer.close().
public void close()
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: void close()>();
return;
}
// Plain delegation: kafkaProducer.close(timeout).
public void close(java.time.Duration)
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
java.time.Duration v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v := @parameter: java.time.Duration;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: void close(java.time.Duration)>(v);
return;
}
// Flushes the wrapped producer; when transactional (transactionalId != null)
// additionally drains any pending AddPartitionsToTxn request via
// flushNewPartitions() so partition registrations reach the broker.
public void flush()
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
java.lang.String v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
virtualinvoke v.<org.apache.kafkaesque.clients.producer.KafkaProducer: void flush()>();
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.String transactionalId>;
// skip the transactional flush for non-transactional producers
if v == null goto label;
specialinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void flushNewPartitions()>();
label:
return;
}
// Reflectively forces the wrapped producer's internal TransactionManager into
// an IN_TRANSACTION state using a previously-saved (producerId, epoch) pair,
// WITHOUT talking to the transaction coordinator. Sequence:
//   check producerId >= 0 && epoch >= 0 ->
//   transitionTo(INITIALIZING) -> topicPartitionBookkeeper.reset() ->
//   set producerIdAndEpoch.{producerId,epoch} ->
//   transitionTo(READY) -> transitionTo(IN_TRANSACTION) ->
//   transactionManager.transactionStarted = true.
// NOTE(review): this appears modeled on Flink's FlinkKafkaProducer resume
// trick and is tightly coupled to kafka-clients internals — confirm the
// targeted field/method names against the bundled client version.
synchronized void resumeTransaction(long, short)
{
java.lang.Object[] v, v, v, v, v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
long v;
short v;
byte v;
java.lang.Enum v, v, v;
java.lang.Long v, v;
java.lang.Short v, v;
java.lang.Boolean v;
java.lang.String v;
boolean v;
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.slf4j.Logger v;
java.lang.Object v, v, v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v := @parameter: long;
v := @parameter: short;
// compute (producerId >= 0 && epoch >= 0) for the precondition check
v = v cmp 0L;
if v < 0 goto label;
if v < 0 goto label;
v = 1;
goto label;
label:
v = 0;
label:
staticinvoke <com.google.common.base.Preconditions: void checkState(boolean,java.lang.String,long,int)>(v, "Incorrect values for producerId {} and epoch {}", v, v);
// LOG.info("Attempting to resume transaction {} with producerId {} and epoch {}", ...)
v = <org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.slf4j.Logger LOG>;
v = newarray (java.lang.Object)[3];
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.String transactionalId>;
v[0] = v;
v = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(v);
v[1] = v;
v = staticinvoke <java.lang.Short: java.lang.Short valueOf(short)>(v);
v[2] = v;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String,java.lang.Object[])>("Attempting to resume transaction {} with producerId {} and epoch {}", v);
// grab kafkaProducer.transactionManager and its topicPartitionBookkeeper
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "transactionManager");
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "topicPartitionBookkeeper");
// transactionManager.transitionTo(State.INITIALIZING)
v = newarray (java.lang.Object)[1];
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Enum getEnum(java.lang.String)>("org.apache.kafkaesque.clients.producer.internals.TransactionManager$State.INITIALIZING");
v[0] = v;
staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object invoke(java.lang.Object,java.lang.String,java.lang.Object[])>(v, "transitionTo", v);
// topicPartitionBookkeeper.reset()
v = newarray (java.lang.Object)[0];
staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object invoke(java.lang.Object,java.lang.String,java.lang.Object[])>(v, "reset", v);
// producerIdAndEpoch.producerId = producerId; producerIdAndEpoch.epoch = epoch
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "producerIdAndEpoch");
v = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(v);
staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: void setValue(java.lang.Object,java.lang.String,java.lang.Object)>(v, "producerId", v);
v = staticinvoke <java.lang.Short: java.lang.Short valueOf(short)>(v);
staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: void setValue(java.lang.Object,java.lang.String,java.lang.Object)>(v, "epoch", v);
// transitionTo(READY) then transitionTo(IN_TRANSACTION)
v = newarray (java.lang.Object)[1];
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Enum getEnum(java.lang.String)>("org.apache.kafkaesque.clients.producer.internals.TransactionManager$State.READY");
v[0] = v;
staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object invoke(java.lang.Object,java.lang.String,java.lang.Object[])>(v, "transitionTo", v);
v = newarray (java.lang.Object)[1];
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Enum getEnum(java.lang.String)>("org.apache.kafkaesque.clients.producer.internals.TransactionManager$State.IN_TRANSACTION");
v[0] = v;
staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object invoke(java.lang.Object,java.lang.String,java.lang.Object[])>(v, "transitionTo", v);
// transactionManager.transactionStarted = true
v = staticinvoke <java.lang.Boolean: java.lang.Boolean valueOf(boolean)>(1);
staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: void setValue(java.lang.Object,java.lang.String,java.lang.Object)>(v, "transactionStarted", v);
return;
}
// Package-private accessor for the cached transactional.id.
java.lang.String getTransactionalId()
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
java.lang.String v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.String transactionalId>;
return v;
}
// Reflectively reads kafkaProducer.transactionManager.producerIdAndEpoch.producerId.
long getProducerId()
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
java.lang.Object v, v, v;
long v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "transactionManager");
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "producerIdAndEpoch");
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "producerId");
// unbox the Long field value
v = virtualinvoke v.<java.lang.Long: long longValue()>();
return v;
}
// Reflectively reads kafkaProducer.transactionManager.producerIdAndEpoch.epoch.
short getEpoch()
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
short v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
java.lang.Object v, v, v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "transactionManager");
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "producerIdAndEpoch");
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "epoch");
// unbox the Short field value
v = virtualinvoke v.<java.lang.Short: short shortValue()>();
return v;
}
// Enqueues the pending AddPartitionsToTxn request, wakes the producer's
// internal Sender thread (reflectively, via kafkaProducer.sender.wakeup()),
// then blocks until the request result completes.
private void flushNewPartitions()
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
java.lang.Object[] v;
org.slf4j.Logger v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
java.lang.Object v;
org.apache.kafkaesque.clients.producer.internals.TransactionalRequestResult v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = <org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.slf4j.Logger LOG>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>("Flushing new partitions");
v = specialinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.internals.TransactionalRequestResult enqueueNewPartitions()>();
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "sender");
v = newarray (java.lang.Object)[0];
staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object invoke(java.lang.Object,java.lang.String,java.lang.Object[])>(v, "wakeup", v);
// block until the enqueued request has been processed by the sender
virtualinvoke v.<org.apache.kafkaesque.clients.producer.internals.TransactionalRequestResult: void await()>();
return;
}
// Reflectively builds and enqueues the transaction manager's
// AddPartitionsToTxn request handler:
//   handler = transactionManager.addPartitionsToTransactionHandler();
//   transactionManager.enqueueRequest(handler);   // param typed as the
//   return handler.result;                        // handler's SUPERCLASS
// getSuperclass() is used because the concrete handler is an anonymous/inner
// subclass of TxnRequestHandler, which declares the "result" field and is the
// declared parameter type of enqueueRequest.
private synchronized org.apache.kafkaesque.clients.producer.internals.TransactionalRequestResult enqueueNewPartitions()
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
java.lang.Object[] v, v;
org.apache.kafkaesque.clients.producer.KafkaProducer v;
java.lang.Class[] v;
java.lang.Class v, v, v, v;
java.lang.Object v, v, v;
v := @this: org.apache.hadoop.hive.kafka.HiveKafkaProducer;
v = v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.apache.kafkaesque.clients.producer.KafkaProducer kafkaProducer>;
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.String)>(v, "transactionManager");
v = newarray (java.lang.Object)[0];
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object invoke(java.lang.Object,java.lang.String,java.lang.Object[])>(v, "addPartitionsToTransactionHandler", v);
v = newarray (java.lang.Class)[1];
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<java.lang.Class: java.lang.Class getSuperclass()>();
v[0] = v;
v = newarray (java.lang.Object)[1];
v[0] = v;
staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object invoke(java.lang.Object,java.lang.String,java.lang.Class[],java.lang.Object[])>(v, "enqueueRequest", v, v);
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<java.lang.Class: java.lang.Class getSuperclass()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.Class,java.lang.String)>(v, v, "result");
return v;
}
// Resolves an enum constant from a fully-qualified "pkg.Class.CONSTANT"
// string: splits on the LAST dot, Class.forName()s the prefix, then
// Enum.valueOf(). ClassNotFoundException is wrapped into a RuntimeException
// ("Incompatible KafkaProducer version") since the internal enum may not
// exist in other kafka-clients versions.
// NOTE(review): decompiler artifact — the != 2 branch and trailing
// "return null" path are the decompiled remains of the original error
// handling; labels are ambiguous here.
private static java.lang.Enum getEnum(java.lang.String)
{
java.lang.String[] v;
java.lang.RuntimeException v;
java.lang.Enum v;
int v;
java.lang.Class v;
java.lang.ClassNotFoundException v;
java.lang.String v, v, v;
v := @parameter: java.lang.String;
// split on the last '.' only: ["pkg.Class", "CONSTANT"]
v = virtualinvoke v.<java.lang.String: java.lang.String[] split(java.lang.String)>("\\.(?=[^\\.]+$)");
v = lengthof v;
if v != 2 goto label;
v = v[0];
v = v[1];
label:
v = staticinvoke <java.lang.Class: java.lang.Class forName(java.lang.String)>(v);
v = staticinvoke <java.lang.Enum: java.lang.Enum valueOf(java.lang.Class,java.lang.String)>(v, v);
label:
return v;
label:
v := @caughtexception;
v = new java.lang.RuntimeException;
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.String,java.lang.Throwable)>("Incompatible KafkaProducer version", v);
throw v;
label:
return null;
catch java.lang.ClassNotFoundException from label to label with label;
}
// Varargs-style reflective invoke: derives each parameter's Class from
// args[i].getClass() and delegates to the 4-arg invoke overload. (Implies
// the target method's declared parameter types must match the arguments'
// runtime classes exactly.)
private static transient java.lang.Object invoke(java.lang.Object, java.lang.String, java.lang.Object[])
{
java.lang.Object[] v;
java.lang.Class[] v;
int v, v, v;
java.lang.Class v;
java.lang.Object v, v, v;
java.lang.String v;
v := @parameter: java.lang.Object;
v := @parameter: java.lang.String;
v := @parameter: java.lang.Object[];
v = lengthof v;
v = newarray (java.lang.Class)[v];
v = 0;
label:
v = lengthof v;
if v >= v goto label;
v = v[v];
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v[v] = v;
v = v + 1;
goto label;
label:
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object invoke(java.lang.Object,java.lang.String,java.lang.Class[],java.lang.Object[])>(v, v, v, v);
return v;
}
// Core reflective invoke: getDeclaredMethod on the receiver's runtime class,
// setAccessible(true), invoke. All reflective failures (NoSuchMethod,
// InvocationTarget, IllegalAccess) are wrapped into
// RuntimeException("Incompatible KafkaProducer version").
private static java.lang.Object invoke(java.lang.Object, java.lang.String, java.lang.Class[], java.lang.Object[])
{
java.lang.Object[] v;
java.lang.Class[] v;
java.lang.ReflectiveOperationException v;
java.lang.RuntimeException v;
java.lang.Class v;
java.lang.Object v, v;
java.lang.String v;
java.lang.reflect.Method v;
v := @parameter: java.lang.Object;
v := @parameter: java.lang.String;
v := @parameter: java.lang.Class[];
v := @parameter: java.lang.Object[];
label:
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<java.lang.Class: java.lang.reflect.Method getDeclaredMethod(java.lang.String,java.lang.Class[])>(v, v);
virtualinvoke v.<java.lang.reflect.Method: void setAccessible(boolean)>(1);
v = virtualinvoke v.<java.lang.reflect.Method: java.lang.Object invoke(java.lang.Object,java.lang.Object[])>(v, v);
label:
return v;
label:
v := @caughtexception;
v = new java.lang.RuntimeException;
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.String,java.lang.Throwable)>("Incompatible KafkaProducer version", v);
throw v;
catch java.lang.NoSuchMethodException from label to label with label;
catch java.lang.reflect.InvocationTargetException from label to label with label;
catch java.lang.IllegalAccessException from label to label with label;
}
// Reads a field declared on the object's RUNTIME class (delegates to the
// Class-explicit overload below). Will not find fields declared only on a
// superclass — callers pass the superclass explicitly when needed
// (see enqueueNewPartitions).
private static java.lang.Object getValue(java.lang.Object, java.lang.String)
{
java.lang.Class v;
java.lang.Object v, v;
java.lang.String v;
v := @parameter: java.lang.Object;
v := @parameter: java.lang.String;
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.Object getValue(java.lang.Object,java.lang.Class,java.lang.String)>(v, v, v);
return v;
}
// Reads a named declared field from the given Class via setAccessible(true);
// NoSuchField/IllegalAccess are wrapped into
// RuntimeException("Incompatible KafkaProducer version").
private static java.lang.Object getValue(java.lang.Object, java.lang.Class, java.lang.String)
{
java.lang.Class v;
java.lang.Object v, v;
java.lang.ReflectiveOperationException v;
java.lang.RuntimeException v;
java.lang.String v;
java.lang.reflect.Field v;
v := @parameter: java.lang.Object;
v := @parameter: java.lang.Class;
v := @parameter: java.lang.String;
label:
v = virtualinvoke v.<java.lang.Class: java.lang.reflect.Field getDeclaredField(java.lang.String)>(v);
virtualinvoke v.<java.lang.reflect.Field: void setAccessible(boolean)>(1);
v = virtualinvoke v.<java.lang.reflect.Field: java.lang.Object get(java.lang.Object)>(v);
label:
return v;
label:
v := @caughtexception;
v = new java.lang.RuntimeException;
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.String,java.lang.Throwable)>("Incompatible KafkaProducer version", v);
throw v;
catch java.lang.NoSuchFieldException from label to label with label;
catch java.lang.IllegalAccessException from label to label with label;
}
// Writes a named declared field on the object's runtime class via
// setAccessible(true); reflective failures are wrapped into
// RuntimeException("Incompatible KafkaProducer version").
private static void setValue(java.lang.Object, java.lang.String, java.lang.Object)
{
java.lang.Class v;
java.lang.Object v, v;
java.lang.ReflectiveOperationException v;
java.lang.RuntimeException v;
java.lang.String v;
java.lang.reflect.Field v;
v := @parameter: java.lang.Object;
v := @parameter: java.lang.String;
v := @parameter: java.lang.Object;
label:
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = virtualinvoke v.<java.lang.Class: java.lang.reflect.Field getDeclaredField(java.lang.String)>(v);
virtualinvoke v.<java.lang.reflect.Field: void setAccessible(boolean)>(1);
virtualinvoke v.<java.lang.reflect.Field: void set(java.lang.Object,java.lang.Object)>(v, v);
label:
goto label;
label:
v := @caughtexception;
v = new java.lang.RuntimeException;
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.String,java.lang.Throwable)>("Incompatible KafkaProducer version", v);
throw v;
label:
return;
catch java.lang.NoSuchFieldException from label to label with label;
catch java.lang.IllegalAccessException from label to label with label;
}
// Static initializer: LOG = LoggerFactory.getLogger(HiveKafkaProducer.class).
static void <clinit>()
{
org.slf4j.Logger v;
v = staticinvoke <org.slf4j.LoggerFactory: org.slf4j.Logger getLogger(java.lang.Class)>(class "Lorg/apache/hadoop/hive/kafka/HiveKafkaProducer;");
<org.apache.hadoop.hive.kafka.HiveKafkaProducer: org.slf4j.Logger LOG> = v;
return;
}
}