class org.apache.hadoop.hive.kafka.TransactionalKafkaWriter extends java.lang.Object implements org.apache.hadoop.hive.ql.exec.FileSinkOperator$RecordWriter, org.apache.hadoop.mapred.RecordWriter
{
private static final org.slf4j.Logger LOG;
private static final java.lang.String TRANSACTION_DIR;
private static final java.time.Duration DURATION_0;
private final java.lang.String topic;
private final org.apache.hadoop.hive.kafka.HiveKafkaProducer producer;
private final org.apache.kafka.clients.producer.Callback callback;
private final java.util.concurrent.atomic.AtomicReference sendExceptionRef;
private final org.apache.hadoop.fs.Path openTxFileName;
private final boolean optimisticCommit;
private final org.apache.hadoop.fs.FileSystem fileSystem;
private final java.util.Map offsets;
private final java.lang.String writerIdTopicId;
private final long producerId;
private final short producerEpoch;
private long sentRecords;
static final boolean $assertionsDisabled;
void <init>(java.lang.String, java.util.Properties, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.FileSystem, java.lang.Boolean)
{
org.apache.hadoop.fs.FileSystem v;
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
java.lang.Long v;
java.lang.Short v;
java.time.Duration v;
org.apache.hadoop.fs.Path v, v, v, v, v;
boolean v, v, v, v, v, v, v;
java.lang.Exception v;
org.apache.kafka.clients.producer.Callback v;
java.lang.Object[] v, v;
java.util.HashMap v;
long v, v;
java.lang.AssertionError v;
java.util.concurrent.atomic.AtomicReference v;
short v, v, v;
java.lang.Boolean v;
java.lang.String v, v, v, v, v, v, v, v, v, v, v, v, v, v;
org.apache.hadoop.hive.kafka.HiveKafkaProducer v, v, v, v, v, v, v, v, v, v, v, v;
java.util.Properties v;
org.slf4j.Logger v, v, v;
java.lang.Class v, v;
java.lang.Object v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v := @parameter: java.lang.String;
v := @parameter: java.util.Properties;
v := @parameter: org.apache.hadoop.fs.Path;
v := @parameter: org.apache.hadoop.fs.FileSystem;
v := @parameter: java.lang.Boolean;
specialinvoke v.<java.lang.Object: void <init>()>();
v = new java.util.concurrent.atomic.AtomicReference;
specialinvoke v.<java.util.concurrent.atomic.AtomicReference: void <init>()>();
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.concurrent.atomic.AtomicReference sendExceptionRef> = v;
v = new java.util.HashMap;
specialinvoke v.<java.util.HashMap: void <init>()>();
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.Map offsets> = v;
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: long sentRecords> = 0L;
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.fs.FileSystem fileSystem> = v;
v = staticinvoke <com.google.common.base.Preconditions: java.lang.Object checkNotNull(java.lang.Object,java.lang.Object)>(v, "NULL topic !!");
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String topic> = v;
v = virtualinvoke v.<java.util.Properties: java.lang.String getProperty(java.lang.String)>("bootstrap.servers");
if v == null goto label;
v = 1;
goto label;
label:
v = 0;
label:
staticinvoke <com.google.common.base.Preconditions: void checkState(boolean,java.lang.Object)>(v, "set [bootstrap.servers] property");
v = class "Lorg/apache/kafkaesqueesqueesque/common/serialization/ByteArraySerializer;";
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("value.serializer", v);
v = class "Lorg/apache/kafkaesqueesqueesque/common/serialization/ByteArraySerializer;";
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("key.serializer", v);
v = new org.apache.hadoop.hive.kafka.HiveKafkaProducer;
specialinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void <init>(java.util.Properties)>(v);
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer> = v;
if v != null goto label;
v = 1;
goto label;
label:
v = virtualinvoke v.<java.lang.Boolean: boolean booleanValue()>();
label:
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: boolean optimisticCommit> = v;
v = staticinvoke <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter$lambda_new_0__63: org.apache.kafka.clients.producer.Callback bootstrap$(org.apache.hadoop.hive.kafka.TransactionalKafkaWriter)>(v);
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.kafka.clients.producer.Callback callback> = v;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: boolean $assertionsDisabled>;
if v != 0 goto label;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.String getTransactionalId()>();
if v != null goto label;
v = new java.lang.AssertionError;
specialinvoke v.<java.lang.AssertionError: void <init>()>();
throw v;
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void initTransactions()>();
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void beginTransaction()>();
label:
goto label;
label:
v := @caughtexception;
specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: void logHints(java.lang.Exception)>(v);
v = specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: boolean tryToAbortTx(java.lang.Throwable)>(v);
if v == 0 goto label;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.String getTransactionalId()>();
v = virtualinvoke v.<java.lang.Exception: java.lang.String getMessage()>();
interfaceinvoke v.<org.slf4j.Logger: void error(java.lang.String,java.lang.Object,java.lang.Object)>("Aborting Transaction [{}] cause by ERROR [{}]", v, v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void abortTransaction()>();
label:
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.String getTransactionalId()>();
v = virtualinvoke v.<java.lang.Exception: java.lang.String getMessage()>();
interfaceinvoke v.<org.slf4j.Logger: void error(java.lang.String,java.lang.Object,java.lang.Object)>("Closing writer [{}] caused by ERROR [{}]", v, v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.time.Duration DURATION_0>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void close(java.time.Duration)>(v);
throw v;
label:
v = newarray (java.lang.Object)[2];
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.String getTransactionalId()>();
v[0] = v;
v[1] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("WriterId [%s], Kafka Topic [%s]", v);
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String writerIdTopicId> = v;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: boolean optimisticCommit>;
if v == 0 goto label;
v = -1;
goto label;
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: short getEpoch()>();
label:
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: short producerEpoch> = v;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: boolean optimisticCommit>;
if v == 0 goto label;
v = -1L;
goto label;
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: long getProducerId()>();
label:
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: long producerId> = v;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = newarray (java.lang.Object)[3];
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String writerIdTopicId>;
v[0] = v;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: short producerEpoch>;
v = staticinvoke <java.lang.Short: java.lang.Short valueOf(short)>(v);
v[1] = v;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: long producerId>;
v = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(v);
v[2] = v;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String,java.lang.Object[])>("DONE with Initialization of {}, Epoch[{}], internal ID[{}]", v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: boolean optimisticCommit>;
if v == 0 goto label;
v = null;
goto label;
label:
v = new org.apache.hadoop.fs.Path;
v = v;
v = new org.apache.hadoop.fs.Path;
v = new org.apache.hadoop.fs.Path;
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(org.apache.hadoop.fs.Path,java.lang.String)>(v, "transaction_states");
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.String getTransactionalId()>();
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(org.apache.hadoop.fs.Path,java.lang.String)>(v, v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: short producerEpoch>;
v = staticinvoke <java.lang.String: java.lang.String valueOf(int)>(v);
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(org.apache.hadoop.fs.Path,java.lang.String)>(v, v);
label:
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.fs.Path openTxFileName> = v;
return;
catch java.lang.Exception from label to label with label;
}
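// --- Editor's note: a plausible source-level reconstruction of the constructor IR
// above, not the verified Hive source. Field and literal names come from the dump;
// the parameter name queryWorkingPath, the callback lambda body (lambda_new_0__63 is
// hidden in the dump), and the RuntimeException wrapper are editorial assumptions.
// TransactionalKafkaWriter(String topic, Properties properties, Path queryWorkingPath,
//     FileSystem fileSystem, Boolean optimisticCommit) {
//   this.sendExceptionRef = new AtomicReference<>();
//   this.offsets = new HashMap<>();
//   this.sentRecords = 0L;
//   this.fileSystem = fileSystem;
//   this.topic = Preconditions.checkNotNull(topic, "NULL topic !!");
//   Preconditions.checkState(properties.getProperty("bootstrap.servers") != null,
//       "set [bootstrap.servers] property");
//   // force byte-array serializers; Hive hands the producer pre-serialized payloads
//   properties.setProperty("value.serializer", ByteArraySerializer.class.getName());
//   properties.setProperty("key.serializer", ByteArraySerializer.class.getName());
//   this.producer = new HiveKafkaProducer(properties);
//   this.optimisticCommit = optimisticCommit == null || optimisticCommit; // null -> true
//   // assumed lambda body, inferred from the offsets map and sendExceptionRef fields:
//   this.callback = (metadata, exception) -> {
//     if (exception != null) {
//       sendExceptionRef.compareAndSet(null, exception);
//     } else {
//       offsets.put(new TopicPartition(metadata.topic(), metadata.partition()), metadata.offset());
//     }
//   };
//   assert producer.getTransactionalId() != null;
//   try {
//     producer.initTransactions();
//     producer.beginTransaction();
//   } catch (Exception e) {
//     logHints(e);
//     if (tryToAbortTx(e)) {
//       LOG.error("Aborting Transaction [{}] cause by ERROR [{}]",
//           producer.getTransactionalId(), e.getMessage());
//       producer.abortTransaction();
//     }
//     LOG.error("Closing writer [{}] caused by ERROR [{}]",
//         producer.getTransactionalId(), e.getMessage());
//     producer.close(DURATION_0);
//     throw new RuntimeException(e); // the IR rethrows e as-is; wrapped so the sketch compiles
//   }
//   this.writerIdTopicId = String.format("WriterId [%s], Kafka Topic [%s]",
//       producer.getTransactionalId(), topic);
//   // in optimistic (one-phase) mode no resumable state is kept, so sentinel -1 is used
//   this.producerEpoch = (short) (this.optimisticCommit ? -1 : producer.getEpoch());
//   this.producerId = this.optimisticCommit ? -1L : producer.getProducerId();
//   LOG.info("DONE with Initialization of {}, Epoch[{}], internal ID[{}]",
//       writerIdTopicId, producerEpoch, producerId);
//   this.openTxFileName = this.optimisticCommit ? null
//       : new Path(new Path(new Path(queryWorkingPath, "transaction_states"),
//           producer.getTransactionalId()), String.valueOf(producerEpoch));
// }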
public void write(org.apache.hadoop.io.Writable) throws java.io.IOException
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v, v, v;
org.apache.kafka.clients.producer.Callback v;
org.apache.kafka.clients.producer.ProducerRecord v;
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
long v, v;
org.apache.hadoop.io.Writable v;
java.util.concurrent.atomic.AtomicReference v;
java.lang.Exception v;
java.time.Duration v;
java.lang.String v;
boolean v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v := @parameter: org.apache.hadoop.io.Writable;
specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: void checkExceptions()>();
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: long sentRecords>;
v = v + 1L;
v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: long sentRecords> = v;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String topic>;
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: org.apache.kafka.clients.producer.ProducerRecord toProducerRecord(java.lang.String,org.apache.hadoop.hive.kafka.KafkaWritable)>(v, v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.kafka.clients.producer.Callback callback>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.util.concurrent.Future send(org.apache.kafka.clients.producer.ProducerRecord,org.apache.kafka.clients.producer.Callback)>(v, v);
label:
goto label;
label:
v := @caughtexception;
v = specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: boolean tryToAbortTx(java.lang.Throwable)>(v);
if v == 0 goto label;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void abortTransaction()>();
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.time.Duration DURATION_0>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void close(java.time.Duration)>(v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.concurrent.atomic.AtomicReference sendExceptionRef>;
virtualinvoke v.<java.util.concurrent.atomic.AtomicReference: boolean compareAndSet(java.lang.Object,java.lang.Object)>(null, v);
specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: void checkExceptions()>();
label:
return;
catch java.lang.Exception from label to label with label;
}
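// --- Editor's sketch of write(Writable), reconstructed from the IR above: fail fast
// if a prior async send already failed, send through the transactional producer, and
// on a synchronous failure abort (when still safe), close without blocking, then
// surface the error as an IOException via checkExceptions(). The cast to
// KafkaWritable is implied by the KafkaUtils.toProducerRecord signature.
// @Override
// public void write(Writable w) throws IOException {
//   checkExceptions(); // throws if the async callback already recorded a failure
//   try {
//     sentRecords++;
//     producer.send(KafkaUtils.toProducerRecord(topic, (KafkaWritable) w), callback);
//   } catch (Exception e) {
//     if (tryToAbortTx(e)) { // only abort when the producer is not fenced/fatal
//       producer.abortTransaction();
//     }
//     producer.close(DURATION_0); // DURATION_0 == Duration.ofMillis(0): do not block
//     sendExceptionRef.compareAndSet(null, e);
//     checkExceptions(); // rethrows e wrapped in an IOException
//   }
// }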
private void logHints(java.lang.Exception)
{
org.slf4j.Logger v;
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
java.lang.String v;
java.lang.Exception v;
boolean v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v := @parameter: java.lang.Exception;
v = v instanceof org.apache.kafka.common.errors.TimeoutException;
if v == 0 goto label;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = virtualinvoke v.<java.lang.Exception: java.lang.String getMessage()>();
interfaceinvoke v.<org.slf4j.Logger: void error(java.lang.String,java.lang.Object)>("Maybe Try to increase [`retry.backoff.ms`] to avoid this error [{}].", v);
label:
return;
}
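// --- Editor's sketch: logHints only special-cases Kafka's TimeoutException, pointing
// the operator at the producer's `retry.backoff.ms` setting.
// private void logHints(Exception e) {
//   if (e instanceof TimeoutException) {
//     LOG.error("Maybe Try to increase [`retry.backoff.ms`] to avoid this error [{}].",
//         e.getMessage());
//   }
// }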
public void close(boolean) throws java.io.IOException
{
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
java.lang.Long v;
java.time.Duration v, v;
java.util.Map v;
java.util.stream.Collector v;
boolean v, v;
java.util.Set v;
java.lang.Exception v;
java.util.stream.Stream v, v;
java.lang.Object[] v;
long v;
java.util.function.Function v;
java.lang.String v, v, v, v, v, v, v;
org.apache.hadoop.hive.kafka.HiveKafkaProducer v, v, v, v, v;
org.slf4j.Logger v, v, v, v, v;
java.lang.Object v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v := @parameter: boolean;
if v == 0 goto label;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String writerIdTopicId>;
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Aborting Transaction and Sending from {}", v);
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void abortTransaction()>();
label:
goto label;
label:
v := @caughtexception;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String writerIdTopicId>;
v = virtualinvoke v.<java.lang.Exception: java.lang.String getMessage()>();
interfaceinvoke v.<org.slf4j.Logger: void error(java.lang.String,java.lang.Object,java.lang.Object)>("Aborting Transaction {} failed due to [{}]", v, v);
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.time.Duration DURATION_0>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void close(java.time.Duration)>(v);
return;
label:
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String writerIdTopicId>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String,java.lang.Object)>("Flushing Kafka buffer of writerId {}", v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void flush()>();
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.Map offsets>;
v = interfaceinvoke v.<java.util.Map: java.util.Set entrySet()>();
v = interfaceinvoke v.<java.util.Set: java.util.stream.Stream stream()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter$lambda_close_1__64: java.util.function.Function bootstrap$(java.lang.String)>("Topic[%s] Partition [%s] -> Last offset [%s]");
v = interfaceinvoke v.<java.util.stream.Stream: java.util.stream.Stream map(java.util.function.Function)>(v);
v = staticinvoke <java.util.stream.Collectors: java.util.stream.Collector joining(java.lang.CharSequence)>(",");
v = interfaceinvoke v.<java.util.stream.Stream: java.lang.Object collect(java.util.stream.Collector)>(v);
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String writerIdTopicId>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String,java.lang.Object,java.lang.Object)>("WriterId {} flushed the following [{}] ", v, v);
specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: void checkExceptions()>();
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: boolean optimisticCommit>;
if v == 0 goto label;
specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: void commitTransaction()>();
goto label;
label:
specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: void persistTxState()>();
label:
specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: void checkExceptions()>();
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = newarray (java.lang.Object)[3];
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: java.lang.String getTransactionalId()>();
v[0] = v;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: long sentRecords>;
v = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(v);
v[1] = v;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String topic>;
v[2] = v;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String,java.lang.Object[])>("Closed writerId [{}], Sent [{}] records to Topic [{}]", v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = <java.time.Duration: java.time.Duration ZERO>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void close(java.time.Duration)>(v);
return;
catch java.lang.Exception from label to label with label;
}
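// --- Editor's sketch of close(boolean abort). On abort: roll back and bail out
// without flushing. Otherwise: flush, log per-partition offsets, then either commit
// now (optimistic, one-phase) or persist the transaction state for a later commit
// (two-phase). The map-entry lambda (lambda_close_1__64) is hidden in the dump; the
// entry accessors below assume offsets maps TopicPartition -> Long, which matches the
// three slots of the format string.
// @Override
// public void close(boolean abort) throws IOException {
//   if (abort) {
//     LOG.warn("Aborting Transaction and Sending from {}", writerIdTopicId);
//     try {
//       producer.abortTransaction();
//     } catch (Exception e) {
//       LOG.error("Aborting Transaction {} failed due to [{}]", writerIdTopicId, e.getMessage());
//     }
//     producer.close(DURATION_0);
//     return;
//   }
//   LOG.info("Flushing Kafka buffer of writerId {}", writerIdTopicId);
//   producer.flush();
//   String flushed = offsets.entrySet().stream()
//       .map(e -> String.format("Topic[%s] Partition [%s] -> Last offset [%s]",
//           e.getKey().topic(), e.getKey().partition(), e.getValue()))
//       .collect(Collectors.joining(","));
//   LOG.info("WriterId {} flushed the following [{}] ", writerIdTopicId, flushed);
//   checkExceptions();
//   if (optimisticCommit) {
//     commitTransaction();  // one-phase: commit immediately
//   } else {
//     persistTxState();     // two-phase: record producerId/epoch, commit later
//   }
//   checkExceptions();
//   LOG.info("Closed writerId [{}], Sent [{}] records to Topic [{}]",
//       producer.getTransactionalId(), sentRecords, topic);
//   producer.close(Duration.ZERO);
// }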
private void commitTransaction()
{
org.apache.hadoop.hive.kafka.HiveKafkaProducer v;
org.slf4j.Logger v;
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
java.lang.String v;
java.util.concurrent.atomic.AtomicReference v;
java.lang.Exception v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String writerIdTopicId>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String,java.lang.Object)>("Attempting Optimistic commit by {}", v);
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void commitTransaction()>();
label:
goto label;
label:
v := @caughtexception;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.concurrent.atomic.AtomicReference sendExceptionRef>;
virtualinvoke v.<java.util.concurrent.atomic.AtomicReference: boolean compareAndSet(java.lang.Object,java.lang.Object)>(null, v);
label:
return;
catch java.lang.Exception from label to label with label;
}
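// --- Editor's sketch: the optimistic commit simply delegates to the producer and
// parks any failure in sendExceptionRef for the caller's next checkExceptions().
// private void commitTransaction() {
//   LOG.info("Attempting Optimistic commit by {}", writerIdTopicId);
//   try {
//     producer.commitTransaction();
//   } catch (Exception e) {
//     sendExceptionRef.compareAndSet(null, e);
//   }
// }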
private void persistTxState()
{
java.lang.Throwable v;
org.slf4j.Logger v;
org.apache.hadoop.fs.FileSystem v;
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
long v;
java.util.concurrent.atomic.AtomicReference v;
java.lang.Exception v;
org.apache.hadoop.fs.FSDataOutputStream v;
short v;
org.apache.hadoop.fs.Path v, v;
java.lang.String v, v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.fs.Path openTxFileName>;
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String toString()>();
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String writerIdTopicId>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String,java.lang.Object,java.lang.Object)>("Committing state to path [{}] by [{}]", v, v);
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.fs.FileSystem fileSystem>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.fs.Path openTxFileName>;
v = virtualinvoke v.<org.apache.hadoop.fs.FileSystem: org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path)>(v);
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: long producerId>;
virtualinvoke v.<org.apache.hadoop.fs.FSDataOutputStream: void writeLong(long)>(v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: short producerEpoch>;
virtualinvoke v.<org.apache.hadoop.fs.FSDataOutputStream: void writeShort(int)>(v);
label:
virtualinvoke v.<org.apache.hadoop.fs.FSDataOutputStream: void close()>();
goto label;
label:
v := @caughtexception;
throw v;
label:
v := @caughtexception;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.concurrent.atomic.AtomicReference sendExceptionRef>;
virtualinvoke v.<java.util.concurrent.atomic.AtomicReference: boolean compareAndSet(java.lang.Object,java.lang.Object)>(null, v);
label:
return;
catch java.lang.Throwable from label to label with label;
catch java.lang.Exception from label to label with label;
}
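// --- Editor's sketch: instead of committing, the two-phase path records the open
// transaction's coordinates (a long producerId followed by a short epoch) at
// <workingPath>/transaction_states/<transactionalId>/<epoch>, so a later job-level
// commit step can resume and commit the transaction. The nested Throwable trap in the
// IR is consistent with the compiler's try-with-resources expansion.
// private void persistTxState() {
//   LOG.info("Committing state to path [{}] by [{}]", openTxFileName.toString(), writerIdTopicId);
//   try (FSDataOutputStream out = fileSystem.create(openTxFileName)) {
//     out.writeLong(producerId);
//     out.writeShort(producerEpoch);
//   } catch (Exception e) {
//     sendExceptionRef.compareAndSet(null, e);
//   }
// }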
public void write(org.apache.hadoop.io.BytesWritable, org.apache.hadoop.hive.kafka.KafkaWritable) throws java.io.IOException
{
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
org.apache.hadoop.io.BytesWritable v;
org.apache.hadoop.hive.kafka.KafkaWritable v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v := @parameter: org.apache.hadoop.io.BytesWritable;
v := @parameter: org.apache.hadoop.hive.kafka.KafkaWritable;
virtualinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: void write(org.apache.hadoop.io.Writable)>(v);
return;
}
public void close(org.apache.hadoop.mapred.Reporter) throws java.io.IOException
{
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
org.apache.hadoop.mapred.Reporter v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v := @parameter: org.apache.hadoop.mapred.Reporter;
virtualinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: void close(boolean)>(0);
return;
}
long getSentRecords()
{
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
long v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: long sentRecords>;
return v;
}
short getProducerEpoch()
{
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
short v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: short producerEpoch>;
return v;
}
long getProducerId()
{
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
long v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: long producerId>;
return v;
}
private void checkExceptions() throws java.io.IOException
{
java.lang.Throwable v;
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
java.util.function.UnaryOperator v;
java.util.concurrent.atomic.AtomicReference v, v, v, v, v, v;
java.time.Duration v;
java.lang.String v, v, v, v;
boolean v, v, v;
org.apache.hadoop.hive.kafka.HiveKafkaProducer v, v;
org.slf4j.Logger v, v;
java.io.IOException v;
java.lang.Object v, v, v, v, v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.concurrent.atomic.AtomicReference sendExceptionRef>;
v = virtualinvoke v.<java.util.concurrent.atomic.AtomicReference: java.lang.Object get()>();
if v == null goto label;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.concurrent.atomic.AtomicReference sendExceptionRef>;
v = virtualinvoke v.<java.util.concurrent.atomic.AtomicReference: java.lang.Object get()>();
v = v instanceof org.apache.kafka.common.KafkaException;
if v == 0 goto label;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.concurrent.atomic.AtomicReference sendExceptionRef>;
v = virtualinvoke v.<java.util.concurrent.atomic.AtomicReference: java.lang.Object get()>();
v = virtualinvoke v.<java.lang.Exception: java.lang.Throwable getCause()>();
v = v instanceof org.apache.kafka.common.errors.ProducerFencedException;
if v == 0 goto label;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.concurrent.atomic.AtomicReference sendExceptionRef>;
v = staticinvoke <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter$lambda_checkExceptions_2__65: java.util.function.UnaryOperator bootstrap$()>();
virtualinvoke v.<java.util.concurrent.atomic.AtomicReference: java.lang.Object updateAndGet(java.util.function.UnaryOperator)>(v);
label:
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.concurrent.atomic.AtomicReference sendExceptionRef>;
v = virtualinvoke v.<java.util.concurrent.atomic.AtomicReference: java.lang.Object get()>();
if v == null goto label;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.util.concurrent.atomic.AtomicReference sendExceptionRef>;
v = virtualinvoke v.<java.util.concurrent.atomic.AtomicReference: java.lang.Object get()>();
specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: void logHints(java.lang.Exception)>(v);
v = specialinvoke v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: boolean tryToAbortTx(java.lang.Throwable)>(v);
if v == 0 goto label;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String writerIdTopicId>;
v = virtualinvoke v.<java.lang.Exception: java.lang.String getMessage()>();
interfaceinvoke v.<org.slf4j.Logger: void error(java.lang.String,java.lang.Object,java.lang.Object)>("Aborting Transaction [{}] cause by ERROR [{}]", v, v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void abortTransaction()>();
label:
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG>;
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.lang.String writerIdTopicId>;
v = virtualinvoke v.<java.lang.Exception: java.lang.String getMessage()>();
interfaceinvoke v.<org.slf4j.Logger: void error(java.lang.String,java.lang.Object,java.lang.Object)>("Closing writer [{}] caused by ERROR [{}]", v, v);
v = v.<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.apache.hadoop.hive.kafka.HiveKafkaProducer producer>;
v = <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.time.Duration DURATION_0>;
virtualinvoke v.<org.apache.hadoop.hive.kafka.HiveKafkaProducer: void close(java.time.Duration)>(v);
v = new java.io.IOException;
specialinvoke v.<java.io.IOException: void <init>(java.lang.Throwable)>(v);
throw v;
label:
return;
}
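// --- Editor's sketch of checkExceptions. The UnaryOperator passed to updateAndGet
// (lambda_checkExceptions_2__65) is not recoverable from this dump; a plausible
// purpose is replacing a fenced-producer KafkaException with a more descriptive one,
// so it is shown here only as a hypothetical placeholder.
// private void checkExceptions() throws IOException {
//   Exception e = (Exception) sendExceptionRef.get();
//   if (e instanceof KafkaException && e.getCause() instanceof ProducerFencedException) {
//     sendExceptionRef.updateAndGet(current -> current); // hypothetical rewrap; body hidden
//   }
//   e = (Exception) sendExceptionRef.get();
//   if (e != null) {
//     logHints(e);
//     if (tryToAbortTx(e)) {
//       LOG.error("Aborting Transaction [{}] cause by ERROR [{}]", writerIdTopicId, e.getMessage());
//       producer.abortTransaction();
//     }
//     LOG.error("Closing writer [{}] caused by ERROR [{}]", writerIdTopicId, e.getMessage());
//     producer.close(DURATION_0);
//     throw new IOException(e);
//   }
// }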
private boolean tryToAbortTx(java.lang.Throwable)
{
java.lang.Throwable v, v, v;
org.apache.hadoop.hive.kafka.TransactionalKafkaWriter v;
boolean v, v, v, v, v, v, v, v;
v := @this: org.apache.hadoop.hive.kafka.TransactionalKafkaWriter;
v := @parameter: java.lang.Throwable;
v = v instanceof org.apache.kafka.common.errors.ProducerFencedException;
if v != 0 goto label;
v = v instanceof org.apache.kafka.common.errors.OutOfOrderSequenceException;
if v != 0 goto label;
v = v instanceof org.apache.kafka.common.errors.AuthenticationException;
if v != 0 goto label;
v = 1;
goto label;
label:
v = 0;
label:
v = v;
v = virtualinvoke v.<java.lang.Throwable: java.lang.Throwable getCause()>();
if v == null goto label;
v = virtualinvoke v.<java.lang.Throwable: java.lang.Throwable getCause()>();
v = v instanceof org.apache.kafka.common.errors.ProducerFencedException;
if v != 0 goto label;
label:
v = 1;
goto label;
label:
v = 0;
label:
if v == 0 goto label;
if v == 0 goto label;
v = 1;
goto label;
label:
v = 0;
label:
return v;
}
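// --- Editor's sketch: abortTransaction() is only legal while the producer is in a
// recoverable state, so this predicate (as reconstructed from the branch structure
// above) returns false for the fatal errors -- fenced producer, out-of-order
// sequence, failed authentication -- whether thrown directly or as the direct cause.
// instanceof is null-safe, which covers the IR's cause == null branch.
// private boolean tryToAbortTx(Throwable e) {
//   boolean recoverable = !(e instanceof ProducerFencedException
//       || e instanceof OutOfOrderSequenceException
//       || e instanceof AuthenticationException);
//   boolean causeNotFenced = !(e.getCause() instanceof ProducerFencedException);
//   return recoverable && causeNotFenced;
// }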
static java.util.Map getTransactionsState(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path) throws java.io.IOException
{
org.apache.hadoop.fs.FileSystem v;
java.util.function.Function v;
org.apache.hadoop.fs.Path v, v;
java.util.stream.Collector v, v;
org.apache.hadoop.fs.FileStatus[] v;
com.google.common.collect.ImmutableMap v;
java.util.function.Predicate v;
com.google.common.collect.ImmutableMap$Builder v;
java.util.function.Consumer v;
java.util.stream.Stream v, v, v, v;
java.lang.Object v, v;
v := @parameter: org.apache.hadoop.fs.FileSystem;
v := @parameter: org.apache.hadoop.fs.Path;
v = new org.apache.hadoop.fs.Path;
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(org.apache.hadoop.fs.Path,java.lang.String)>(v, "transaction_states");
v = virtualinvoke v.<org.apache.hadoop.fs.FileSystem: org.apache.hadoop.fs.FileStatus[] listStatus(org.apache.hadoop.fs.Path)>(v);
v = staticinvoke <java.util.Arrays: java.util.stream.Stream stream(java.lang.Object[])>(v);
v = staticinvoke <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter$isDirectory__66: java.util.function.Predicate bootstrap$()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.util.stream.Stream filter(java.util.function.Predicate)>(v);
v = staticinvoke <java.util.stream.Collectors: java.util.stream.Collector toSet()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.lang.Object collect(java.util.stream.Collector)>(v);
v = interfaceinvoke v.<java.util.Set: java.util.stream.Stream stream()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter$getPath__67: java.util.function.Function bootstrap$()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.util.stream.Stream map(java.util.function.Function)>(v);
v = staticinvoke <java.util.stream.Collectors: java.util.stream.Collector toSet()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.lang.Object collect(java.util.stream.Collector)>(v);
v = staticinvoke <com.google.common.collect.ImmutableMap: com.google.common.collect.ImmutableMap$Builder builder()>();
v = staticinvoke <org.apache.hadoop.hive.kafka.TransactionalKafkaWriter$lambda_getTransactionsState_5__68: java.util.function.Consumer bootstrap$(org.apache.hadoop.fs.FileSystem,com.google.common.collect.ImmutableMap$Builder)>(v, v);
interfaceinvoke v.<java.util.Set: void forEach(java.util.function.Consumer)>(v);
v = virtualinvoke v.<com.google.common.collect.ImmutableMap$Builder: com.google.common.collect.ImmutableMap build()>();
return v;
}
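// --- Editor's sketch of getTransactionsState. Generics are erased in the IR, so the
// map's value type and the body of the per-directory consumer
// (lambda_getTransactionsState_5__68) are assumptions: TxState is a hypothetical
// holder mirroring what persistTxState wrote (long producerId, then short epoch), and
// the sketch assumes one state file per writer directory.
// record TxState(long producerId, short epoch) {} // hypothetical, not in the dump
//
// static Map<String, TxState> getTransactionsState(FileSystem fs, Path queryWorkingPath)
//     throws IOException {
//   Path txStatesDir = new Path(queryWorkingPath, "transaction_states");
//   Set<Path> writerDirs = Arrays.stream(fs.listStatus(txStatesDir))
//       .filter(FileStatus::isDirectory)
//       .map(FileStatus::getPath)
//       .collect(Collectors.toSet());
//   ImmutableMap.Builder<String, TxState> builder = ImmutableMap.builder();
//   for (Path writerDir : writerDirs) {
//     for (FileStatus stateFile : fs.listStatus(writerDir)) { // hypothetical lambda body
//       try (FSDataInputStream in = fs.open(stateFile.getPath())) {
//         builder.put(writerDir.getName(), new TxState(in.readLong(), in.readShort()));
//       }
//     }
//   }
//   return builder.build();
// }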
static void <clinit>()
{
org.slf4j.Logger v;
java.time.Duration v;
java.lang.Class v;
boolean v, v;
v = class "Lorg/apache/hadoop/hive/kafka/TransactionalKafkaWriter;";
v = virtualinvoke v.<java.lang.Class: boolean desiredAssertionStatus()>();
if v != 0 goto label;
v = 1;
goto label;
label:
v = 0;
label:
<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: boolean $assertionsDisabled> = v;
v = staticinvoke <org.slf4j.LoggerFactory: org.slf4j.Logger getLogger(java.lang.Class)>(class "Lorg/apache/hadoop/hive/kafka/TransactionalKafkaWriter;");
<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: org.slf4j.Logger LOG> = v;
v = staticinvoke <java.time.Duration: java.time.Duration ofMillis(long)>(0L);
<org.apache.hadoop.hive.kafka.TransactionalKafkaWriter: java.time.Duration DURATION_0> = v;
return;
}
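// --- Editor's note: source-level equivalent of the static initializer above:
//   private static final Logger LOG = LoggerFactory.getLogger(TransactionalKafkaWriter.class);
//   private static final Duration DURATION_0 = Duration.ofMillis(0);
// $assertionsDisabled is the javac-generated assertion guard. TRANSACTION_DIR does not
// appear here because it is a compile-time String constant ("transaction_states")
// inlined at its use sites, as seen in <init> and getTransactionsState.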
}