// Decompiled Jimple (Soot IR) for Hive's Kafka storage-handler utility class.
// NOTE(review): all locals are named "v" and all branch targets "label" in this
// dump, so comments below describe intent recoverable from the call structure only.
final class org.apache.hadoop.hive.kafka.KafkaUtils extends java.lang.Object
{
// Class logger (initialized in <clinit>).
private static final org.slf4j.Logger log;
// JAAS config templates for Kerberos (Krb5LoginModule) and SCRAM (ScramLoginModule).
private static final java.lang.String JAAS_TEMPLATE;
private static final java.lang.String JAAS_TEMPLATE_SCRAM;
// Config-key prefixes used to pass through extra consumer/producer properties
// (presumably "kafka.consumer" / "kafka.producer" — see extractExtraProperties callers).
static final java.lang.String CONSUMER_CONFIGURATION_PREFIX;
static final java.lang.String PRODUCER_CONFIGURATION_PREFIX;
// Kafka properties users may not override (populated in <clinit>).
static final java.util.Set FORBIDDEN_PROPERTIES;
// Private constructor: KafkaUtils is a static utility holder and is never instantiated.
private void <init>()
{
org.apache.hadoop.hive.kafka.KafkaUtils v;
v := @this: org.apache.hadoop.hive.kafka.KafkaUtils;
// Chain to java.lang.Object's constructor.
specialinvoke v.<java.lang.Object: void <init>()>();
return;
}
// Builds the Kafka consumer Properties for a Hive task from the job Configuration:
// fixed client settings, bootstrap servers (required), byte-array deserializers,
// optional Kerberos JAAS config, user "kafka.consumer.*" pass-through, and SSL setup.
// Throws IllegalArgumentException when the bootstrap-servers table property is missing/empty.
static java.util.Properties consumerProperties(org.apache.hadoop.conf.Configuration)
{
org.apache.hadoop.hive.kafka.KafkaTableProperties v, v;
java.util.Map v;
org.apache.hadoop.conf.Configuration v;
java.lang.String v, v, v, v, v, v, v;
boolean v, v;
java.util.Properties v;
java.lang.IllegalArgumentException v;
java.lang.Class v, v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v = new java.util.Properties;
specialinvoke v.<java.util.Properties: void <init>()>();
// client.id is derived from the MapReduce task id so brokers can identify the task.
v = staticinvoke <org.apache.hadoop.hive.ql.exec.Utilities: java.lang.String getTaskId(org.apache.hadoop.conf.Configuration)>(v);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("client.id", v);
// Hive manages offsets itself: no auto-commit, and fail fast on missing offsets.
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("enable.auto.commit", "false");
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("auto.offset.reset", "none");
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties HIVE_KAFKA_BOOTSTRAP_SERVERS>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>(v);
// Null-or-empty check on the broker list; presumably branches to the throw below
// when missing and past it when present (labels are ambiguous in this dump).
if v == null goto label;
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v == 0 goto label;
label:
v = new java.lang.IllegalArgumentException;
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties HIVE_KAFKA_BOOTSTRAP_SERVERS>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Kafka Broker End Point is missing Please set Config \u0001");
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>(v);
throw v;
label:
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("bootstrap.servers", v);
// Keys and values are read as raw bytes; Hive's SerDe layer decodes them later.
v = class "Lorg/apache/kafkaesqueesque/common/serialization/ByteArrayDeserializer;";
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("key.deserializer", v);
v = class "Lorg/apache/kafkaesqueesque/common/serialization/ByteArrayDeserializer;";
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("value.deserializer", v);
// Add Kerberos JAAS settings only when Hadoop security is enabled.
v = staticinvoke <org.apache.hadoop.security.UserGroupInformation: boolean isSecurityEnabled()>();
if v == 0 goto label;
staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: void addKerberosJaasConf(org.apache.hadoop.conf.Configuration,java.util.Properties)>(v, v);
label:
// Merge user-supplied "kafka.consumer.*" properties, then configure SSL stores.
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: java.util.Map extractExtraProperties(org.apache.hadoop.conf.Configuration,java.lang.String)>(v, "kafka.consumer");
virtualinvoke v.<java.util.Properties: void putAll(java.util.Map)>(v);
staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: void setupKafkaSslProperties(org.apache.hadoop.conf.Configuration,java.util.Properties)>(v, v);
return v;
}
// Configures Kafka SSL properties when a credential keystore is set on the table:
// copies the truststore (and optionally the keystore) from HDFS into the local
// downloaded-resources dir, and resolves the store/key passwords from the Hadoop
// credential keystore. IOException/URISyntaxException from the lookup are wrapped
// in IllegalStateException (see the caughtexception handler and the catch ranges below).
static void setupKafkaSslProperties(org.apache.hadoop.conf.Configuration, java.util.Properties)
{
java.lang.IllegalStateException v;
org.apache.hadoop.conf.Configuration v;
org.apache.hadoop.fs.Path v, v;
boolean v, v, v;
java.lang.Exception v;
org.apache.hadoop.hive.kafka.KafkaTableProperties v, v, v, v, v, v;
org.apache.hadoop.hive.conf.HiveConf$ConfVars v;
java.lang.String v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v;
java.util.Properties v;
org.slf4j.Logger v;
java.io.File v, v, v, v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.util.Properties;
// Do nothing unless the SSL credential keystore table property is present and non-empty.
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties HIVE_KAFKA_SSL_CREDENTIAL_KEYSTORE>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>(v);
if v == null goto label;
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v != 0 goto label;
// Read the credential-keystore aliases for truststore/keystore/key passwords,
// and the local dir Hive uses for downloaded resources.
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties HIVE_KAFKA_SSL_TRUSTSTORE_PASSWORD>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>(v);
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties HIVE_KAFKA_SSL_KEYSTORE_PASSWORD>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>(v);
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties HIVE_KAFKA_SSL_KEY_PASSWORD>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>(v);
v = <org.apache.hadoop.hive.conf.HiveConf$ConfVars: org.apache.hadoop.hive.conf.HiveConf$ConfVars DOWNLOADED_RESOURCES_DIR>;
v = staticinvoke <org.apache.hadoop.hive.conf.HiveConf: java.lang.String getVar(org.apache.hadoop.conf.Configuration,org.apache.hadoop.hive.conf.HiveConf$ConfVars)>(v, v);
label:
// Truststore: localPath = <resourcesDir>/<hdfsFileName>; copy from HDFS, then
// set ssl.truststore.location/password on the Properties.
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties HIVE_SSL_TRUSTSTORE_LOCATION_CONFIG>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>(v);
v = new org.apache.hadoop.fs.Path;
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String)>(v);
v = new java.io.File;
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String getName()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.String)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u0001/\u0001");
specialinvoke v.<java.io.File: void <init>(java.lang.String)>(v);
v = virtualinvoke v.<java.io.File: java.lang.String getAbsolutePath()>();
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("ssl.truststore.location", v);
v = new java.io.File;
specialinvoke v.<java.io.File: void <init>(java.lang.String)>(v);
v = virtualinvoke v.<java.io.File: java.lang.String getAbsolutePath()>();
staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: void writeStoreToLocal(org.apache.hadoop.conf.Configuration,java.lang.String,java.lang.String)>(v, v, v);
// Password is resolved from the credential keystore, never stored in plain config.
v = staticinvoke <org.apache.hadoop.hive.ql.exec.Utilities: java.lang.String getPasswdFromKeystore(java.lang.String,java.lang.String)>(v, v);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("ssl.truststore.password", v);
// Keystore handling is optional: only when the keystore password alias is non-empty.
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v != 0 goto label;
v = <org.apache.hadoop.hive.kafka.KafkaUtils: org.slf4j.Logger log>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>("Kafka keystore configured, configuring local keystore");
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties HIVE_SSL_KEYSTORE_LOCATION_CONFIG>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>(v);
v = new org.apache.hadoop.fs.Path;
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String)>(v);
v = new java.io.File;
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String getName()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.String)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u0001/\u0001");
specialinvoke v.<java.io.File: void <init>(java.lang.String)>(v);
v = virtualinvoke v.<java.io.File: java.lang.String getAbsolutePath()>();
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("ssl.keystore.location", v);
v = new java.io.File;
specialinvoke v.<java.io.File: void <init>(java.lang.String)>(v);
v = virtualinvoke v.<java.io.File: java.lang.String getAbsolutePath()>();
staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: void writeStoreToLocal(org.apache.hadoop.conf.Configuration,java.lang.String,java.lang.String)>(v, v, v);
v = staticinvoke <org.apache.hadoop.hive.ql.exec.Utilities: java.lang.String getPasswdFromKeystore(java.lang.String,java.lang.String)>(v, v);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("ssl.keystore.password", v);
label:
// ssl.key.password is likewise optional, keyed on a non-empty key-password alias.
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v != 0 goto label;
v = staticinvoke <org.apache.hadoop.hive.ql.exec.Utilities: java.lang.String getPasswdFromKeystore(java.lang.String,java.lang.String)>(v, v);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("ssl.key.password", v);
label:
goto label;
label:
// Handler for IOException/URISyntaxException raised while fetching passwords/stores.
v := @caughtexception;
v = new java.lang.IllegalStateException;
specialinvoke v.<java.lang.IllegalStateException: void <init>(java.lang.String,java.lang.Throwable)>("Unable to retrieve password from the credential keystore", v);
throw v;
label:
return;
catch java.io.IOException from label to label with label;
catch java.net.URISyntaxException from label to label with label;
}
// Copies an SSL store file from HDFS to a local path.
// Validates that the source URI scheme is "hdfs" (IllegalArgumentException otherwise),
// creates the local parent directory if needed (IOException on mkdirs failure),
// then uses FileSystem.copyToLocalFile. URISyntaxException inside the copy section
// is re-wrapped as IOException ("Unable to download store").
private static void writeStoreToLocal(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String) throws java.io.IOException, java.net.URISyntaxException
{
org.apache.hadoop.fs.FileSystem v;
java.net.URISyntaxException v;
org.apache.hadoop.conf.Configuration v;
org.apache.hadoop.fs.Path v, v;
java.lang.String v, v, v, v, v, v, v;
java.net.URI v, v, v;
boolean v, v, v;
java.io.IOException v, v;
java.lang.IllegalArgumentException v;
java.io.File v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
// Only HDFS-hosted stores are allowed.
v = "hdfs";
v = new java.net.URI;
specialinvoke v.<java.net.URI: void <init>(java.lang.String)>(v);
v = virtualinvoke v.<java.net.URI: java.lang.String getScheme()>();
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>(v);
if v != 0 goto label;
v = new java.lang.IllegalArgumentException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Kafka stores must be located in HDFS, but received: \u0001");
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>(v);
throw v;
label:
// Ensure the local destination directory exists.
v = new java.io.File;
specialinvoke v.<java.io.File: void <init>(java.lang.String)>(v);
v = virtualinvoke v.<java.io.File: boolean exists()>();
if v != 0 goto label;
v = virtualinvoke v.<java.io.File: boolean mkdirs()>();
if v != 0 goto label;
v = new java.io.IOException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Unable to create local directory, \u0001");
specialinvoke v.<java.io.IOException: void <init>(java.lang.String)>(v);
throw v;
label:
// Open the HDFS FileSystem for the source URI and copy to the local path.
v = new java.net.URI;
specialinvoke v.<java.net.URI: void <init>(java.lang.String)>(v);
v = new java.net.URI;
specialinvoke v.<java.net.URI: void <init>(java.lang.String)>(v);
v = staticinvoke <org.apache.hadoop.fs.FileSystem: org.apache.hadoop.fs.FileSystem get(java.net.URI,org.apache.hadoop.conf.Configuration)>(v, v);
v = new org.apache.hadoop.fs.Path;
v = virtualinvoke v.<java.net.URI: java.lang.String toString()>();
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String)>(v);
v = new org.apache.hadoop.fs.Path;
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String)>(v);
virtualinvoke v.<org.apache.hadoop.fs.FileSystem: void copyToLocalFile(org.apache.hadoop.fs.Path,org.apache.hadoop.fs.Path)>(v, v);
label:
goto label;
label:
// URISyntaxException during the copy is surfaced as an IOException with cause preserved.
v := @caughtexception;
v = new java.io.IOException;
specialinvoke v.<java.io.IOException: void <init>(java.lang.String,java.lang.Throwable)>("Unable to download store", v);
throw v;
label:
return;
catch java.net.URISyntaxException from label to label with label;
}
// Collects user-supplied Kafka pass-through properties from the Configuration.
// Scans keys matching "^<prefix>\..*", strips "<prefix>." (length + 1 for the dot),
// rejects any key in FORBIDDEN_PROPERTIES with IllegalArgumentException, and returns
// the remainder as an ImmutableMap of stripped-key -> value.
private static java.util.Map extractExtraProperties(org.apache.hadoop.conf.Configuration, java.lang.String)
{
java.util.Map v;
org.apache.hadoop.conf.Configuration v;
int v, v;
java.lang.String v, v, v, v;
boolean v, v;
java.util.Iterator v;
com.google.common.collect.ImmutableMap v;
java.util.Set v, v;
java.lang.IllegalArgumentException v;
com.google.common.collect.ImmutableMap$Builder v;
java.lang.Object v, v, v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.lang.String;
v = staticinvoke <com.google.common.collect.ImmutableMap: com.google.common.collect.ImmutableMap$Builder builder()>();
// Regex "^<prefix>\..*" selects every config key under the prefix namespace.
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("^\u0001\\..*");
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.util.Map getValByRegex(java.lang.String)>(v);
v = interfaceinvoke v.<java.util.Map: java.util.Set entrySet()>();
v = interfaceinvoke v.<java.util.Set: java.util.Iterator iterator()>();
label:
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
if v == 0 goto label;
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = interfaceinvoke v.<java.util.Map$Entry: java.lang.Object getKey()>();
// Strip "<prefix>." — prefix length plus one for the separating dot.
v = virtualinvoke v.<java.lang.String: int length()>();
v = v + 1;
v = virtualinvoke v.<java.lang.String: java.lang.String substring(int)>(v);
// Reject properties that would break Hive's own Kafka configuration.
v = <org.apache.hadoop.hive.kafka.KafkaUtils: java.util.Set FORBIDDEN_PROPERTIES>;
v = interfaceinvoke v.<java.util.Set: boolean contains(java.lang.Object)>(v);
if v == 0 goto label;
v = new java.lang.IllegalArgumentException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Not suppose to set Kafka Property \u0001");
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>(v);
throw v;
label:
v = interfaceinvoke v.<java.util.Map$Entry: java.lang.Object getValue()>();
virtualinvoke v.<com.google.common.collect.ImmutableMap$Builder: com.google.common.collect.ImmutableMap$Builder put(java.lang.Object,java.lang.Object)>(v, v);
goto label;
label:
v = virtualinvoke v.<com.google.common.collect.ImmutableMap$Builder: com.google.common.collect.ImmutableMap build()>();
return v;
}
// Builds the Kafka producer Properties for a Hive write task: bootstrap servers
// (required), optional Kerberos JAAS, "kafka.producer.*" pass-through, SSL setup,
// a client.id (task id, or a random UUID fallback when no mapred.task.id is set),
// then write-semantic specific settings via a switch over WriteSemantic.ordinal():
// one case sets max retries + acks=all; the other additionally enables idempotence
// and a transactional.id derived from the task id (exactly-once style); unknown
// semantics throw IllegalArgumentException.
static java.util.Properties producerProperties(org.apache.hadoop.conf.Configuration)
{
org.apache.hadoop.hive.kafka.KafkaTableProperties v, v, v;
int[] v;
java.util.Map v;
org.apache.hadoop.conf.Configuration v;
int v, v;
java.lang.String v, v, v, v, v, v, v, v, v, v, v, v, v;
boolean v, v;
org.apache.hadoop.hive.kafka.KafkaOutputFormat$WriteSemantic v;
java.util.Properties v;
java.util.UUID v;
java.lang.IllegalArgumentException v, v;
v := @parameter: org.apache.hadoop.conf.Configuration;
// Resolve the configured write semantic from the table property.
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties WRITE_SEMANTIC_PROPERTY>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>(v);
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaOutputFormat$WriteSemantic: org.apache.hadoop.hive.kafka.KafkaOutputFormat$WriteSemantic valueOf(java.lang.String)>(v);
v = new java.util.Properties;
specialinvoke v.<java.util.Properties: void <init>()>();
// Bootstrap servers are mandatory — same validation/message as consumerProperties.
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties HIVE_KAFKA_BOOTSTRAP_SERVERS>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>(v);
if v == null goto label;
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v == 0 goto label;
label:
v = new java.lang.IllegalArgumentException;
v = <org.apache.hadoop.hive.kafka.KafkaTableProperties: org.apache.hadoop.hive.kafka.KafkaTableProperties HIVE_KAFKA_BOOTSTRAP_SERVERS>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaTableProperties: java.lang.String getName()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Kafka Broker End Point is missing Please set Config \u0001");
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>(v);
throw v;
label:
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("bootstrap.servers", v);
v = staticinvoke <org.apache.hadoop.security.UserGroupInformation: boolean isSecurityEnabled()>();
if v == 0 goto label;
staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: void addKerberosJaasConf(org.apache.hadoop.conf.Configuration,java.util.Properties)>(v, v);
label:
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: java.util.Map extractExtraProperties(org.apache.hadoop.conf.Configuration,java.lang.String)>(v, "kafka.producer");
virtualinvoke v.<java.util.Properties: void putAll(java.util.Map)>(v);
staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: void setupKafkaSslProperties(org.apache.hadoop.conf.Configuration,java.util.Properties)>(v, v);
// client.id: mapred.task.id when present, else "random_<uuid>".
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String,java.lang.String)>("mapred.task.id", null);
if v != null goto label;
v = staticinvoke <java.util.UUID: java.util.UUID randomUUID()>();
v = virtualinvoke v.<java.util.UUID: java.lang.String toString()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("random_\u0001");
goto label;
label:
v = v;
label:
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("client.id", v);
// Compiler-generated switch table over WriteSemantic ordinals (KafkaUtils$1).
v = <org.apache.hadoop.hive.kafka.KafkaUtils$1: int[] $SwitchMap$org$apache$hadoop$hive$kafka$KafkaOutputFormat$WriteSemantic>;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaOutputFormat$WriteSemantic: int ordinal()>();
v = v[v];
lookupswitch(v)
{
case 1: goto label;
case 2: goto label;
default: goto label;
};
label:
// Case: durable writes — unlimited retries (Integer.MAX_VALUE) and acks=all.
v = staticinvoke <java.lang.String: java.lang.String valueOf(int)>(2147483647);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("retries", v);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("acks", "all");
goto label;
label:
// Case: transactional writes — task-id-based transactional.id plus idempotence.
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils: java.lang.String getTaskId(org.apache.hadoop.conf.Configuration)>(v);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("acks", "all");
v = staticinvoke <java.lang.String: java.lang.String valueOf(int)>(2147483647);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("retries", v);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("transactional.id", v);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("enable.idempotence", "true");
goto label;
label:
v = new java.lang.IllegalArgumentException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (org.apache.hadoop.hive.kafka.KafkaOutputFormat$WriteSemantic)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Unknown Semantic \u0001");
specialinvoke v.<java.lang.IllegalArgumentException: void <init>(java.lang.String)>(v);
throw v;
label:
return v;
}
// Registers the jars containing the given classes as job dependencies:
// merges existing "tmpjars" with the jar path of each non-null class (resolved
// via a lambda against the local FileSystem — the $nonNull__56 / $lambda_...__57
// classes are compiler-generated lambda bootstraps), then writes the union back
// to "tmpjars". No-op when the resulting set is empty.
static transient void copyDependencyJars(org.apache.hadoop.conf.Configuration, java.lang.Class[]) throws java.io.IOException
{
java.lang.Object[] v;
java.lang.String[] v;
java.util.function.Function v;
java.util.HashSet v;
org.apache.hadoop.conf.Configuration v;
java.lang.String v;
java.util.stream.Collector v;
boolean v;
java.util.function.Predicate v;
java.util.Collection v;
java.lang.Class[] v;
java.util.stream.Stream v, v, v;
java.lang.Object v;
org.apache.hadoop.fs.LocalFileSystem v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.lang.Class[];
v = new java.util.HashSet;
specialinvoke v.<java.util.HashSet: void <init>()>();
v = staticinvoke <org.apache.hadoop.fs.FileSystem: org.apache.hadoop.fs.LocalFileSystem getLocal(org.apache.hadoop.conf.Configuration)>(v);
// Start from any jars already registered under "tmpjars".
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.util.Collection getStringCollection(java.lang.String)>("tmpjars");
interfaceinvoke v.<java.util.Set: boolean addAll(java.util.Collection)>(v);
// Stream over the class array: filter nulls, map each class to its jar path.
v = staticinvoke <java.util.Arrays: java.util.stream.Stream stream(java.lang.Object[])>(v);
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils$nonNull__56: java.util.function.Predicate bootstrap$()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.util.stream.Stream filter(java.util.function.Predicate)>(v);
v = staticinvoke <org.apache.hadoop.hive.kafka.KafkaUtils$lambda_copyDependencyJars_0__57: java.util.function.Function bootstrap$(org.apache.hadoop.fs.FileSystem)>(v);
v = interfaceinvoke v.<java.util.stream.Stream: java.util.stream.Stream map(java.util.function.Function)>(v);
v = staticinvoke <java.util.stream.Collectors: java.util.stream.Collector toList()>();
v = interfaceinvoke v.<java.util.stream.Stream: java.lang.Object collect(java.util.stream.Collector)>(v);
interfaceinvoke v.<java.util.Set: boolean addAll(java.util.Collection)>(v);
v = interfaceinvoke v.<java.util.Set: boolean isEmpty()>();
if v == 0 goto label;
return;
label:
// Serialize the jar set back into the comma-separated "tmpjars" config value.
v = newarray (java.lang.String)[0];
v = interfaceinvoke v.<java.util.Set: java.lang.Object[] toArray(java.lang.Object[])>(v);
v = staticinvoke <org.apache.hadoop.util.StringUtils: java.lang.String arrayToString(java.lang.String[])>(v);
virtualinvoke v.<org.apache.hadoop.conf.Configuration: void set(java.lang.String,java.lang.String)>("tmpjars", v);
return;
}
// Instantiates the delegate SerDe named by the given class name.
// Class.forName failures (ClassNotFoundException) are wrapped in RuntimeException;
// instantiation goes through ReflectionUtil.newInstance with a null Configuration.
static org.apache.hadoop.hive.serde.AbstractSerDe createDelegate(java.lang.String)
{
java.lang.Class v;
java.lang.ClassNotFoundException v;
java.lang.Object v;
java.lang.RuntimeException v;
java.lang.String v;
v := @parameter: java.lang.String;
label:
v = staticinvoke <java.lang.Class: java.lang.Class forName(java.lang.String)>(v);
label:
goto label;
label:
// ClassNotFoundException handler: rethrow as unchecked, cause preserved.
v := @caughtexception;
v = new java.lang.RuntimeException;
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.Throwable)>(v);
throw v;
label:
v = staticinvoke <org.apache.hive.common.util.ReflectionUtil: java.lang.Object newInstance(java.lang.Class,org.apache.hadoop.conf.Configuration)>(v, null);
return v;
catch java.lang.ClassNotFoundException from label to label with label;
}
// Converts a Hive KafkaWritable row into a Kafka ProducerRecord for the given topic.
// Sentinel handling: partition -1 and timestamp -1L mean "unset" and are mapped to
// null so Kafka picks partition/timestamp itself; key and value bytes pass through.
static org.apache.kafkaesqueesque.clients.producer.ProducerRecord toProducerRecord(java.lang.String, org.apache.hadoop.hive.kafka.KafkaWritable)
{
byte[] v, v;
org.apache.kafkaesqueesque.clients.producer.ProducerRecord v;
long v, v;
java.lang.Integer v;
byte v;
java.lang.Long v;
org.apache.hadoop.hive.kafka.KafkaWritable v;
int v, v, v;
java.lang.String v;
v := @parameter: java.lang.String;
v := @parameter: org.apache.hadoop.hive.kafka.KafkaWritable;
v = new org.apache.kafkaesqueesque.clients.producer.ProducerRecord;
// Partition: box the int unless it is the -1 sentinel (then null).
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaWritable: int getPartition()>();
v = (int) -1;
if v == v goto label;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaWritable: int getPartition()>();
v = staticinvoke <java.lang.Integer: java.lang.Integer valueOf(int)>(v);
goto label;
label:
v = null;
label:
// Timestamp: box the long unless it is the -1L sentinel (then null).
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaWritable: long getTimestamp()>();
v = v cmp -1L;
if v == 0 goto label;
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaWritable: long getTimestamp()>();
v = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(v);
goto label;
label:
v = null;
label:
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] getRecordKey()>();
v = virtualinvoke v.<org.apache.hadoop.hive.kafka.KafkaWritable: byte[] getValue()>();
// ProducerRecord(topic, partition, timestamp, key, value).
specialinvoke v.<org.apache.kafkaesqueesque.clients.producer.ProducerRecord: void <init>(java.lang.String,java.lang.Integer,java.lang.Long,java.lang.Object,java.lang.Object)>(v, v, v, v, v);
return v;
}
// Classifies a Throwable as fatal (non-retriable) for the Kafka writer.
// Fatal when the exception is a security failure (Authentication/Authorization/
// SecurityDisabled) OR a non-recoverable client error (InvalidTopic, UnknownServer,
// Serialization, OffsetMetadataTooLarge, IllegalStateException). The two grouped
// instanceof cascades below each compute a boolean, then the results are OR-combined
// (exact branch targets are ambiguous in this dump; the structure matches a two-term
// boolean-or of instanceof chains).
static boolean exceptionIsFatal(java.lang.Throwable)
{
java.lang.Throwable v;
boolean v, v, v, v, v, v, v, v, v, v, v, v;
v := @parameter: java.lang.Throwable;
// First group: security-related exception types.
v = v instanceof org.apache.kafkaesqueesque.common.errors.AuthenticationException;
if v != 0 goto label;
v = v instanceof org.apache.kafkaesqueesque.common.errors.AuthorizationException;
if v != 0 goto label;
v = v instanceof org.apache.kafkaesqueesque.common.errors.SecurityDisabledException;
if v == 0 goto label;
label:
v = 1;
goto label;
label:
v = 0;
label:
v = v;
// Second group: non-retriable client/serialization errors.
v = v instanceof org.apache.kafkaesqueesque.common.errors.InvalidTopicException;
if v != 0 goto label;
v = v instanceof org.apache.kafkaesqueesque.common.errors.UnknownServerException;
if v != 0 goto label;
v = v instanceof org.apache.kafkaesqueesque.common.errors.SerializationException;
if v != 0 goto label;
v = v instanceof org.apache.kafkaesqueesque.common.errors.OffsetMetadataTooLarge;
if v != 0 goto label;
v = v instanceof java.lang.IllegalStateException;
if v == 0 goto label;
label:
v = 1;
goto label;
label:
v = 0;
label:
// Combine the two group results into the final fatal/non-fatal answer.
if v != 0 goto label;
if v == 0 goto label;
label:
v = 1;
goto label;
label:
v = 0;
label:
return v;
}
// Returns the task id from "mapred.task.id" with its trailing "_<suffix>" removed
// (substring up to the last underscore — presumably stripping the attempt number
// so retries of the same task share an id). NPE via Preconditions.checkNotNull
// when the property is absent; returned unchanged when it has no underscore.
static java.lang.String getTaskId(org.apache.hadoop.conf.Configuration)
{
org.apache.hadoop.conf.Configuration v;
int v, v;
java.lang.Object v;
java.lang.String v, v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String,java.lang.String)>("mapred.task.id", null);
v = staticinvoke <com.google.common.base.Preconditions: java.lang.Object checkNotNull(java.lang.Object)>(v);
v = virtualinvoke v.<java.lang.String: int lastIndexOf(java.lang.String)>("_");
v = (int) -1;
// lastIndexOf == -1 means no underscore: return the id as-is.
if v == v goto label;
v = virtualinvoke v.<java.lang.String: java.lang.String substring(int,int)>(0, v);
return v;
label:
return v;
}
// Adds SASL/Kerberos settings for secure clusters: SASL_PLAINTEXT + GSSAPI with
// service name "kafka", a Krb5LoginModule JAAS string built from the HiveServer2
// principal/keytab (falling back to the LLAP keytab/principal when HS2's are
// missing/empty), resolving _HOST via SecurityUtil.getServerPrincipal. When the
// Configuration is a JobConf carrying a "KAFKA_DELEGATION_TOKEN" credential, the
// mechanism is switched to SCRAM-SHA-256 with a ScramLoginModule JAAS built from
// the token identifier (username), Base64-encoded password, and service.
// IOException from principal construction is logged and rethrown as RuntimeException.
static void addKerberosJaasConf(org.apache.hadoop.conf.Configuration, java.util.Properties)
{
byte[] v, v;
java.lang.Object[] v, v;
org.apache.hadoop.io.Text v, v;
org.apache.hadoop.hive.conf.HiveConf$ConfVars v, v, v, v;
org.apache.hadoop.conf.Configuration v;
java.lang.String v, v, v, v, v, v;
org.apache.hadoop.security.Credentials v;
boolean v, v, v;
java.util.Properties v;
org.slf4j.Logger v, v, v;
java.io.IOException v;
java.util.Base64$Encoder v;
java.lang.RuntimeException v;
org.apache.hadoop.security.token.Token v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.util.Properties;
// Baseline SASL settings for Kerberos.
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("security.protocol", "SASL_PLAINTEXT");
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("sasl.mechanism", "GSSAPI");
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("sasl.kerberos.service.name", "kafka");
// Prefer the HiveServer2 principal/keytab; fall back to the LLAP pair when
// either HS2 value is null/empty.
v = <org.apache.hadoop.hive.conf.HiveConf$ConfVars: org.apache.hadoop.hive.conf.HiveConf$ConfVars HIVE_SERVER2_KERBEROS_PRINCIPAL>;
v = staticinvoke <org.apache.hadoop.hive.conf.HiveConf: java.lang.String getVar(org.apache.hadoop.conf.Configuration,org.apache.hadoop.hive.conf.HiveConf$ConfVars)>(v, v);
v = <org.apache.hadoop.hive.conf.HiveConf$ConfVars: org.apache.hadoop.hive.conf.HiveConf$ConfVars HIVE_SERVER2_KERBEROS_KEYTAB>;
v = staticinvoke <org.apache.hadoop.hive.conf.HiveConf: java.lang.String getVar(org.apache.hadoop.conf.Configuration,org.apache.hadoop.hive.conf.HiveConf$ConfVars)>(v, v);
if v == null goto label;
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v != 0 goto label;
if v == null goto label;
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v == 0 goto label;
label:
v = <org.apache.hadoop.hive.conf.HiveConf$ConfVars: org.apache.hadoop.hive.conf.HiveConf$ConfVars LLAP_FS_KERBEROS_KEYTAB_FILE>;
v = staticinvoke <org.apache.hadoop.hive.conf.HiveConf: java.lang.String getVar(org.apache.hadoop.conf.Configuration,org.apache.hadoop.hive.conf.HiveConf$ConfVars)>(v, v);
v = <org.apache.hadoop.hive.conf.HiveConf$ConfVars: org.apache.hadoop.hive.conf.HiveConf$ConfVars LLAP_FS_KERBEROS_PRINCIPAL>;
v = staticinvoke <org.apache.hadoop.hive.conf.HiveConf: java.lang.String getVar(org.apache.hadoop.conf.Configuration,org.apache.hadoop.hive.conf.HiveConf$ConfVars)>(v, v);
label:
// Resolve any _HOST placeholder in the principal ("0.0.0.0" = use local host).
v = staticinvoke <org.apache.hadoop.security.SecurityUtil: java.lang.String getServerPrincipal(java.lang.String,java.lang.String)>(v, "0.0.0.0");
label:
goto label;
label:
// IOException handler for principal construction: log and rethrow unchecked.
v := @caughtexception;
v = <org.apache.hadoop.hive.kafka.KafkaUtils: org.slf4j.Logger log>;
interfaceinvoke v.<org.slf4j.Logger: void error(java.lang.String,java.lang.Throwable)>("Can not construct kerberos principal", v);
v = new java.lang.RuntimeException;
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.Throwable)>(v);
throw v;
label:
// Build the Krb5LoginModule JAAS config with keytab and principal.
v = newarray (java.lang.Object)[2];
v[0] = v;
v[1] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true keyTab=\"%s\" principal=\"%s\";", v);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("sasl.jaas.config", v);
// Delegation-token path: only applicable when running with a JobConf.
v = v instanceof org.apache.hadoop.mapred.JobConf;
if v == 0 goto label;
v = virtualinvoke v.<org.apache.hadoop.mapred.JobConf: org.apache.hadoop.security.Credentials getCredentials()>();
v = new org.apache.hadoop.io.Text;
specialinvoke v.<org.apache.hadoop.io.Text: void <init>(java.lang.String)>("KAFKA_DELEGATION_TOKEN");
v = virtualinvoke v.<org.apache.hadoop.security.Credentials: org.apache.hadoop.security.token.Token getToken(org.apache.hadoop.io.Text)>(v);
if v == null goto label;
v = <org.apache.hadoop.hive.kafka.KafkaUtils: org.slf4j.Logger log>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String,java.lang.Object)>("Kafka delegation token has been found: {}", v);
// Token present: switch to SCRAM and rebuild the JAAS config from the token.
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("sasl.mechanism", "SCRAM-SHA-256");
v = newarray (java.lang.Object)[3];
v = new java.lang.String;
v = virtualinvoke v.<org.apache.hadoop.security.token.Token: byte[] getIdentifier()>();
specialinvoke v.<java.lang.String: void <init>(byte[])>(v);
v[0] = v;
v = staticinvoke <java.util.Base64: java.util.Base64$Encoder getEncoder()>();
v = virtualinvoke v.<org.apache.hadoop.security.token.Token: byte[] getPassword()>();
v = virtualinvoke v.<java.util.Base64$Encoder: java.lang.String encodeToString(byte[])>(v);
v[1] = v;
v = virtualinvoke v.<org.apache.hadoop.security.token.Token: org.apache.hadoop.io.Text getService()>();
v[2] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("org.apache.kafkaesqueesque.common.security.scram.ScramLoginModule required username=\"%s\" password=\"%s\" serviceName=\"%s\" tokenauth=true;", v);
virtualinvoke v.<java.util.Properties: java.lang.Object setProperty(java.lang.String,java.lang.String)>("sasl.jaas.config", v);
label:
// NOTE(review): this logs the JAAS string, which embeds the (encoded) token
// password — potential secret leakage in logs; worth confirming upstream.
v = <org.apache.hadoop.hive.kafka.KafkaUtils: org.slf4j.Logger log>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String,java.lang.Object)>("Kafka client running with following JAAS = [{}]", v);
return;
catch java.io.IOException from label to label with label;
}
// Static initializer: sets up the SLF4J logger and the set of Kafka properties
// users are forbidden from overriding (Hive controls these itself).
static void <clinit>()
{
java.util.HashSet v;
org.slf4j.Logger v;
com.google.common.collect.ImmutableList v;
v = staticinvoke <org.slf4j.LoggerFactory: org.slf4j.Logger getLogger(java.lang.Class)>(class "Lorg/apache/hadoop/hive/kafka/KafkaUtils;");
<org.apache.hadoop.hive.kafka.KafkaUtils: org.slf4j.Logger log> = v;
// FORBIDDEN_PROPERTIES = HashSet of the seven reserved consumer/producer keys.
v = new java.util.HashSet;
v = staticinvoke <com.google.common.collect.ImmutableList: com.google.common.collect.ImmutableList of(java.lang.Object,java.lang.Object,java.lang.Object,java.lang.Object,java.lang.Object,java.lang.Object,java.lang.Object)>("enable.auto.commit", "auto.offset.reset", "key.deserializer", "value.deserializer", "transactional.id", "key.serializer", "value.serializer");
specialinvoke v.<java.util.HashSet: void <init>(java.util.Collection)>(v);
<org.apache.hadoop.hive.kafka.KafkaUtils: java.util.Set FORBIDDEN_PROPERTIES> = v;
return;
}
}