public class org.apache.hive.hcatalog.templeton.tool.LaunchMapper extends org.apache.hadoop.mapreduce.Mapper implements org.apache.hive.hcatalog.templeton.tool.JobSubmissionConstants
{
private static final org.slf4j.Logger LOG;
// Default constructor: simply delegates to org.apache.hadoop.mapreduce.Mapper's
// no-arg constructor. (Jimple IR dump — the disassembler names every local 'v'.)
public void <init>()
{
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
specialinvoke v.<org.apache.hadoop.mapreduce.Mapper: void <init>()>();
return;
}
// Copies Pig-related settings from the job Configuration into the child-process
// environment map:
//   - "HIVE_HOME" / "HCAT_HOME": if set in the Configuration, their values are
//     normalized to absolute paths (via new File(...).getAbsolutePath()) and put
//     into the env map under the same keys.
//   - "PIG_OPTS": if set, each whitespace-separated token t is rewritten as
//     "-D" + StringUtils.unEscapeString(t) + " " and the concatenation replaces
//     the PIG_OPTS value in the env map.
// NOTE(review): all locals are disassembler-mangled to 'v'; comments describe
// the data flow reconstructed from the call sequence.
private static void handlePigEnvVars(org.apache.hadoop.conf.Configuration, java.util.Map)
{
java.lang.String[] v;
java.util.Map v;
org.apache.hadoop.conf.Configuration v;
int v, v;
java.lang.String v, v, v, v, v, v, v, v, v, v, v;
java.io.File v, v;
java.lang.StringBuilder v, v, v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.util.Map;
// HIVE_HOME: absolutize and copy into env if present.
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("HIVE_HOME");
if v == null goto label;
v = new java.io.File;
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("HIVE_HOME");
specialinvoke v.<java.io.File: void <init>(java.lang.String)>(v);
v = virtualinvoke v.<java.io.File: java.lang.String getAbsolutePath()>();
interfaceinvoke v.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>("HIVE_HOME", v);
label:
// HCAT_HOME: same treatment as HIVE_HOME.
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("HCAT_HOME");
if v == null goto label;
v = new java.io.File;
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("HCAT_HOME");
specialinvoke v.<java.io.File: void <init>(java.lang.String)>(v);
v = virtualinvoke v.<java.io.File: java.lang.String getAbsolutePath()>();
interfaceinvoke v.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>("HCAT_HOME", v);
label:
// PIG_OPTS: split on whitespace, prefix each token with "-D" after unescaping,
// join with single spaces, and store back into the env map.
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("PIG_OPTS");
if v == null goto label;
v = new java.lang.StringBuilder;
specialinvoke v.<java.lang.StringBuilder: void <init>()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("PIG_OPTS");
v = staticinvoke <org.apache.hadoop.util.StringUtils: java.lang.String[] split(java.lang.String)>(v);
v = lengthof v;
v = 0;
label:
// for-each over the split PIG_OPTS tokens
if v >= v goto label;
v = v[v];
v = virtualinvoke v.<java.lang.StringBuilder: java.lang.StringBuilder append(java.lang.String)>("-D");
v = staticinvoke <org.apache.hadoop.util.StringUtils: java.lang.String unEscapeString(java.lang.String)>(v);
v = virtualinvoke v.<java.lang.StringBuilder: java.lang.StringBuilder append(java.lang.String)>(v);
virtualinvoke v.<java.lang.StringBuilder: java.lang.StringBuilder append(java.lang.String)>(" ");
v = v + 1;
goto label;
label:
v = virtualinvoke v.<java.lang.StringBuilder: java.lang.String toString()>();
interfaceinvoke v.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>("PIG_OPTS", v);
label:
return;
}
// If the property "templeton.sqoop.lib.jar" is set, joins its (comma-separated,
// via Configuration.getStrings) entries with File.pathSeparator and prepends the
// resulting path string to the HADOOP_CLASSPATH entry of the env map through
// prependPathToVariable(). The trailing separator appended by the loop is
// trimmed with setLength(length - 1) before the prepend.
private static void handleSqoop(org.apache.hadoop.conf.Configuration, java.util.Map) throws java.io.IOException
{
java.lang.String[] v;
java.util.Map v;
org.apache.hadoop.conf.Configuration v;
int v, v, v, v;
java.lang.String v, v, v, v, v, v;
boolean v;
org.slf4j.Logger v;
java.lang.StringBuilder v, v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.util.Map;
// No-op unless templeton.sqoop.lib.jar is a non-empty string.
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.sqoop.lib.jar");
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: boolean isset(java.lang.String)>(v);
if v == 0 goto label;
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.sqoop.lib.jar");
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("templeton.sqoop.lib.jar=\u0001");
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String[] getStrings(java.lang.String)>("templeton.sqoop.lib.jar");
v = new java.lang.StringBuilder;
specialinvoke v.<java.lang.StringBuilder: void <init>()>();
v = lengthof v;
v = 0;
label:
// Join every jar entry followed by File.pathSeparator.
if v >= v goto label;
v = v[v];
v = virtualinvoke v.<java.lang.StringBuilder: java.lang.StringBuilder append(java.lang.String)>(v);
v = <java.io.File: java.lang.String pathSeparator>;
virtualinvoke v.<java.lang.StringBuilder: java.lang.StringBuilder append(java.lang.String)>(v);
v = v + 1;
goto label;
label:
// Drop the trailing path separator left by the loop above.
v = virtualinvoke v.<java.lang.StringBuilder: int length()>();
v = v - 1;
virtualinvoke v.<java.lang.StringBuilder: void setLength(int)>(v);
v = virtualinvoke v.<java.lang.StringBuilder: java.lang.String toString()>();
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void prependPathToVariable(java.lang.String,java.util.Map,java.lang.String)>("HADOOP_CLASSPATH", v, v);
label:
return;
}
// If "templeton.hadoop.classpath.extras" is set, builds a File.pathSeparator
// joined classpath string from its entries and prepends it to HADOOP_CLASSPATH
// in the env map. Each entry is stat'd on the LOCAL file system
// (FileSystem.getLocal + getFileStatus — may throw IOException if missing);
// directory entries additionally get File.separator + "*" appended so the
// whole directory's jars are matched via the classpath wildcard.
private static void handleHadoopClasspathExtras(org.apache.hadoop.conf.Configuration, java.util.Map) throws java.io.IOException
{
java.lang.String[] v;
org.apache.hadoop.fs.FileStatus v;
java.util.Map v;
org.apache.hadoop.conf.Configuration v;
int v, v, v, v;
org.apache.hadoop.fs.Path v;
java.lang.String v, v, v, v, v, v, v;
boolean v, v;
org.slf4j.Logger v;
java.lang.StringBuilder v, v;
org.apache.hadoop.fs.LocalFileSystem v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.util.Map;
// Early return when the property is unset/empty.
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.hadoop.classpath.extras");
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: boolean isset(java.lang.String)>(v);
if v != 0 goto label;
return;
label:
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.hadoop.classpath.extras");
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("templeton.hadoop.classpath.extras=\u0001");
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String[] getStrings(java.lang.String)>("templeton.hadoop.classpath.extras");
v = new java.lang.StringBuilder;
specialinvoke v.<java.lang.StringBuilder: void <init>()>();
v = staticinvoke <org.apache.hadoop.fs.FileSystem: org.apache.hadoop.fs.LocalFileSystem getLocal(org.apache.hadoop.conf.Configuration)>(v);
v = lengthof v;
v = 0;
label:
// For each classpath entry: append it; if it is a directory, append "/*".
if v >= v goto label;
v = v[v];
v = new org.apache.hadoop.fs.Path;
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.fs.FileSystem: org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path)>(v);
virtualinvoke v.<java.lang.StringBuilder: java.lang.StringBuilder append(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.fs.FileStatus: boolean isDir()>();
if v == 0 goto label;
v = <java.io.File: java.lang.String separator>;
v = virtualinvoke v.<java.lang.StringBuilder: java.lang.StringBuilder append(java.lang.String)>(v);
virtualinvoke v.<java.lang.StringBuilder: java.lang.StringBuilder append(java.lang.String)>("*");
label:
v = <java.io.File: java.lang.String pathSeparator>;
virtualinvoke v.<java.lang.StringBuilder: java.lang.StringBuilder append(java.lang.String)>(v);
v = v + 1;
goto label;
label:
// Trim the trailing separator, then prepend to HADOOP_CLASSPATH.
v = virtualinvoke v.<java.lang.StringBuilder: int length()>();
v = v - 1;
virtualinvoke v.<java.lang.StringBuilder: void setLength(int)>(v);
v = virtualinvoke v.<java.lang.StringBuilder: java.lang.String toString()>();
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void prependPathToVariable(java.lang.String,java.util.Map,java.lang.String)>("HADOOP_CLASSPATH", v, v);
return;
}
// Prepends a path string to an environment variable, choosing the existing
// value from the env map first, then the process environment, else using the
// new value alone. Parameters (in order): variable name, env map, path to add.
// Resulting value is "<pathToAdd><File.pathSeparator><existing>" when an
// existing value is found. No-op if any argument is unset/null.
private static void prependPathToVariable(java.lang.String, java.util.Map, java.lang.String)
{
java.util.Map v;
java.lang.Object v, v;
java.lang.String v, v, v, v, v, v, v, v;
boolean v, v, v, v;
v := @parameter: java.lang.String;
v := @parameter: java.util.Map;
v := @parameter: java.lang.String;
// Guard: require non-empty variable name, non-empty path, non-null map.
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: boolean isset(java.lang.String)>(v);
if v == 0 goto label;
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: boolean isset(java.lang.String)>(v);
if v == 0 goto label;
if v != null goto label;
label:
return;
label:
// Case 1: variable already present in the env map — prepend to it.
v = interfaceinvoke v.<java.util.Map: java.lang.Object get(java.lang.Object)>(v);
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: boolean isset(java.lang.String)>(v);
if v == 0 goto label;
v = <java.io.File: java.lang.String pathSeparator>;
v = interfaceinvoke v.<java.util.Map: java.lang.Object get(java.lang.Object)>(v);
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.String,java.lang.Object)>(v, v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u0001\u0001\u0001");
interfaceinvoke v.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>(v, v);
goto label;
label:
// Case 2: variable set in the process environment — prepend to that value.
v = staticinvoke <java.lang.System: java.lang.String getenv(java.lang.String)>(v);
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: boolean isset(java.lang.String)>(v);
if v == 0 goto label;
v = <java.io.File: java.lang.String pathSeparator>;
v = staticinvoke <java.lang.System: java.lang.String getenv(java.lang.String)>(v);
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.String,java.lang.String)>(v, v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u0001\u0001\u0001");
interfaceinvoke v.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>(v, v);
goto label;
label:
// Case 3: no existing value anywhere — set the variable to the path alone.
interfaceinvoke v.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>(v, v);
label:
return;
}
// Launches the actual user job as a child process. Steps visible below:
//   1. copyLocal("templeton.copy", conf): localize files listed in the config.
//   2. Decode the launch command from "templeton.args" (TempletonUtils.decodeArray).
//   3. Build a removal list of env names (HADOOP_ROOT_LOGGER, hadoop-command,
//      CLASS, mapredcommand) and the user env via TempletonUtils.hadoopUserEnv.
//   4. Apply Pig/Sqoop/classpath env fix-ups (handlePigEnvVars, handleSqoop,
//      handleHadoopClasspathExtras).
//   5. Substitute the MR and Tez token-file placeholders in the arg list
//      (handleTokenFile) for "mapreduce.job.credentials.binary" and
//      "tez.credentials.path".
//   6. For HIVE jobs only: copy the "hive" delegation token from the mapper's
//      credentials into a fresh Credentials object, write it to a temp file,
//      export HADOOP_TOKEN_FILE_LOCATION pointing at it, and substitute the
//      hive.query.tag placeholder; otherwise substitute mapreduce.job.tags.
//   7. Run the command through TrivialExecService and return the Process.
// NOTE(review): restyling skipped — the credential/placeholder ordering is
// order-sensitive and all locals are mangled to 'v'.
protected java.lang.Process startJob(org.apache.hadoop.mapreduce.Mapper$Context, java.lang.String, java.lang.String, java.lang.String, org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType) throws java.io.IOException, java.lang.InterruptedException
{
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
org.apache.hadoop.io.Text v, v;
java.lang.String[] v;
java.util.ArrayList v;
org.apache.hadoop.mapreduce.Mapper$Context v;
java.util.Map v;
org.apache.hadoop.conf.Configuration v;
org.apache.hadoop.fs.Path v;
java.lang.String v, v, v, v, v;
java.net.URI v, v;
java.util.LinkedList v;
org.apache.hadoop.security.Credentials v, v;
org.apache.hive.hcatalog.templeton.tool.TrivialExecService v;
java.lang.Process v;
org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType v, v;
org.apache.hadoop.security.token.Token v;
java.io.File v;
java.util.List v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: org.apache.hadoop.mapreduce.Mapper$Context;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType;
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.conf.Configuration getConfiguration()>();
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void copyLocal(java.lang.String,org.apache.hadoop.conf.Configuration)>("templeton.copy", v);
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.args");
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: java.lang.String[] decodeArray(java.lang.String)>(v);
// Environment variables to strip/override before launching the child.
v = new java.util.ArrayList;
specialinvoke v.<java.util.ArrayList: void <init>()>();
virtualinvoke v.<java.util.ArrayList: boolean add(java.lang.Object)>("HADOOP_ROOT_LOGGER");
virtualinvoke v.<java.util.ArrayList: boolean add(java.lang.Object)>("hadoop-command");
virtualinvoke v.<java.util.ArrayList: boolean add(java.lang.Object)>("CLASS");
virtualinvoke v.<java.util.ArrayList: boolean add(java.lang.Object)>("mapredcommand");
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: java.util.Map hadoopUserEnv(java.lang.String,java.lang.String)>(v, v);
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void handlePigEnvVars(org.apache.hadoop.conf.Configuration,java.util.Map)>(v, v);
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void handleSqoop(org.apache.hadoop.conf.Configuration,java.util.Map)>(v, v);
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void handleHadoopClasspathExtras(org.apache.hadoop.conf.Configuration,java.util.Map)>(v, v);
v = new java.util.LinkedList;
v = staticinvoke <java.util.Arrays: java.util.List asList(java.lang.Object[])>(v);
specialinvoke v.<java.util.LinkedList: void <init>(java.util.Collection)>(v);
// Rewrite (or drop) the token-file placeholder options in the launch args.
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void handleTokenFile(java.util.List,java.lang.String,java.lang.String)>(v, "__MR_JOB_CREDENTIALS_OPTION=WEBHCAT_TOKEN_FILE_LOCATION__", "mapreduce.job.credentials.binary");
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void handleTokenFile(java.util.List,java.lang.String,java.lang.String)>(v, "__TEZ_CREDENTIALS_OPTION=WEBHCAT_TOKEN_FILE_LOCATION_TEZ__", "tez.credentials.path");
v = <org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType: org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType HIVE>;
if v != v goto label;
// HIVE branch: persist the "hive" delegation token for the child process.
v = new org.apache.hadoop.security.Credentials;
specialinvoke v.<org.apache.hadoop.security.Credentials: void <init>()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.security.Credentials getCredentials()>();
v = new org.apache.hadoop.io.Text;
specialinvoke v.<org.apache.hadoop.io.Text: void <init>(java.lang.String)>("hive");
v = virtualinvoke v.<org.apache.hadoop.security.Credentials: org.apache.hadoop.security.token.Token getToken(org.apache.hadoop.io.Text)>(v);
v = new org.apache.hadoop.io.Text;
specialinvoke v.<org.apache.hadoop.io.Text: void <init>(java.lang.String)>("hive");
virtualinvoke v.<org.apache.hadoop.security.Credentials: void addToken(org.apache.hadoop.io.Text,org.apache.hadoop.security.token.Token)>(v, v);
// Token file is a temp file; child finds it via HADOOP_TOKEN_FILE_LOCATION.
v = staticinvoke <java.io.File: java.io.File createTempFile(java.lang.String,java.lang.String)>("templeton", null);
v = new org.apache.hadoop.fs.Path;
v = virtualinvoke v.<java.io.File: java.net.URI toURI()>();
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.net.URI)>(v);
virtualinvoke v.<org.apache.hadoop.security.Credentials: void writeTokenStorageFile(org.apache.hadoop.fs.Path,org.apache.hadoop.conf.Configuration)>(v, v);
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.net.URI toUri()>();
v = virtualinvoke v.<java.net.URI: java.lang.String getPath()>();
interfaceinvoke v.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>("HADOOP_TOKEN_FILE_LOCATION", v);
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void replaceJobTag(java.util.List,java.lang.String,java.lang.String,java.lang.String)>(v, "__HIVE_QUERY_TAG_OPTION=HIVE_QUERY_TAG_JOBID__", "hive.query.tag", v);
goto label;
label:
// Non-HIVE branch: tag child MR jobs with this launcher's job id.
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void replaceJobTag(java.util.List,java.lang.String,java.lang.String,java.lang.String)>(v, "__MR_JOB_TAGS_OPTION=MR_JOB_TAGS_JOBID__", "mapreduce.job.tags", v);
label:
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TrivialExecService: org.apache.hive.hcatalog.templeton.tool.TrivialExecService getInstance()>();
v = virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.TrivialExecService: java.lang.Process run(java.util.List,java.util.List,java.util.Map)>(v, v, v);
return v;
}
// Kills all child jobs tagged with the given tag (second parameter) that were
// submitted after this launcher's start time (getTempletonLaunchTime), using
// the WebHCatJTShim for the current user. The label/catch structure below is
// a compiled try/finally: the shim is close()d on both the normal path and
// when killJobs throws (Throwable is rethrown after close()).
private void killLauncherChildJobs(org.apache.hadoop.conf.Configuration, java.lang.String) throws java.io.IOException
{
java.lang.Throwable v;
org.apache.hadoop.hive.shims.HadoopShims v;
org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim v;
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
long v;
org.apache.hadoop.security.UserGroupInformation v;
org.apache.hadoop.conf.Configuration v;
java.lang.String v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.lang.String;
v = specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: long getTempletonLaunchTime(org.apache.hadoop.conf.Configuration)>(v);
v = staticinvoke <org.apache.hadoop.security.UserGroupInformation: org.apache.hadoop.security.UserGroupInformation getCurrentUser()>();
v = staticinvoke <org.apache.hadoop.hive.shims.ShimLoader: org.apache.hadoop.hive.shims.HadoopShims getHadoopShims()>();
v = interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims: org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim getWebHCatShim(org.apache.hadoop.conf.Configuration,org.apache.hadoop.security.UserGroupInformation)>(v, v);
label:
// try { shim.killJobs(tag, launchTime); }
interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim: void killJobs(java.lang.String,long)>(v, v);
label:
// finally (normal path): close the shim.
interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim: void close()>();
goto label;
label:
// finally (exceptional path): close the shim, then rethrow.
v := @caughtexception;
interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim: void close()>();
throw v;
label:
return;
catch java.lang.Throwable from label to label with label;
}
// Reads the launcher's submit time from the "templeton.job.launch.time"
// property and returns it as a long.
// Throws RuntimeException wrapping NumberFormatException if the value is
// non-numeric, and RuntimeException("Launch time property ... not found")
// if the value is absent/empty (the accumulator is still 0L).
private long getTempletonLaunchTime(org.apache.hadoop.conf.Configuration)
{
java.lang.Object[] v;
org.slf4j.Logger v;
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
java.lang.NumberFormatException v;
long v;
java.lang.RuntimeException v, v;
byte v;
org.apache.hadoop.conf.Configuration v;
int v;
java.lang.String v, v, v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: org.apache.hadoop.conf.Configuration;
v = 0L;
label:
// try { parse "templeton.job.launch.time" when present and non-empty }
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.job.launch.time");
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Launch time = \u0001");
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>(v);
if v == null goto label;
v = virtualinvoke v.<java.lang.String: int length()>();
if v <= 0 goto label;
v = staticinvoke <java.lang.Long: long parseLong(java.lang.String)>(v);
label:
goto label;
label:
// catch (NumberFormatException): wrap with context and rethrow unchecked.
v := @caughtexception;
v = new java.lang.RuntimeException;
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.String,java.lang.Throwable)>("Could not parse Templeton job launch time", v);
throw v;
label:
// A zero launch time means the property was never set — fail loudly.
v = v cmp 0L;
if v != 0 goto label;
v = new java.lang.RuntimeException;
v = newarray (java.lang.Object)[1];
v[0] = "templeton.job.launch.time";
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("Launch time property \'%s\' not found", v);
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.String)>(v);
throw v;
label:
return v;
catch java.lang.NumberFormatException from label to label with label;
}
// Rewrites a delegation-token placeholder in the launch-arg list.
// Parameters: (args list, placeholder token string, property name).
// If HADOOP_TOKEN_FILE_LOCATION is set in this process's environment, each arg
// has the placeholder replaced with "<propName>=<location>" (location with any
// double quotes stripped). If the env var is NOT set, args containing the
// placeholder are removed from the list entirely (iterator removal loop).
private static void handleTokenFile(java.util.List, java.lang.String, java.lang.String) throws java.io.IOException
{
java.util.Iterator v;
java.util.List v;
int v, v;
java.lang.Object v, v;
java.lang.String v, v, v, v, v, v;
boolean v, v;
v := @parameter: java.util.List;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v = staticinvoke <java.lang.System: java.lang.String getenv(java.lang.String)>("HADOOP_TOKEN_FILE_LOCATION");
if v == null goto label;
// Token file available: build "propName=location" (quotes stripped) and
// substitute the placeholder in every arg in place.
v = virtualinvoke v.<java.lang.String: java.lang.String replaceAll(java.lang.String,java.lang.String)>("\"", "");
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.String)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u0001=\u0001");
v = 0;
label:
v = interfaceinvoke v.<java.util.List: int size()>();
if v >= v goto label;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
v = virtualinvoke v.<java.lang.String: java.lang.String replace(java.lang.CharSequence,java.lang.CharSequence)>(v, v);
interfaceinvoke v.<java.util.List: java.lang.Object set(int,java.lang.Object)>(v, v);
v = v + 1;
goto label;
label:
// No token file: drop any arg that still contains the placeholder.
v = interfaceinvoke v.<java.util.List: java.util.Iterator iterator()>();
label:
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
if v == 0 goto label;
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = virtualinvoke v.<java.lang.String: boolean contains(java.lang.CharSequence)>(v);
if v == 0 goto label;
interfaceinvoke v.<java.util.Iterator: void remove()>();
goto label;
label:
return;
}
// Replaces a job-tag placeholder in the launch args with "<propName>=<jobId>".
// Parameters: (args list, placeholder, property name, job id).
// Scans the list for the FIRST arg containing the placeholder, substitutes it
// in place, and returns. If no arg contains the placeholder, throws
// RuntimeException — the placeholder is expected to always be present.
private static void replaceJobTag(java.util.List, java.lang.String, java.lang.String, java.lang.String) throws java.io.IOException
{
java.lang.Object[] v, v;
java.lang.RuntimeException v;
java.util.List v;
int v, v;
java.lang.Object v, v;
java.lang.String v, v, v, v, v, v;
boolean v;
v := @parameter: java.util.List;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
// Replacement text: "propName=jobId".
v = newarray (java.lang.Object)[2];
v[0] = v;
v[1] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("%s=%s", v);
v = 0;
label:
v = interfaceinvoke v.<java.util.List: int size()>();
if v >= v goto label;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
v = virtualinvoke v.<java.lang.String: boolean contains(java.lang.CharSequence)>(v);
if v == 0 goto label;
// Found: substitute and stop after the first match.
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
v = virtualinvoke v.<java.lang.String: java.lang.String replace(java.lang.CharSequence,java.lang.CharSequence)>(v, v);
interfaceinvoke v.<java.util.List: java.lang.Object set(int,java.lang.Object)>(v, v);
return;
label:
v = v + 1;
goto label;
label:
// Placeholder missing from the launcher args — treat as an internal error.
v = new java.lang.RuntimeException;
v = newarray (java.lang.Object)[1];
v[0] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("Unexpected Error: Tag \'%s\' not found in the list of launcher args", v);
specialinvoke v.<java.lang.RuntimeException: void <init>(java.lang.String)>(v);
throw v;
}
// Copies every file listed (encoded array) under the given Configuration
// property to the task's working directory. For each source path, the
// destination is a relative Path built from the source's file name, and the
// copy goes through the source path's own FileSystem (copyToLocalFile).
// No-op when the property is unset (decodeArray returns null).
// FIX: the makeConcatWithConstants recipe for the log line was garbled as
// "templeton: copy \u => \u0001" — a bare "\u" is a malformed escape and the
// call site passes TWO arguments, so the recipe needs two "\u0001" argument
// markers: "templeton: copy \u0001 => \u0001".
private void copyLocal(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException
{
org.slf4j.Logger v;
org.apache.hadoop.fs.FileSystem v;
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
java.lang.String[] v;
org.apache.hadoop.conf.Configuration v;
int v, v;
org.apache.hadoop.fs.Path v, v;
java.lang.String v, v, v, v, v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: java.lang.String;
v := @parameter: org.apache.hadoop.conf.Configuration;
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>(v);
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: java.lang.String[] decodeArray(java.lang.String)>(v);
if v == null goto label;
v = lengthof v;
v = 0;
label:
// For each encoded source path: copy it next to the task (by file name).
if v >= v goto label;
v = v[v];
v = new org.apache.hadoop.fs.Path;
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String)>(v);
v = new org.apache.hadoop.fs.Path;
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String getName()>();
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.fs.Path: org.apache.hadoop.fs.FileSystem getFileSystem(org.apache.hadoop.conf.Configuration)>(v);
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (org.apache.hadoop.fs.Path,org.apache.hadoop.fs.Path)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("templeton: copy \u0001 => \u0001");
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>(v);
virtualinvoke v.<org.apache.hadoop.fs.FileSystem: void copyToLocalFile(org.apache.hadoop.fs.Path,org.apache.hadoop.fs.Path)>(v, v);
v = v + 1;
goto label;
label:
return;
}
// Returns true only when job reconnect is both enabled and supported:
//   - "templeton.enablejobreconnect" must be present and parse as true
//     (Boolean.parseBoolean), and
//   - the job type must be JAR or STREAMING (the only types where reconnect
//     is attempted by this launcher).
// Returns false otherwise.
private boolean reconnectToRunningJobEnabledAndSupported(org.apache.hadoop.conf.Configuration, org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType)
{
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType v, v, v;
org.apache.hadoop.conf.Configuration v;
java.lang.Boolean v;
java.lang.String v, v;
boolean v, v, v, v, v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType;
// Property absent -> reconnect disabled.
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.enablejobreconnect");
if v != null goto label;
return 0;
label:
// Property present but not "true" -> disabled.
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.enablejobreconnect");
v = staticinvoke <java.lang.Boolean: boolean parseBoolean(java.lang.String)>(v);
v = staticinvoke <java.lang.Boolean: java.lang.Boolean valueOf(boolean)>(v);
v = virtualinvoke v.<java.lang.Boolean: boolean booleanValue()>();
if v != 0 goto label;
return 0;
label:
// Supported job types: JAR or STREAMING.
v = <org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType: org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType JAR>;
v = virtualinvoke v.<org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType: boolean equals(java.lang.Object)>(v);
if v != 0 goto label;
v = <org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType: org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType STREAMING>;
v = virtualinvoke v.<org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType: boolean equals(java.lang.Object)>(v);
if v == 0 goto label;
label:
v = 1;
goto label;
label:
v = 0;
label:
return v;
}
// Attempts to reattach to a child job started by a previous attempt of this
// launcher (e.g. after the launcher task was restarted), instead of starting
// a new one. Returns true when reconnection happened and the child completed;
// false when reconnect is disabled/unsupported or no single child was found.
// Flow:
//   1. Bail out (false) unless reconnectToRunningJobEnabledAndSupported.
//   2. Query the WebHCatJTShim for jobs tagged with this launcher's job id
//      that started after getTempletonLaunchTime.
//   3. 0 matches -> log + false; >1 matches -> warn + false (ambiguous).
//   4. Exactly one match: record its id via updateJobStatePercentAndChildId,
//      then poll getJobStatus in a loop: while incomplete, publish
//      "map X%, reduce Y%" progress, heart-beat the mapper context
//      (progress()) and sleep 30s; when complete, write the exit value
//      (0 on SUCCEEDED, 1 otherwise) via updateJobStateToDoneAndWriteExitValue
//      and return true.
// The catch table implements: IOException regions are logged
// ("Exception encountered in tryReconnectToRunningJob") and rethrown; a
// Throwable handler close()s the shim on every exit path (compiled
// try/finally). NOTE(review): restyling skipped — three overlapping
// exception regions over mangled locals make a rewrite too risky.
private boolean tryReconnectToRunningJob(org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapreduce.Mapper$Context, org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType, java.lang.String) throws java.io.IOException, java.lang.InterruptedException
{
java.lang.Float v, v;
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
org.apache.hadoop.mapred.JobID v;
byte v;
org.apache.hadoop.mapreduce.Mapper$Context v;
org.apache.hadoop.conf.Configuration v;
boolean v, v;
org.apache.hadoop.hive.shims.HadoopShims v;
java.util.Set v;
org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType v;
float v, v, v, v;
java.lang.Throwable v;
org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim v;
java.lang.Object[] v, v, v, v, v;
long v;
org.apache.hadoop.mapred.JobStatus v;
org.apache.hadoop.security.UserGroupInformation v;
int v, v, v, v;
java.lang.String v, v, v, v, v, v, v, v, v, v;
org.apache.hadoop.mapreduce.JobID v, v, v, v;
org.slf4j.Logger v, v, v, v, v, v;
java.util.Iterator v;
java.io.IOException v;
java.lang.Object v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: org.apache.hadoop.mapreduce.Mapper$Context;
v := @parameter: org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType;
v := @parameter: java.lang.String;
v = specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: boolean reconnectToRunningJobEnabledAndSupported(org.apache.hadoop.conf.Configuration,org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType)>(v, v);
if v != 0 goto label;
return 0;
label:
v = specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: long getTempletonLaunchTime(org.apache.hadoop.conf.Configuration)>(v);
v = staticinvoke <org.apache.hadoop.security.UserGroupInformation: org.apache.hadoop.security.UserGroupInformation getCurrentUser()>();
v = staticinvoke <org.apache.hadoop.hive.shims.ShimLoader: org.apache.hadoop.hive.shims.HadoopShims getHadoopShims()>();
v = interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims: org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim getWebHCatShim(org.apache.hadoop.conf.Configuration,org.apache.hadoop.security.UserGroupInformation)>(v, v);
label:
// Look up child jobs tagged with this launcher's own job id.
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.JobID: java.lang.String toString()>();
v = interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim: java.util.Set getJobs(java.lang.String,long)>(v, v);
v = interfaceinvoke v.<java.util.Set: int size()>();
if v != 0 goto label;
// No children found: nothing to reconnect to.
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>("No child jobs found to reconnect with");
label:
interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim: void close()>();
return 0;
label:
// More than one child: ambiguous, skip reconnect.
v = interfaceinvoke v.<java.util.Set: int size()>();
if v <= 1 goto label;
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
v = newarray (java.lang.Object)[1];
v = interfaceinvoke v.<java.util.Set: java.lang.Object[] toArray()>();
v = staticinvoke <java.util.Arrays: java.lang.String toString(java.lang.Object[])>(v);
v[0] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("Found more than one child job to reconnect with: %s, skipping reconnect", v);
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String)>(v);
label:
interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim: void close()>();
return 0;
label:
// Exactly one child: reconnect to it and record its id as the child job.
v = interfaceinvoke v.<java.util.Set: java.util.Iterator iterator()>();
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = staticinvoke <org.apache.hadoop.mapred.JobID: org.apache.hadoop.mapred.JobID forName(java.lang.String)>(v);
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
v = newarray (java.lang.Object)[1];
v[0] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("Reconnecting to an existing job %s", v);
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.JobID: java.lang.String toString()>();
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void updateJobStatePercentAndChildId(org.apache.hadoop.conf.Configuration,java.lang.String,java.lang.String,java.lang.String)>(v, v, null, v);
label:
// Poll loop: runs until the child job reports completion.
v = interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim: org.apache.hadoop.mapred.JobStatus getJobStatus(org.apache.hadoop.mapred.JobID)>(v);
v = virtualinvoke v.<org.apache.hadoop.mapred.JobStatus: boolean isJobComplete()>();
if v == 0 goto label;
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
v = newarray (java.lang.Object)[1];
v[0] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("Child job %s completed", v);
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>(v);
// Exit value: 0 on SUCCEEDED, 1 for any other run state.
v = 0;
v = virtualinvoke v.<org.apache.hadoop.mapred.JobStatus: int getRunState()>();
v = <org.apache.hadoop.mapred.JobStatus: int SUCCEEDED>;
if v == v goto label;
v = 1;
label:
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.JobID: java.lang.String toString()>();
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void updateJobStateToDoneAndWriteExitValue(org.apache.hadoop.conf.Configuration,java.lang.String,java.lang.String,int)>(v, v, v, v);
goto label;
label:
// Child still running: publish progress percentages and heart-beat.
v = newarray (java.lang.Object)[2];
v = virtualinvoke v.<org.apache.hadoop.mapred.JobStatus: float mapProgress()>();
v = v * 100.0F;
v = staticinvoke <java.lang.Float: java.lang.Float valueOf(float)>(v);
v[0] = v;
v = virtualinvoke v.<org.apache.hadoop.mapred.JobStatus: float reduceProgress()>();
v = v * 100.0F;
v = staticinvoke <java.lang.Float: java.lang.Float valueOf(float)>(v);
v[1] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("map %s%%, reduce %s%%", v);
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.JobID: java.lang.String toString()>();
staticinvoke <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void updateJobStatePercentAndChildId(org.apache.hadoop.conf.Configuration,java.lang.String,java.lang.String,java.lang.String)>(v, v, v, null);
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>("KeepAlive Heart beat");
virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: void progress()>();
staticinvoke <java.lang.Thread: void sleep(long)>(30000L);
goto label;
label:
// Normal exit: close the shim and report successful reconnection.
interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim: void close()>();
return 1;
label:
// IOException handler: log with stack trace, then rethrow.
v := @caughtexception;
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
interfaceinvoke v.<org.slf4j.Logger: void error(java.lang.String,java.lang.Throwable)>("Exception encountered in tryReconnectToRunningJob", v);
throw v;
label:
// Throwable handler (compiled finally): close the shim, then rethrow.
v := @caughtexception;
interfaceinvoke v.<org.apache.hadoop.hive.shims.HadoopShims$WebHCatJTShim: void close()>();
throw v;
catch java.io.IOException from label to label with label;
catch java.io.IOException from label to label with label;
catch java.io.IOException from label to label with label;
catch java.lang.Throwable from label to label with label;
catch java.lang.Throwable from label to label with label;
catch java.lang.Throwable from label to label with label;
}
// Mapper entry point (overrides Mapper.run). Orchestrates the whole WebHCat launcher
// lifecycle for a submitted job: resolve the status dir, try to re-attach to an
// already-running child job, otherwise kill stale children, launch the child process,
// wire up stdout/stderr watchers and a progress keep-alive, wait for completion, and
// record the exit value (plus logs, if enabled).
// NOTE(review): this is register-collapsed Jimple — every local is printed as `v`;
// distinct uses of `v` below are distinct bytecode locals.
public void run(org.apache.hadoop.mapreduce.Mapper$Context) throws java.io.IOException, java.lang.InterruptedException
{
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
java.net.URISyntaxException v;
org.apache.hadoop.mapreduce.Mapper$Context v;
org.apache.hadoop.conf.Configuration v;
boolean v, v, v, v, v;
java.lang.Process v;
org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType v;
org.apache.hive.hcatalog.templeton.tool.LaunchMapper$KeepAlive v;
int v;
java.lang.Boolean v;
java.lang.String v, v, v, v, v, v, v, v, v, v, v, v, v;
org.apache.hadoop.mapreduce.JobID v, v, v, v, v, v, v;
java.util.concurrent.ExecutorService v;
org.apache.hive.hcatalog.templeton.tool.LogRetriever v;
org.slf4j.Logger v, v;
java.io.IOException v;
org.apache.hive.hcatalog.templeton.tool.JobState v;
java.util.concurrent.TimeUnit v;
java.io.InputStream v, v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: org.apache.hadoop.mapreduce.Mapper$Context;
// Read job type (enum name) and optional status directory from the job conf.
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.conf.Configuration getConfiguration()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.jobtype");
v = staticinvoke <org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType: org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType valueOf(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.statusdir");
if v == null goto label;
label:
// Qualify a relative status dir against the submitting user's home directory.
// URISyntaxException from this step is wrapped as IOException (trap at L720).
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("user.name");
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: java.lang.String addUserHomeDirectoryIfApplicable(java.lang.String,java.lang.String)>(v, v);
label:
goto label;
label:
// URISyntaxException handler: log and rethrow as IOException with the cause preserved.
v := @caughtexception;
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
interfaceinvoke v.<org.slf4j.Logger: void error(java.lang.String,java.lang.Throwable)>("Invalid status dir URI", v);
v = new java.io.IOException;
specialinvoke v.<java.io.IOException: void <init>(java.lang.String,java.lang.Throwable)>("Invalid status dir URI", v);
throw v;
label:
// If a previously-launched child job is still running (e.g. launcher task retry),
// re-attach to it and return early instead of starting a duplicate.
v = specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: boolean tryReconnectToRunningJob(org.apache.hadoop.conf.Configuration,org.apache.hadoop.mapreduce.Mapper$Context,org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType,java.lang.String)>(v, v, v, v);
if v == 0 goto label;
return;
label:
// No live child found: kill any stale children of this launcher, then start the job.
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.JobID: java.lang.String toString()>();
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void killLauncherChildJobs(org.apache.hadoop.conf.Configuration,java.lang.String)>(v, v);
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.JobID: java.lang.String toString()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("user.name");
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.override-classpath");
v = virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: java.lang.Process startJob(org.apache.hadoop.mapreduce.Mapper$Context,java.lang.String,java.lang.String,java.lang.String,org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType)>(v, v, v, v, v);
// Persist the job type into the JobState store keyed by this launcher's job id.
v = new org.apache.hive.hcatalog.templeton.tool.JobState;
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.JobID: java.lang.String toString()>();
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void <init>(java.lang.String,org.apache.hadoop.conf.Configuration)>(v, v);
v = virtualinvoke v.<org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType: java.lang.String toString()>();
virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void setJobType(java.lang.String)>(v);
virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void close()>();
// Pump child stdout/stderr through Watcher tasks and keep the launcher task alive
// (via KeepAlive) while the child process runs.
v = staticinvoke <java.util.concurrent.Executors: java.util.concurrent.ExecutorService newCachedThreadPool()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<java.lang.Process: java.io.InputStream getInputStream()>();
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void executeWatcher(java.util.concurrent.ExecutorService,org.apache.hadoop.conf.Configuration,org.apache.hadoop.mapreduce.JobID,java.io.InputStream,java.lang.String,java.lang.String)>(v, v, v, v, v, "stdout");
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<java.lang.Process: java.io.InputStream getErrorStream()>();
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void executeWatcher(java.util.concurrent.ExecutorService,org.apache.hadoop.conf.Configuration,org.apache.hadoop.mapreduce.JobID,java.io.InputStream,java.lang.String,java.lang.String)>(v, v, v, v, v, "stderr");
v = specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.apache.hive.hcatalog.templeton.tool.LaunchMapper$KeepAlive startCounterKeepAlive(java.util.concurrent.ExecutorService,org.apache.hadoop.mapreduce.Mapper$Context)>(v, v);
// Block until the child exits, then stop the keep-alive and drain the pool
// (10s grace before forcing shutdownNow).
virtualinvoke v.<java.lang.Process: int waitFor()>();
v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper$KeepAlive: boolean sendReport> = 0;
interfaceinvoke v.<java.util.concurrent.ExecutorService: void shutdown()>();
v = <java.util.concurrent.TimeUnit: java.util.concurrent.TimeUnit SECONDS>;
v = interfaceinvoke v.<java.util.concurrent.ExecutorService: boolean awaitTermination(long,java.util.concurrent.TimeUnit)>(10L, v);
if v != 0 goto label;
interfaceinvoke v.<java.util.concurrent.ExecutorService: java.util.List shutdownNow()>();
label:
// Record the child's exit code in the JobState store and the status dir.
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.JobID: java.lang.String toString()>();
v = virtualinvoke v.<java.lang.Process: int exitValue()>();
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void updateJobStateToDoneAndWriteExitValue(org.apache.hadoop.conf.Configuration,java.lang.String,java.lang.String,int)>(v, v, v, v);
// Optionally collect child job logs into <statusdir>/logs when
// templeton.enablelog is true and a status dir was configured.
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("templeton.enablelog");
v = staticinvoke <java.lang.Boolean: boolean parseBoolean(java.lang.String)>(v);
v = staticinvoke <java.lang.Boolean: java.lang.Boolean valueOf(boolean)>(v);
v = virtualinvoke v.<java.lang.Boolean: boolean booleanValue()>();
if v == 0 goto label;
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: boolean isset(java.lang.String)>(v);
if v == 0 goto label;
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
v = virtualinvoke v.<org.apache.hadoop.mapreduce.Mapper$Context: org.apache.hadoop.mapreduce.JobID getJobID()>();
v = virtualinvoke v.<org.apache.hadoop.mapreduce.JobID: java.lang.String toString()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.String)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("templeton: collecting logs for \u to \u0001/logs");
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>(v);
v = new org.apache.hive.hcatalog.templeton.tool.LogRetriever;
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LogRetriever: void <init>(java.lang.String,org.apache.hive.hcatalog.templeton.LauncherDelegator$JobType,org.apache.hadoop.conf.Configuration)>(v, v, v);
virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.LogRetriever: void run()>();
label:
return;
// Trap: URISyntaxException raised while resolving the status dir is routed to the
// handler above that rethrows it as IOException.
catch java.net.URISyntaxException from label to label with label;
}
// Marks the job "done" in the JobState store and writes the child's exit code:
// first to the status dir (writeExitValue), then into JobState (setExitValue +
// setCompleteStatus("done")). Logs at info level whether the exit code was 0.
// Parameters (in Jimple order): Configuration conf, String statusdir(?), String jobId,
// int exitCode — NOTE(review): register-collapsed locals; parameter roles inferred
// from the calls below, confirm against original source.
private void updateJobStateToDoneAndWriteExitValue(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, int) throws java.io.IOException
{
org.slf4j.Logger v, v;
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
org.apache.hive.hcatalog.templeton.tool.JobState v;
org.apache.hadoop.conf.Configuration v;
int v;
java.lang.String v, v, v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: int;
// Persist exit code to the status dir (no-op when statusdir is unset).
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: void writeExitValue(org.apache.hadoop.conf.Configuration,int,java.lang.String)>(v, v, v);
v = new org.apache.hive.hcatalog.templeton.tool.JobState;
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void <init>(java.lang.String,org.apache.hadoop.conf.Configuration)>(v, v);
virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void setExitValue(long)>(v);
virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void setCompleteStatus(java.lang.String)>("done");
virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void close()>();
// Log success vs. failure based on the exit code.
if v == 0 goto label;
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("templeton: job failed with exit code \u0001");
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>(v);
goto label;
label:
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>("templeton: job completed with exit code 0");
label:
return;
}
// Best-effort update of the JobState store with the current completion percentage
// and/or a newly-discovered child job id. Skips all work when both the percent
// string and the child id are null. Any IOException during the update is logged
// and swallowed (deliberately best-effort — progress reporting must not kill the
// launcher); the Throwable trap below looks like a compiled finally/cleanup path
// that rethrows — NOTE(review): confirm against original source.
private static void updateJobStatePercentAndChildId(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, java.lang.String)
{
java.lang.Throwable v;
org.slf4j.Logger v;
java.io.IOException v;
org.apache.hive.hcatalog.templeton.tool.JobState v, v, v;
org.apache.hadoop.conf.Configuration v;
java.lang.String v, v, v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
label:
// Nothing to record: both percent and child id are null.
if v != null goto label;
if v == null goto label;
label:
v = new org.apache.hive.hcatalog.templeton.tool.JobState;
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void <init>(java.lang.String,org.apache.hadoop.conf.Configuration)>(v, v);
// Record percent-complete if one was supplied.
if v == null goto label;
virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void setPercentComplete(java.lang.String)>(v);
label:
// If a child job id was supplied, link parent and child JobStates both ways.
if v == null goto label;
v = new org.apache.hive.hcatalog.templeton.tool.JobState;
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void <init>(java.lang.String,org.apache.hadoop.conf.Configuration)>(v, v);
virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void setParent(java.lang.String)>(v);
virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void addChild(java.lang.String)>(v);
virtualinvoke v.<org.apache.hive.hcatalog.templeton.tool.JobState: void close()>();
label:
goto label;
label:
// IOException handler: log and continue (best-effort update, not fatal).
v := @caughtexception;
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
interfaceinvoke v.<org.slf4j.Logger: void error(java.lang.String,java.lang.Throwable)>("templeton: state error: ", v);
label:
goto label;
label:
// Throwable handler: rethrow (compiled cleanup path).
v := @caughtexception;
throw v;
label:
return;
catch java.io.IOException from label to label with label;
catch java.lang.Throwable from label to label with label;
catch java.lang.Throwable from label to label with label;
}
// Submits a Watcher task to the given executor. The Watcher consumes one of the
// child process's output streams (the last String parameter is the stream label,
// "stdout" or "stderr" at the call sites in run()) and is constructed with the
// conf, the launcher JobID, the stream, and two String arguments.
private void executeWatcher(java.util.concurrent.ExecutorService, org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapreduce.JobID, java.io.InputStream, java.lang.String, java.lang.String) throws java.io.IOException
{
org.apache.hive.hcatalog.templeton.tool.LaunchMapper$Watcher v;
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
org.apache.hadoop.conf.Configuration v;
java.lang.String v, v;
org.apache.hadoop.mapreduce.JobID v;
java.util.concurrent.ExecutorService v;
java.io.InputStream v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: java.util.concurrent.ExecutorService;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: org.apache.hadoop.mapreduce.JobID;
v := @parameter: java.io.InputStream;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
// Build the Watcher runnable and hand it to the pool; it runs until the stream closes.
v = new org.apache.hive.hcatalog.templeton.tool.LaunchMapper$Watcher;
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper$Watcher: void <init>(org.apache.hadoop.conf.Configuration,org.apache.hadoop.mapreduce.JobID,java.io.InputStream,java.lang.String,java.lang.String)>(v, v, v, v, v);
interfaceinvoke v.<java.util.concurrent.ExecutorService: void execute(java.lang.Runnable)>(v);
return;
}
// Creates a KeepAlive runnable bound to the mapper context, submits it to the
// executor, and returns it so the caller (run()) can later clear its sendReport
// flag to stop the heartbeat once the child process exits.
private org.apache.hive.hcatalog.templeton.tool.LaunchMapper$KeepAlive startCounterKeepAlive(java.util.concurrent.ExecutorService, org.apache.hadoop.mapreduce.Mapper$Context) throws java.io.IOException
{
org.apache.hive.hcatalog.templeton.tool.LaunchMapper$KeepAlive v;
org.apache.hadoop.mapreduce.Mapper$Context v;
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
java.util.concurrent.ExecutorService v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: java.util.concurrent.ExecutorService;
v := @parameter: org.apache.hadoop.mapreduce.Mapper$Context;
v = new org.apache.hive.hcatalog.templeton.tool.LaunchMapper$KeepAlive;
specialinvoke v.<org.apache.hive.hcatalog.templeton.tool.LaunchMapper$KeepAlive: void <init>(org.apache.hadoop.mapreduce.Mapper$Context)>(v);
interfaceinvoke v.<java.util.concurrent.ExecutorService: void execute(java.lang.Runnable)>(v);
return v;
}
// Writes the child process's exit code to the file "exit" under the status dir.
// No-op when the status dir string is unset (TempletonUtils.isset is false).
// NOTE(review): the PrintWriter is closed on the success path only — an exception
// from create/println would leak the stream; confirm against original source
// whether that is acceptable here.
private void writeExitValue(org.apache.hadoop.conf.Configuration, int, java.lang.String) throws java.io.IOException
{
java.io.PrintWriter v;
org.slf4j.Logger v, v;
org.apache.hadoop.fs.FileSystem v;
org.apache.hive.hcatalog.templeton.tool.LaunchMapper v;
org.apache.hadoop.fs.FSDataOutputStream v;
org.apache.hadoop.conf.Configuration v;
int v;
org.apache.hadoop.fs.Path v;
java.lang.String v, v;
boolean v;
v := @this: org.apache.hive.hcatalog.templeton.tool.LaunchMapper;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: int;
v := @parameter: java.lang.String;
// Skip entirely when no status dir was configured.
v = staticinvoke <org.apache.hive.hcatalog.templeton.tool.TempletonUtils: boolean isset(java.lang.String)>(v);
if v == 0 goto label;
// Create <statusdir>/exit on the dir's filesystem and write the code as one line.
v = new org.apache.hadoop.fs.Path;
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String,java.lang.String)>(v, "exit");
v = virtualinvoke v.<org.apache.hadoop.fs.Path: org.apache.hadoop.fs.FileSystem getFileSystem(org.apache.hadoop.conf.Configuration)>(v);
v = virtualinvoke v.<org.apache.hadoop.fs.FileSystem: org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path)>(v);
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int,org.apache.hadoop.fs.Path)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("templeton: Writing exit value \u to \u0001");
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>(v);
v = new java.io.PrintWriter;
specialinvoke v.<java.io.PrintWriter: void <init>(java.io.OutputStream)>(v);
virtualinvoke v.<java.io.PrintWriter: void println(int)>(v);
virtualinvoke v.<java.io.PrintWriter: void close()>();
v = <org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG>;
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>("templeton: Exit value successfully written");
label:
return;
}
// Static initializer: creates the class-wide SLF4J logger stored in LOG.
static void <clinit>()
{
org.slf4j.Logger v;
v = staticinvoke <org.slf4j.LoggerFactory: org.slf4j.Logger getLogger(java.lang.Class)>(class "Lorg/apache/hive/hcatalog/templeton/tool/LaunchMapper;");
<org.apache.hive.hcatalog.templeton.tool.LaunchMapper: org.slf4j.Logger LOG> = v;
return;
}
}