// Decompiled Jimple IR (Soot) of org.apache.hadoop.hive.metastore.utils.HdfsUtils.
// NOTE(review): the decompiler collapsed every local name to "v" and stripped the
// numbers off branch targets ("label"), so data/control flow must be read
// positionally; several string constants below are also truncated by the dump.
public class org.apache.hadoop.hive.metastore.utils.HdfsUtils extends java.lang.Object
{
// Class logger; assigned once in <clinit>.
private static final org.slf4j.Logger LOG;
// Compile-time String constants whose values were inlined at use sites:
// DISTCP_OPTIONS_PREFIX = "distcp.options." (see constructDistCpOptions),
// HDFS_ID_PATH_PREFIX   = "/.reserved/.inodes/" (see getFileIdPath).
private static final java.lang.String DISTCP_OPTIONS_PREFIX;
private static final java.lang.String HDFS_ID_PATH_PREFIX;
// Default constructor: delegates straight to java.lang.Object.<init>.
// The class is a static-utility holder; nothing else is initialized here.
public void <init>()
{
org.apache.hadoop.hive.metastore.utils.HdfsUtils v;
v := @this: org.apache.hadoop.hive.metastore.utils.HdfsUtils;
specialinvoke v.<java.lang.Object: void <init>()>();
return;
}
// Public entry point for permission checking: resolves the current user's
// UserGroupInformation via SecurityUtils.getUGI() and delegates to the
// package-private 4-argument checkFileAccess overload below.
// Throws IOException / LoginException from UGI resolution or the delegate.
public static void checkFileAccess(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.FileStatus, org.apache.hadoop.fs.permission.FsAction) throws java.io.IOException, javax.security.auth.login.LoginException
{
org.apache.hadoop.fs.FileSystem v;
org.apache.hadoop.security.UserGroupInformation v;
org.apache.hadoop.fs.FileStatus v;
org.apache.hadoop.fs.permission.FsAction v;
v := @parameter: org.apache.hadoop.fs.FileSystem;
v := @parameter: org.apache.hadoop.fs.FileStatus;
v := @parameter: org.apache.hadoop.fs.permission.FsAction;
// Resolve the calling user, then run the real check.
v = staticinvoke <org.apache.hadoop.hive.metastore.utils.SecurityUtils: org.apache.hadoop.security.UserGroupInformation getUGI()>();
staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: void checkFileAccess(org.apache.hadoop.fs.FileSystem,org.apache.hadoop.fs.FileStatus,org.apache.hadoop.fs.permission.FsAction,org.apache.hadoop.security.UserGroupInformation)>(v, v, v, v);
return;
}
// Emulates the HDFS permission ladder for the given user against one FileStatus:
//   1. super-group membership  -> always permitted (debug-logged, return);
//   2. file owner              -> user bits must imply the requested action;
//   3. group membership        -> group bits must imply the action;
//   4. otherwise               -> "other" bits must imply the action;
// and throws AccessControlException when none of the above grants access.
static void checkFileAccess(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.FileStatus, org.apache.hadoop.fs.permission.FsAction, org.apache.hadoop.security.UserGroupInformation) throws java.io.IOException
{
org.apache.hadoop.fs.FileSystem v;
java.lang.String[] v;
org.apache.hadoop.fs.FileStatus v;
org.apache.hadoop.fs.permission.FsAction v, v, v, v;
org.apache.hadoop.fs.permission.FsPermission v;
org.apache.hadoop.security.UserGroupInformation v;
org.apache.hadoop.conf.Configuration v;
org.apache.hadoop.fs.Path v;
java.lang.String v, v, v, v, v, v;
boolean v, v, v, v, v, v;
org.slf4j.Logger v;
org.apache.hadoop.security.AccessControlException v;
v := @parameter: org.apache.hadoop.fs.FileSystem;
v := @parameter: org.apache.hadoop.fs.FileStatus;
v := @parameter: org.apache.hadoop.fs.permission.FsAction;
v := @parameter: org.apache.hadoop.security.UserGroupInformation;
v = virtualinvoke v.<org.apache.hadoop.security.UserGroupInformation: java.lang.String getShortUserName()>();
v = virtualinvoke v.<org.apache.hadoop.security.UserGroupInformation: java.lang.String[] getGroupNames()>();
// Step 1: super-group check, only when the FileSystem reference is non-null
// (the super-group name comes from its Configuration).
if v == null goto label;
v = virtualinvoke v.<org.apache.hadoop.fs.FileSystem: org.apache.hadoop.conf.Configuration getConf()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String,java.lang.String)>("dfs.permissions.supergroup", "");
v = staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: boolean arrayContains(java.lang.String[],java.lang.String)>(v, v);
if v == 0 goto label;
v = <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.slf4j.Logger LOG>;
// NOTE(review): the trailing "\u." in this constant is a decompiler-truncated
// \u0001 placeholder for the FsAction argument — the literal is garbled in
// the dump, not necessarily in the original bytecode.
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.String,org.apache.hadoop.fs.permission.FsAction)>(v, v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("User \"\u0001\" belongs to super-group \"\u0001\". Permission granted for action: \u.");
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String)>(v);
return;
label:
// Step 2: owner check — user permission bits must imply the requested action.
v = virtualinvoke v.<org.apache.hadoop.fs.FileStatus: org.apache.hadoop.fs.permission.FsPermission getPermission()>();
v = virtualinvoke v.<org.apache.hadoop.fs.FileStatus: java.lang.String getOwner()>();
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>(v);
if v == 0 goto label;
v = virtualinvoke v.<org.apache.hadoop.fs.permission.FsPermission: org.apache.hadoop.fs.permission.FsAction getUserAction()>();
v = virtualinvoke v.<org.apache.hadoop.fs.permission.FsAction: boolean implies(org.apache.hadoop.fs.permission.FsAction)>(v);
if v == 0 goto label;
return;
label:
// Step 3: group check — any of the user's groups matching the file's group.
v = virtualinvoke v.<org.apache.hadoop.fs.FileStatus: java.lang.String getGroup()>();
v = staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: boolean arrayContains(java.lang.String[],java.lang.String)>(v, v);
if v == 0 goto label;
v = virtualinvoke v.<org.apache.hadoop.fs.permission.FsPermission: org.apache.hadoop.fs.permission.FsAction getGroupAction()>();
v = virtualinvoke v.<org.apache.hadoop.fs.permission.FsAction: boolean implies(org.apache.hadoop.fs.permission.FsAction)>(v);
if v == 0 goto label;
return;
label:
// Step 4: "other" bits.
v = virtualinvoke v.<org.apache.hadoop.fs.permission.FsPermission: org.apache.hadoop.fs.permission.FsAction getOtherAction()>();
v = virtualinvoke v.<org.apache.hadoop.fs.permission.FsAction: boolean implies(org.apache.hadoop.fs.permission.FsAction)>(v);
if v == 0 goto label;
return;
label:
// No rung of the ladder granted access: fail loudly.
v = new org.apache.hadoop.security.AccessControlException;
v = virtualinvoke v.<org.apache.hadoop.fs.FileStatus: org.apache.hadoop.fs.Path getPath()>();
// NOTE(review): "\u " occurrences here are decompiler-truncated \u0001
// placeholders for the action and path arguments.
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (org.apache.hadoop.fs.permission.FsAction,org.apache.hadoop.fs.Path,java.lang.String)>(v, v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("action \u not permitted on path \u for user \u0001");
specialinvoke v.<org.apache.hadoop.security.AccessControlException: void <init>(java.lang.String)>(v);
throw v;
}
// Returns true iff the given path lies inside an HDFS encryption zone.
// Relative paths are first qualified against their FileSystem; non-"hdfs"
// schemes short-circuit to false; a FileNotFoundException from the EZ lookup
// is treated as "not encrypted" (debug-logged, returns false).
public static boolean isPathEncrypted(org.apache.hadoop.conf.Configuration, java.net.URI, org.apache.hadoop.fs.Path) throws java.io.IOException
{
org.apache.hadoop.fs.FileSystem v;
org.apache.hadoop.conf.Configuration v;
org.apache.hadoop.fs.Path v, v;
java.lang.String v, v, v;
java.net.URI v, v;
boolean v, v, v;
org.slf4j.Logger v;
org.apache.hadoop.hdfs.client.HdfsAdmin v;
java.io.FileNotFoundException v;
org.apache.hadoop.hdfs.protocol.EncryptionZone v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.net.URI;
v := @parameter: org.apache.hadoop.fs.Path;
// Absolute paths are used as-is; relative ones are made fully qualified.
v = virtualinvoke v.<org.apache.hadoop.fs.Path: boolean isAbsolute()>();
if v == 0 goto label;
v = v;
goto label;
label:
v = virtualinvoke v.<org.apache.hadoop.fs.Path: org.apache.hadoop.fs.FileSystem getFileSystem(org.apache.hadoop.conf.Configuration)>(v);
v = virtualinvoke v.<org.apache.hadoop.fs.FileSystem: org.apache.hadoop.fs.Path makeQualified(org.apache.hadoop.fs.Path)>(v);
label:
// Encryption zones only exist on HDFS; bail out for any other scheme.
v = "hdfs";
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.net.URI toUri()>();
v = virtualinvoke v.<java.net.URI: java.lang.String getScheme()>();
v = virtualinvoke v.<java.lang.String: boolean equalsIgnoreCase(java.lang.String)>(v);
if v != 0 goto label;
return 0;
label:
// Ask the namenode (via HdfsAdmin) whether an EZ covers this path.
v = new org.apache.hadoop.hdfs.client.HdfsAdmin;
specialinvoke v.<org.apache.hadoop.hdfs.client.HdfsAdmin: void <init>(java.net.URI,org.apache.hadoop.conf.Configuration)>(v, v);
v = virtualinvoke v.<org.apache.hadoop.hdfs.client.HdfsAdmin: org.apache.hadoop.hdfs.protocol.EncryptionZone getEncryptionZoneForPath(org.apache.hadoop.fs.Path)>(v);
if v == null goto label;
v = 1;
goto label;
label:
v = 0;
label:
return v;
label:
// Non-existent path: treat as unencrypted rather than propagating.
v := @caughtexception;
v = <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.slf4j.Logger LOG>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (org.apache.hadoop.fs.Path)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Failed to get EZ for non-existent path: \u0001");
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Throwable)>(v, v);
return 0;
catch java.io.FileNotFoundException from label to label with label;
}
// Linear membership scan: returns true when the array contains a String equal
// to the target. A null array yields false; equality is String.equals, so a
// null element in the array would NPE (none is expected from UGI group lists).
private static boolean arrayContains(java.lang.String[], java.lang.String)
{
int v, v;
java.lang.String[] v;
java.lang.String v, v;
boolean v;
v := @parameter: java.lang.String[];
v := @parameter: java.lang.String;
// Guard: no array, no match.
if v != null goto label;
return 0;
label:
v = lengthof v;
v = 0;
label:
// Classic index loop: compare each element, return on first hit.
if v >= v goto label;
v = v[v];
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>(v);
if v == 0 goto label;
return 1;
label:
v = v + 1;
goto label;
label:
return 0;
}
// Runs DistCp (srcPaths -> dst) impersonating the given user: creates a proxy
// UGI over the login user and executes runDistCp inside doAs via the
// HdfsUtils$1 PrivilegedExceptionAction. Returns the boolean result of the
// action; an InterruptedException from doAs is wrapped in IOException.
// NOTE(review): the catch does not re-interrupt the current thread before
// wrapping — the interrupt flag is lost; confirm whether callers rely on it.
public static boolean runDistCpAs(java.util.List, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration, java.lang.String) throws java.io.IOException
{
org.apache.hadoop.hive.metastore.utils.HdfsUtils$1 v;
java.io.IOException v;
java.util.List v;
java.lang.InterruptedException v;
org.apache.hadoop.security.UserGroupInformation v, v;
org.apache.hadoop.conf.Configuration v;
org.apache.hadoop.fs.Path v;
java.lang.Object v;
java.lang.String v;
boolean v;
v := @parameter: java.util.List;
v := @parameter: org.apache.hadoop.fs.Path;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: java.lang.String;
// Build a proxy user (doAs target) on top of the login user.
v = staticinvoke <org.apache.hadoop.security.UserGroupInformation: org.apache.hadoop.security.UserGroupInformation getLoginUser()>();
v = staticinvoke <org.apache.hadoop.security.UserGroupInformation: org.apache.hadoop.security.UserGroupInformation createProxyUser(java.lang.String,org.apache.hadoop.security.UserGroupInformation)>(v, v);
label:
// HdfsUtils$1 is an anonymous PrivilegedExceptionAction capturing
// (srcPaths, dst, conf); its call() result is a Boolean.
v = new org.apache.hadoop.hive.metastore.utils.HdfsUtils$1;
specialinvoke v.<org.apache.hadoop.hive.metastore.utils.HdfsUtils$1: void <init>(java.util.List,org.apache.hadoop.fs.Path,org.apache.hadoop.conf.Configuration)>(v, v, v);
v = virtualinvoke v.<org.apache.hadoop.security.UserGroupInformation: java.lang.Object doAs(java.security.PrivilegedExceptionAction)>(v);
v = virtualinvoke v.<java.lang.Boolean: boolean booleanValue()>();
label:
return v;
label:
v := @caughtexception;
v = new java.io.IOException;
specialinvoke v.<java.io.IOException: void <init>(java.lang.Throwable)>(v);
throw v;
catch java.lang.InterruptedException from label to label with label;
}
// Executes DistCp for srcPaths -> dst. Builds DistCpOptions (syncFolder + CRC,
// preserving BLOCKSIZE), constructs CLI-style params, forces
// "mapred.mapper.new-api"=true for the run, and returns true iff
// DistCp.run(...) exits 0. The flag is restored to false on every exit path
// (both returns and a compiler-generated Throwable handler acting as finally).
// Any Exception from the run is wrapped in IOException.
public static boolean runDistCp(java.util.List, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration) throws java.io.IOException
{
java.lang.Throwable v;
org.apache.hadoop.tools.DistCp v;
org.apache.hadoop.tools.DistCpOptions$Builder v, v, v, v;
java.lang.Object[] v;
org.apache.hadoop.tools.DistCpOptions v;
java.lang.String[] v;
org.apache.hadoop.conf.Configuration v;
org.apache.hadoop.tools.DistCpOptions$FileAttribute v;
int v, v;
org.apache.hadoop.fs.Path v;
java.io.IOException v;
java.lang.Exception v;
java.util.List v, v;
v := @parameter: java.util.List;
v := @parameter: org.apache.hadoop.fs.Path;
v := @parameter: org.apache.hadoop.conf.Configuration;
// Options: -update (syncFolder), CRC checks, preserve block size.
v = new org.apache.hadoop.tools.DistCpOptions$Builder;
specialinvoke v.<org.apache.hadoop.tools.DistCpOptions$Builder: void <init>(java.util.List,org.apache.hadoop.fs.Path)>(v, v);
v = virtualinvoke v.<org.apache.hadoop.tools.DistCpOptions$Builder: org.apache.hadoop.tools.DistCpOptions$Builder withSyncFolder(boolean)>(1);
v = virtualinvoke v.<org.apache.hadoop.tools.DistCpOptions$Builder: org.apache.hadoop.tools.DistCpOptions$Builder withCRC(boolean)>(1);
v = <org.apache.hadoop.tools.DistCpOptions$FileAttribute: org.apache.hadoop.tools.DistCpOptions$FileAttribute BLOCKSIZE>;
v = virtualinvoke v.<org.apache.hadoop.tools.DistCpOptions$Builder: org.apache.hadoop.tools.DistCpOptions$Builder preserve(org.apache.hadoop.tools.DistCpOptions$FileAttribute)>(v);
v = virtualinvoke v.<org.apache.hadoop.tools.DistCpOptions$Builder: org.apache.hadoop.tools.DistCpOptions build()>();
v = staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: java.util.List constructDistCpParams(java.util.List,org.apache.hadoop.fs.Path,org.apache.hadoop.conf.Configuration)>(v, v, v);
label:
// DistCp requires the new mapper API; toggle it on for the duration.
virtualinvoke v.<org.apache.hadoop.conf.Configuration: void setBoolean(java.lang.String,boolean)>("mapred.mapper.new-api", 1);
v = new org.apache.hadoop.tools.DistCp;
specialinvoke v.<org.apache.hadoop.tools.DistCp: void <init>(org.apache.hadoop.conf.Configuration,org.apache.hadoop.tools.DistCpOptions)>(v, v);
v = interfaceinvoke v.<java.util.List: int size()>();
v = newarray (java.lang.String)[v];
v = interfaceinvoke v.<java.util.List: java.lang.Object[] toArray(java.lang.Object[])>(v);
v = virtualinvoke v.<org.apache.hadoop.tools.DistCp: int run(java.lang.String[])>(v);
if v != 0 goto label;
label:
// Success path: restore flag, report true.
virtualinvoke v.<org.apache.hadoop.conf.Configuration: void setBoolean(java.lang.String,boolean)>("mapred.mapper.new-api", 0);
return 1;
label:
// Non-zero exit: restore flag, report false.
virtualinvoke v.<org.apache.hadoop.conf.Configuration: void setBoolean(java.lang.String,boolean)>("mapred.mapper.new-api", 0);
return 0;
label:
v := @caughtexception;
v = new java.io.IOException;
specialinvoke v.<java.io.IOException: void <init>(java.lang.String,java.lang.Throwable)>("Cannot execute DistCp process: ", v);
throw v;
label:
// Finally-equivalent: restore the flag before rethrowing anything else.
v := @caughtexception;
virtualinvoke v.<org.apache.hadoop.conf.Configuration: void setBoolean(java.lang.String,boolean)>("mapred.mapper.new-api", 0);
throw v;
catch java.lang.Exception from label to label with label;
catch java.lang.Throwable from label to label with label;
}
// Builds the DistCp command-line argument list: configured options from
// constructDistCpOptions (or the defaults "-update" and "-pbx" when none are
// configured), followed by each source path, followed by the destination.
private static java.util.List constructDistCpParams(java.util.List, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration)
{
java.util.Iterator v;
java.util.List v, v;
org.apache.hadoop.conf.Configuration v;
int v;
org.apache.hadoop.fs.Path v;
java.lang.Object v;
java.lang.String v, v;
boolean v;
v := @parameter: java.util.List;
v := @parameter: org.apache.hadoop.fs.Path;
v := @parameter: org.apache.hadoop.conf.Configuration;
v = staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: java.util.List constructDistCpOptions(org.apache.hadoop.conf.Configuration)>(v);
v = interfaceinvoke v.<java.util.List: int size()>();
// No configured options: fall back to the default flag set.
if v != 0 goto label;
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>("-update");
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>("-pbx");
label:
// Append every source path as a string argument.
v = interfaceinvoke v.<java.util.List: java.util.Iterator iterator()>();
label:
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
if v == 0 goto label;
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String toString()>();
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
goto label;
label:
// Destination path goes last, per DistCp CLI convention.
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String toString()>();
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
return v;
}
// Reads every configuration property prefixed "distcp.options." and turns
// each entry into CLI tokens: "-<key-suffix>" is always added, and the value
// is added as a separate token when it is non-null and non-empty.
// Returns a fresh mutable ArrayList (possibly empty).
public static java.util.List constructDistCpOptions(org.apache.hadoop.conf.Configuration)
{
java.util.Iterator v;
java.util.Set v;
java.util.ArrayList v;
java.util.Map v;
org.apache.hadoop.conf.Configuration v;
java.lang.Object v, v, v;
java.lang.String v;
boolean v, v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v = new java.util.ArrayList;
specialinvoke v.<java.util.ArrayList: void <init>()>();
// getPropsWithPrefix strips the prefix, so keys here are the bare suffixes.
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.util.Map getPropsWithPrefix(java.lang.String)>("distcp.options.");
v = interfaceinvoke v.<java.util.Map: java.util.Set entrySet()>();
v = interfaceinvoke v.<java.util.Set: java.util.Iterator iterator()>();
label:
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
if v == 0 goto label;
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = interfaceinvoke v.<java.util.Map$Entry: java.lang.Object getKey()>();
v = interfaceinvoke v.<java.util.Map$Entry: java.lang.Object getValue()>();
// Flag token: "-" + key suffix.
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("-\u0001");
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
// Optional value token, skipped for null/empty values.
if v == null goto label;
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v != 0 goto label;
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
goto label;
label:
return v;
}
// For a DistributedFileSystem, returns the inode-id based reserved path
// "/.reserved/.inodes/<fileId>" (stable across renames); for any other
// FileSystem the original path is returned unchanged.
public static org.apache.hadoop.fs.Path getFileIdPath(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, long)
{
org.apache.hadoop.fs.FileSystem v;
org.apache.hadoop.fs.Path v, v;
long v;
java.lang.String v;
boolean v;
v := @parameter: org.apache.hadoop.fs.FileSystem;
v := @parameter: org.apache.hadoop.fs.Path;
v := @parameter: long;
v = v instanceof org.apache.hadoop.hdfs.DistributedFileSystem;
if v == 0 goto label;
v = new org.apache.hadoop.fs.Path;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (long)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("/.reserved/.inodes/\u0001");
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String)>(v);
goto label;
label:
// Non-HDFS filesystem: inode paths are meaningless, keep the original.
v = v;
label:
return v;
}
// Resolves the HDFS inode id for the given path string via
// DFSClient.getFileInfo on the underlying DistributedFileSystem.
// Throws UnsupportedOperationException (from ensureDfs) for non-DFS systems.
// NOTE(review): getFileInfo returns null for a non-existent path — no null
// check is visible here, so a missing path would NPE; confirm callers
// guarantee existence.
public static long getFileId(org.apache.hadoop.fs.FileSystem, java.lang.String) throws java.io.IOException
{
org.apache.hadoop.hdfs.DFSClient v;
org.apache.hadoop.hdfs.protocol.HdfsFileStatus v;
org.apache.hadoop.hdfs.DistributedFileSystem v;
org.apache.hadoop.fs.FileSystem v;
long v;
java.lang.String v;
v := @parameter: org.apache.hadoop.fs.FileSystem;
v := @parameter: java.lang.String;
v = staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.apache.hadoop.hdfs.DistributedFileSystem ensureDfs(org.apache.hadoop.fs.FileSystem)>(v);
v = virtualinvoke v.<org.apache.hadoop.hdfs.DistributedFileSystem: org.apache.hadoop.hdfs.DFSClient getClient()>();
v = virtualinvoke v.<org.apache.hadoop.hdfs.DFSClient: org.apache.hadoop.hdfs.protocol.HdfsFileStatus getFileInfo(java.lang.String)>(v);
v = interfaceinvoke v.<org.apache.hadoop.hdfs.protocol.HdfsFileStatus: long getFileId()>();
return v;
}
// Narrowing guard: returns the argument as DistributedFileSystem, or throws
// UnsupportedOperationException naming the actual class when it is not one.
private static org.apache.hadoop.hdfs.DistributedFileSystem ensureDfs(org.apache.hadoop.fs.FileSystem)
{
java.lang.UnsupportedOperationException v;
org.apache.hadoop.fs.FileSystem v;
java.lang.Class v;
java.lang.String v;
boolean v;
v := @parameter: org.apache.hadoop.fs.FileSystem;
v = v instanceof org.apache.hadoop.hdfs.DistributedFileSystem;
if v != 0 goto label;
v = new java.lang.UnsupportedOperationException;
v = virtualinvoke v.<java.lang.Object: java.lang.Class getClass()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.Class)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Only supported for DFS; got \u0001")>;
specialinvoke v.<java.lang.UnsupportedOperationException: void <init>(java.lang.String)>(v);
throw v;
label:
// instanceof passed: the implicit checkcast to DFS is safe.
return v;
}
// Public wrapper for the 7-argument overload: allocates an FsShell only when
// the recursive flag is set (recursive chgrp/chmod/setfacl go through the
// shell), otherwise passes null, then delegates.
public static void setFullFileStatus(org.apache.hadoop.conf.Configuration, org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus, java.lang.String, org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, boolean)
{
org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus v;
org.apache.hadoop.fs.FsShell v;
org.apache.hadoop.fs.FileSystem v;
org.apache.hadoop.conf.Configuration v;
org.apache.hadoop.fs.Path v;
java.lang.String v;
boolean v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus;
v := @parameter: java.lang.String;
v := @parameter: org.apache.hadoop.fs.FileSystem;
v := @parameter: org.apache.hadoop.fs.Path;
v := @parameter: boolean;
// recursive == true -> new FsShell; otherwise null shell.
if v == 0 goto label;
v = new org.apache.hadoop.fs.FsShell;
specialinvoke v.<org.apache.hadoop.fs.FsShell: void <init>()>();
goto label;
label:
v = null;
label:
staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: void setFullFileStatus(org.apache.hadoop.conf.Configuration,org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus,java.lang.String,org.apache.hadoop.fs.FileSystem,org.apache.hadoop.fs.Path,boolean,org.apache.hadoop.fs.FsShell)>(v, v, v, v, v, v, v);
return;
}
// Best-effort inheritance of group / permission / ACLs from a source
// HadoopFileStatus onto targetPath. Overall flow (reconstructed from the IR;
// labels are unnumbered so branch targets are read positionally):
//   - If "dfs.namenode.acls.enabled" == "true" and the source has ACL entries,
//     build the full ACL list: source entries minus base entries
//     (removeBaseAclEntries) plus fresh ACCESS user/group/other entries derived
//     from the source FsPermission bits.
//   - recursive (flag set, FsShell provided): shell "-chgrp -R" for the group,
//     then either "-setfacl -R --set" with the joined ACL spec, or
//     "-chmod -R <octal>" when ACLs are unavailable. An Exception during
//     setfacl is logged info+debug (filesystem may not support ACLs) and
//     execution continues.
//   - non-recursive: FileSystem.setOwner(path, null, group) when the group
//     differs and is non-empty, then setAcl or setPermission.
//   - Any Exception in the whole body is caught at the end: logged as a warn
//     ("Unable to inherit permissions...") plus a debug with the stack trace —
//     permission inheritance is deliberately non-fatal.
// NOTE(review): "\u " sequences in the log constants are decompiler-truncated
// \u0001 argument placeholders.
static void setFullFileStatus(org.apache.hadoop.conf.Configuration, org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus, java.lang.String, org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, boolean, org.apache.hadoop.fs.FsShell)
{
org.apache.hadoop.fs.FileSystem v;
org.apache.hadoop.fs.permission.AclEntryScope v, v, v;
org.apache.hadoop.fs.FileStatus v, v;
org.apache.hadoop.fs.permission.FsPermission v;
org.apache.hadoop.conf.Configuration v;
org.apache.hadoop.fs.Path v, v;
boolean v, v, v, v, v;
java.lang.Exception v, v;
java.util.List v, v;
org.apache.hadoop.fs.permission.AclEntry v, v, v;
com.google.common.base.Joiner v;
java.lang.String[] v, v, v;
org.apache.hadoop.fs.permission.FsAction v, v, v;
java.util.ArrayList v, v;
org.apache.hadoop.fs.permission.AclEntryType v, v, v;
short v;
java.lang.String v, v, v, v, v, v, v, v, v, v, v, v, v;
org.apache.hadoop.fs.permission.AclStatus v;
org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus v;
org.apache.hadoop.fs.FsShell v;
org.slf4j.Logger v, v, v, v, v;
v := @parameter: org.apache.hadoop.conf.Configuration;
v := @parameter: org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus;
v := @parameter: java.lang.String;
v := @parameter: org.apache.hadoop.fs.FileSystem;
v := @parameter: org.apache.hadoop.fs.Path;
v := @parameter: boolean;
v := @parameter: org.apache.hadoop.fs.FsShell;
label:
// Source attributes: group, ACL-enabled flag, permission bits.
v = virtualinvoke v.<org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus: org.apache.hadoop.fs.FileStatus getFileStatus()>();
v = virtualinvoke v.<org.apache.hadoop.fs.FileStatus: java.lang.String getGroup()>();
v = virtualinvoke v.<org.apache.hadoop.conf.Configuration: java.lang.String get(java.lang.String)>("dfs.namenode.acls.enabled");
v = staticinvoke <com.google.common.base.Objects: boolean equal(java.lang.Object,java.lang.Object)>(v, "true");
v = virtualinvoke v.<org.apache.hadoop.fs.FileStatus: org.apache.hadoop.fs.permission.FsPermission getPermission()>();
v = null;
// ACL list construction (only when ACLs enabled and source has entries).
if v == 0 goto label;
v = virtualinvoke v.<org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus: java.util.List getAclEntries()>();
if v == null goto label;
v = <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.slf4j.Logger LOG>;
v = virtualinvoke v.<org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus: org.apache.hadoop.fs.permission.AclStatus getAclStatus()>();
v = virtualinvoke v.<org.apache.hadoop.fs.permission.AclStatus: java.lang.String toString()>();
interfaceinvoke v.<org.slf4j.Logger: void trace(java.lang.String)>(v);
// Copy the source entries, drop base entries, then re-add ACCESS
// user/group/other entries synthesized from the permission bits.
v = new java.util.ArrayList;
v = virtualinvoke v.<org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus: java.util.List getAclEntries()>();
specialinvoke v.<java.util.ArrayList: void <init>(java.util.Collection)>(v);
v = v;
staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: void removeBaseAclEntries(java.util.List)>(v);
v = <org.apache.hadoop.fs.permission.AclEntryScope: org.apache.hadoop.fs.permission.AclEntryScope ACCESS>;
v = <org.apache.hadoop.fs.permission.AclEntryType: org.apache.hadoop.fs.permission.AclEntryType USER>;
v = virtualinvoke v.<org.apache.hadoop.fs.permission.FsPermission: org.apache.hadoop.fs.permission.FsAction getUserAction()>();
v = staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.apache.hadoop.fs.permission.AclEntry newAclEntry(org.apache.hadoop.fs.permission.AclEntryScope,org.apache.hadoop.fs.permission.AclEntryType,org.apache.hadoop.fs.permission.FsAction)>(v, v, v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
v = <org.apache.hadoop.fs.permission.AclEntryScope: org.apache.hadoop.fs.permission.AclEntryScope ACCESS>;
v = <org.apache.hadoop.fs.permission.AclEntryType: org.apache.hadoop.fs.permission.AclEntryType GROUP>;
v = virtualinvoke v.<org.apache.hadoop.fs.permission.FsPermission: org.apache.hadoop.fs.permission.FsAction getGroupAction()>();
v = staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.apache.hadoop.fs.permission.AclEntry newAclEntry(org.apache.hadoop.fs.permission.AclEntryScope,org.apache.hadoop.fs.permission.AclEntryType,org.apache.hadoop.fs.permission.FsAction)>(v, v, v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
v = <org.apache.hadoop.fs.permission.AclEntryScope: org.apache.hadoop.fs.permission.AclEntryScope ACCESS>;
v = <org.apache.hadoop.fs.permission.AclEntryType: org.apache.hadoop.fs.permission.AclEntryType OTHER>;
v = virtualinvoke v.<org.apache.hadoop.fs.permission.FsPermission: org.apache.hadoop.fs.permission.FsAction getOtherAction()>();
v = staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.apache.hadoop.fs.permission.AclEntry newAclEntry(org.apache.hadoop.fs.permission.AclEntryScope,org.apache.hadoop.fs.permission.AclEntryType,org.apache.hadoop.fs.permission.FsAction)>(v, v, v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
label:
// Recursive branch: apply via FsShell commands.
if v == 0 goto label;
virtualinvoke v.<org.apache.hadoop.fs.FsShell: void setConf(org.apache.hadoop.conf.Configuration)>(v);
// chgrp only for a non-null, non-empty source group.
if v == null goto label;
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v != 0 goto label;
v = newarray (java.lang.String)[4];
v[0] = "-chgrp";
v[1] = "-R";
v[2] = v;
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String toString()>();
v[3] = v;
staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: void run(org.apache.hadoop.fs.FsShell,java.lang.String[])>(v, v);
label:
// ACLs available -> recursive setfacl with the comma-joined entry list.
if v == 0 goto label;
if null == v goto label;
label:
v = staticinvoke <com.google.common.base.Joiner: com.google.common.base.Joiner on(java.lang.String)>(",");
v = virtualinvoke v.<com.google.common.base.Joiner: java.lang.String join(java.lang.Iterable)>(v);
v = newarray (java.lang.String)[5];
v[0] = "-setfacl";
v[1] = "-R";
v[2] = "--set";
v[3] = v;
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String toString()>();
v[4] = v;
staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: void run(org.apache.hadoop.fs.FsShell,java.lang.String[])>(v, v);
label:
goto label;
label:
// setfacl failed (e.g. filesystem without ACL support): log and continue.
v := @caughtexception;
v = <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.slf4j.Logger LOG>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (org.apache.hadoop.fs.Path)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Skipping ACL inheritance: File system for path \u does not support ACLs but dfs.namenode.acls.enabled is set to true. ");
interfaceinvoke v.<org.slf4j.Logger: void info(java.lang.String)>(v);
v = <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.slf4j.Logger LOG>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.Exception)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("The details are: \u0001");
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Throwable)>(v, v);
goto label;
label:
// No ACLs: recursive chmod with the octal rendering of the permission bits.
v = virtualinvoke v.<org.apache.hadoop.fs.permission.FsPermission: short toShort()>();
v = staticinvoke <java.lang.Integer: java.lang.String toString(int,int)>(v, 8);
v = newarray (java.lang.String)[4];
v[0] = "-chmod";
v[1] = "-R";
v[2] = v;
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String toString()>();
v[3] = v;
staticinvoke <org.apache.hadoop.hive.metastore.utils.HdfsUtils: void run(org.apache.hadoop.fs.FsShell,java.lang.String[])>(v, v);
goto label;
label:
// Non-recursive branch: direct FileSystem API calls on the single path.
if v == null goto label;
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v != 0 goto label;
if v == null goto label;
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>(v);
if v != 0 goto label;
label:
// Owner left unchanged (null); only the group is set.
virtualinvoke v.<org.apache.hadoop.fs.FileSystem: void setOwner(org.apache.hadoop.fs.Path,java.lang.String,java.lang.String)>(v, null, v);
label:
if v == 0 goto label;
if null == v goto label;
virtualinvoke v.<org.apache.hadoop.fs.FileSystem: void setAcl(org.apache.hadoop.fs.Path,java.util.List)>(v, v);
goto label;
label:
virtualinvoke v.<org.apache.hadoop.fs.FileSystem: void setPermission(org.apache.hadoop.fs.Path,org.apache.hadoop.fs.permission.FsPermission)>(v, v);
label:
goto label;
label:
// Catch-all: permission inheritance is best-effort; warn + debug, no rethrow.
v := @caughtexception;
v = <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.slf4j.Logger LOG>;
v = virtualinvoke v.<org.apache.hadoop.hive.metastore.utils.HdfsUtils$HadoopFileStatus: org.apache.hadoop.fs.FileStatus getFileStatus()>();
v = virtualinvoke v.<org.apache.hadoop.fs.FileStatus: org.apache.hadoop.fs.Path getPath()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (org.apache.hadoop.fs.Path,org.apache.hadoop.fs.Path)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Unable to inherit permissions for file \u from file \u0001");
v = virtualinvoke v.<java.lang.Exception: java.lang.String getMessage()>();
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>(v, v);
v = <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.slf4j.Logger LOG>;
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Throwable)>("Exception while inheriting permissions", v);
label:
return;
catch java.lang.Exception from label to label with label;
catch java.lang.Exception from label to label with label;
}
// In-place filter: removes entries matching the predicate implemented by the
// anonymous class HdfsUtils$2 (its body is outside this dump; presumably it
// matches the base user/group/other ACL entries — confirm in HdfsUtils$2)
// using Guava's Iterables.removeIf.
private static void removeBaseAclEntries(java.util.List)
{
java.util.List v;
org.apache.hadoop.hive.metastore.utils.HdfsUtils$2 v;
v := @parameter: java.util.List;
v = new org.apache.hadoop.hive.metastore.utils.HdfsUtils$2;
specialinvoke v.<org.apache.hadoop.hive.metastore.utils.HdfsUtils$2: void <init>()>();
staticinvoke <com.google.common.collect.Iterables: boolean removeIf(java.lang.Iterable,com.google.common.base.Predicate)>(v, v);
return;
}
// Convenience factory: builds an AclEntry from scope + type + permission via
// the AclEntry.Builder fluent API (no name component is set).
private static org.apache.hadoop.fs.permission.AclEntry newAclEntry(org.apache.hadoop.fs.permission.AclEntryScope, org.apache.hadoop.fs.permission.AclEntryType, org.apache.hadoop.fs.permission.FsAction)
{
org.apache.hadoop.fs.permission.AclEntry$Builder v, v, v, v;
org.apache.hadoop.fs.permission.AclEntryScope v;
org.apache.hadoop.fs.permission.FsAction v;
org.apache.hadoop.fs.permission.AclEntryType v;
org.apache.hadoop.fs.permission.AclEntry v;
v := @parameter: org.apache.hadoop.fs.permission.AclEntryScope;
v := @parameter: org.apache.hadoop.fs.permission.AclEntryType;
v := @parameter: org.apache.hadoop.fs.permission.FsAction;
v = new org.apache.hadoop.fs.permission.AclEntry$Builder;
specialinvoke v.<org.apache.hadoop.fs.permission.AclEntry$Builder: void <init>()>();
v = virtualinvoke v.<org.apache.hadoop.fs.permission.AclEntry$Builder: org.apache.hadoop.fs.permission.AclEntry$Builder setScope(org.apache.hadoop.fs.permission.AclEntryScope)>(v);
v = virtualinvoke v.<org.apache.hadoop.fs.permission.AclEntry$Builder: org.apache.hadoop.fs.permission.AclEntry$Builder setType(org.apache.hadoop.fs.permission.AclEntryType)>(v);
v = virtualinvoke v.<org.apache.hadoop.fs.permission.AclEntry$Builder: org.apache.hadoop.fs.permission.AclEntry$Builder setPermission(org.apache.hadoop.fs.permission.FsAction)>(v);
v = virtualinvoke v.<org.apache.hadoop.fs.permission.AclEntry$Builder: org.apache.hadoop.fs.permission.AclEntry build()>();
return v;
}
// Runs an FsShell command: debug-logs the argument array, executes it, and
// debug-logs the exit code. NOTE(review): the return value is only logged —
// a failed shell command (non-zero exit) is NOT surfaced to the caller here;
// failures would only appear via an Exception thrown by FsShell.run.
private static void run(org.apache.hadoop.fs.FsShell, java.lang.String[]) throws java.lang.Exception
{
org.apache.hadoop.fs.FsShell v;
org.slf4j.Logger v, v;
int v;
java.lang.String[] v;
java.lang.String v, v;
v := @parameter: org.apache.hadoop.fs.FsShell;
v := @parameter: java.lang.String[];
v = <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.slf4j.Logger LOG>;
v = staticinvoke <org.apache.commons.lang.ArrayUtils: java.lang.String toString(java.lang.Object)>(v);
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.hadoop.fs.FsShell: int run(java.lang.String[])>(v);
v = <org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.slf4j.Logger LOG>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Return value is :\u0001");
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String)>(v);
return;
}
// Static initializer: creates the SLF4J logger for HdfsUtils and stores it
// in the LOG field (standard per-class logger idiom).
static void <clinit>()
{
org.slf4j.Logger v;
v = staticinvoke <org.slf4j.LoggerFactory: org.slf4j.Logger getLogger(java.lang.Class)>(class "Lorg/apache/hadoop/hive/metastore/utils/HdfsUtils;");
<org.apache.hadoop.hive.metastore.utils.HdfsUtils: org.slf4j.Logger LOG> = v;
return;
}
}