/**
 * Convenience overload that checks whether {@code user} may perform {@code action} on the
 * file described by {@code stat}, without recursing into any sub-directories.
 *
 * <p>Delegates to the five-argument overload with a {@code null} sub-directory list.
 *
 * @param fs     the file system holding the file
 * @param stat   status of the file to check
 * @param action the access type being requested (read/write/execute)
 * @param user   the user whose permissions are checked (via impersonation)
 * @throws AccessControlException if the user lacks the requested permission
 * @throws IOException            on underlying file-system errors
 * @throws InterruptedException   if the impersonated check is interrupted
 */
public static void checkFileAccessWithImpersonation(final FileSystem fs, final FileStatus stat,
    final FsAction action, final String user)
    throws IOException, AccessControlException, InterruptedException, Exception {
  checkFileAccessWithImpersonation(fs, stat, action, user, null);
}
checkFileAccessWithImpersonation(fs, fileStatus, action, userName, subDirsToCheck); } catch (AccessControlException err) {
/** * Checks the permissions for the given path and current user on Hadoop FS. If the given path * does not exists, it returns. */ @SuppressWarnings("deprecation") protected static void checkPermissions(final FileSystem fs, final FileStatus stat, final EnumSet<FsAction> actions, String user) throws IOException, AccessControlException, HiveException { if (stat == null) { // File named by path doesn't exist; nothing to validate. return; } FsAction checkActions = FsAction.NONE; for (FsAction action : actions) { checkActions = checkActions.or(action); } try { FileUtils.checkFileAccessWithImpersonation(fs, stat, checkActions, user); } catch (Exception err) { // fs.permission.AccessControlException removed by HADOOP-11356, but Hive users on older // Hadoop versions may still see this exception .. have to reference by name. if (err.getClass().getName().equals("org.apache.hadoop.fs.permission.AccessControlException")) { throw accessControlException(err); } throw new HiveException(err); } }
/** * Checks the permissions for the given path and current user on Hadoop FS. If the given path * does not exists, it returns. */ @SuppressWarnings("deprecation") protected static void checkPermissions(final FileSystem fs, final FileStatus stat, final EnumSet<FsAction> actions, String user) throws IOException, AccessControlException, HiveException { if (stat == null) { // File named by path doesn't exist; nothing to validate. return; } FsAction checkActions = FsAction.NONE; for (FsAction action : actions) { checkActions = checkActions.or(action); } try { FileUtils.checkFileAccessWithImpersonation(fs, stat, checkActions, user); } catch (Exception err) { // fs.permission.AccessControlException removed by HADOOP-11356, but Hive users on older // Hadoop versions may still see this exception .. have to reference by name. if (err.getClass().getName().equals("org.apache.hadoop.fs.permission.AccessControlException")) { throw accessControlException(err); } throw new HiveException(err); } }
/** * Checks if a given path has read-only access permissions. * * @param path The path to check for read-only permissions. * @return True if the path is read-only; False otherwise. * @throws HiveException If an error occurs while checking file permissions. */ private boolean isPathReadOnly(Path path) throws HiveException { HiveConf conf = SessionState.get().getConf(); try { FileSystem fs = path.getFileSystem(conf); UserGroupInformation ugi = Utils.getUGI(); FileStatus status = fs.getFileStatus(path); // We just check for writing permissions. If it fails with AccessControException, then it // means the location may be read-only. FileUtils.checkFileAccessWithImpersonation(fs, status, FsAction.WRITE, ugi.getUserName()); // Path has writing permissions return false; } catch (AccessControlException e) { // An AccessControlException may be caused for other different errors, // but we take it as if our path is read-only return true; } catch (Exception e) { throw new HiveException("Unable to determine if " + path + " is read only: " + e, e); } }
/** * Checks if a given path has read-only access permissions. * * @param path The path to check for read-only permissions. * @return True if the path is read-only; False otherwise. * @throws HiveException If an error occurs while checking file permissions. */ private boolean isPathReadOnly(Path path) throws HiveException { HiveConf conf = SessionState.get().getConf(); try { FileSystem fs = path.getFileSystem(conf); UserGroupInformation ugi = Utils.getUGI(); FileStatus status = fs.getFileStatus(path); // We just check for writing permissions. If it fails with AccessControException, then it // means the location may be read-only. FileUtils.checkFileAccessWithImpersonation(fs, status, FsAction.WRITE, ugi.getUserName()); // Path has writing permissions return false; } catch (AccessControlException e) { // An AccessControlException may be caused for other different errors, // but we take it as if our path is read-only return true; } catch (Exception e) { throw new HiveException("Unable to determine if " + path + " is read only: " + e, e); } }
FileUtils.checkFileAccessWithImpersonation(fs, stat, FsAction.WRITE, user);
/**
 * Convenience overload that checks whether {@code user} may perform {@code action} on the
 * file described by {@code stat}, without recursing into any sub-directories.
 *
 * <p>Delegates to the five-argument overload with a {@code null} sub-directory list.
 *
 * @param fs     the file system holding the file
 * @param stat   status of the file to check
 * @param action the access type being requested (read/write/execute)
 * @param user   the user whose permissions are checked (via impersonation)
 * @throws AccessControlException if the user lacks the requested permission
 * @throws IOException            on underlying file-system errors
 * @throws InterruptedException   if the impersonated check is interrupted
 */
public static void checkFileAccessWithImpersonation(final FileSystem fs, final FileStatus stat,
    final FsAction action, final String user)
    throws IOException, AccessControlException, InterruptedException, Exception {
  checkFileAccessWithImpersonation(fs, stat, action, user, null);
}
/** * Checks the permissions for the given path and current user on Hadoop FS. If the given path * does not exists, it returns. */ @SuppressWarnings("deprecation") protected static void checkPermissions(final FileSystem fs, final FileStatus stat, final EnumSet<FsAction> actions, String user) throws IOException, AccessControlException, HiveException { if (stat == null) { // File named by path doesn't exist; nothing to validate. return; } FsAction checkActions = FsAction.NONE; for (FsAction action : actions) { checkActions = checkActions.or(action); } try { FileUtils.checkFileAccessWithImpersonation(fs, stat, checkActions, user); } catch (Exception err) { // fs.permission.AccessControlException removed by HADOOP-11356, but Hive users on older // Hadoop versions may still see this exception .. have to reference by name. if (err.getClass().getName().equals("org.apache.hadoop.fs.permission.AccessControlException")) { throw accessControlException(err); } throw new HiveException(err); } }
checkFileAccessWithImpersonation(fs, fileStatus, action, userName, subDirsToCheck); } catch (AccessControlException err) {
/** * Checks if a given path has read-only access permissions. * * @param path The path to check for read-only permissions. * @return True if the path is read-only; False otherwise. * @throws HiveException If an error occurs while checking file permissions. */ private boolean isPathReadOnly(Path path) throws HiveException { HiveConf conf = SessionState.get().getConf(); try { FileSystem fs = path.getFileSystem(conf); UserGroupInformation ugi = Utils.getUGI(); FileStatus status = fs.getFileStatus(path); // We just check for writing permissions. If it fails with AccessControException, then it // means the location may be read-only. FileUtils.checkFileAccessWithImpersonation(fs, status, FsAction.WRITE, ugi.getUserName()); // Path has writing permissions return false; } catch (AccessControlException e) { // An AccessControlException may be caused for other different errors, // but we take it as if our path is read-only return true; } catch (Exception e) { throw new HiveException("Unable to determine if " + path + " is read only: " + e, e); } }
checkFileAccessWithImpersonation(fs, fileStatus, action, userName); } catch (AccessControlException err) {
FileUtils.checkFileAccessWithImpersonation(fs, stat, FsAction.WRITE, user);
FileUtils.checkFileAccessWithImpersonation(fs, stat, FsAction.WRITE, user);