Refine search
ugi = UserGroupInformation.getCurrentUser(); if (serverId.isEmpty()) { throw new AccessControlException( "Kerberos principal name does NOT have the expected " + "hostname part: " + ugi.getUserName()); throw new AccessControlException( "Server does not support SASL " + authMethod); saslServer = ugi.doAs( new PrivilegedExceptionAction<SaslServer>() { @Override throw new AccessControlException( "Unable to find SASL server implementation for " + mechanism);
Path file1 = new Path("/simpleAppend.dat"); FSDataOutputStream stm = AppendTestUtil.createFile(fs, file1, 1); System.out.println("Created file simpleAppend.dat"); out = fs.append(new Path("/non-existing.dat")); fail("Expected to have FileNotFoundException"); Path root = new Path("/"); fs.setPermission(root, new FsPermission((short)0777)); fs.close(); UserGroupInformation.getCurrentUser(); String username = "testappenduser"; String group = "testappendgroup"; assertFalse(superuser.getShortUserName().equals(username)); assertFalse(Arrays.asList(superuser.getGroupNames()).contains(group)); UserGroupInformation appenduser = UserGroupInformation.createUserForTesting(username, new String[]{group}); ace.printStackTrace(System.out);
try { HadoopAccessorService has = Services.get().get(HadoopAccessorService.class); URI uri = new Path(appPath).toUri(); Configuration fsConf = has.createConfiguration(uri.getAuthority()); FileSystem fs = has.createFileSystem(user, uri, fsConf); Path path = new Path(appPath); try { if (!fs.exists(path)) { throw new AuthorizationException(ErrorCode.E0507, appPath, ex.getMessage(), ex);
/**
 * Verifies enforcement of the "other" permission bits: a user who is neither
 * the owner nor a group member of a 0774 directory may READ it but must be
 * denied READ_WRITE, and the denial message must name both the user and the
 * parent path.
 */
@Test
public void testAccessOthers() throws IOException, InterruptedException {
  FileSystem superFs = FileSystem.get(conf);
  Path otherDir = new Path("/p3");
  superFs.mkdirs(otherDir);
  superFs.setPermission(otherDir, new FsPermission((short) 0774));
  // Re-open the filesystem as USER1 so permission checks apply to that user.
  fs = USER1.doAs(new PrivilegedExceptionAction<FileSystem>() {
    @Override
    public FileSystem run() throws Exception {
      return FileSystem.get(conf);
    }
  });
  // 0774 grants READ to "others", so this must succeed.
  fs.access(otherDir, FsAction.READ);
  try {
    fs.access(otherDir, FsAction.READ_WRITE);
    fail("The access call should have failed.");
  } catch (AccessControlException e) {
    assertTrue("Permission denied messages must carry the username",
        e.getMessage().contains(USER1_NAME));
    assertTrue("Permission denied messages must carry the path parent",
        e.getMessage().contains(otherDir.getParent().toUri().getPath()));
  }
}
/**
 * Verifies, before any repair work starts, that the current user has WRITE
 * access to every top-level entry under the HBase root directory.
 *
 * On the first entry that fails the check, logs a warning, reports a
 * WRONG_USAGE error suggesting that hbck be rerun as the file's owner, and
 * rethrows the failure as an AccessControlException.
 *
 * @throws IOException if listing the root directory fails
 * @throws AccessControlException if the user lacks write access to an entry
 */
private void preCheckPermission() throws IOException, AccessControlException {
  // Skip entirely when the pre-check has been explicitly disabled.
  if (shouldIgnorePreCheckPermission()) {
    return;
  }
  Path hbaseDir = new Path(getConf().get(HConstants.HBASE_DIR));
  FileSystem fs = hbaseDir.getFileSystem(getConf());
  User user = User.getCurrent();
  FileStatus[] files = fs.listStatus(hbaseDir);
  for (FileStatus file : files) {
    try {
      FSUtils.checkAccess(user, file, FsAction.WRITE);
    } catch (AccessControlException ace) {
      LOG.warn("Got AccessControlException when preCheckPermission ", ace);
      // Surface a user-facing error before aborting; file.getOwner() is the
      // account most likely to have the needed permissions.
      errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + user.getShortName()
          + " does not have write perms to " + file.getPath()
          + ". Please rerun hbck as hdfs user " + file.getOwner());
      throw new AccessControlException(ace);
    }
  }
}
long fileSize = dfso.getFileStatus(new org.apache.hadoop.fs.Path( path)).getLen(); if (fileSize > Settings.FILE_PREVIEW_TXT_SIZE_BYTES) { filePreviewDTO = new FilePreviewDTO("text", "md", new String(headContent)); } catch (AccessControlException ex) { throw new AccessControlException( "Permission denied: You can not view the file."); } finally {
/**
 * Verifies enforcement of the group permission bits: a member of GROUP1 on a
 * 0740 directory may READ it but must be denied EXECUTE, and the denial
 * message must name both the user and the parent path.
 */
@Test
public void testAccessGroupMember() throws IOException, InterruptedException {
  FileSystem superFs = FileSystem.get(conf);
  Path groupDir = new Path("/p2");
  superFs.mkdirs(groupDir);
  // Owned by the current (super) user, group-owned by GROUP1, mode 0740.
  superFs.setOwner(groupDir,
      UserGroupInformation.getCurrentUser().getShortUserName(), GROUP1_NAME);
  superFs.setPermission(groupDir, new FsPermission((short) 0740));
  // Re-open the filesystem as USER1 (a GROUP1 member).
  fs = USER1.doAs(new PrivilegedExceptionAction<FileSystem>() {
    @Override
    public FileSystem run() throws Exception {
      return FileSystem.get(conf);
    }
  });
  // 0740 grants READ to the group, so this must succeed.
  fs.access(groupDir, FsAction.READ);
  try {
    fs.access(groupDir, FsAction.EXECUTE);
    fail("The access call should have failed.");
  } catch (AccessControlException e) {
    assertTrue("Permission denied messages must carry the username",
        e.getMessage().contains(USER1_NAME));
    assertTrue("Permission denied messages must carry the path parent",
        e.getMessage().contains(groupDir.getParent().toUri().getPath()));
  }
}
private Set<String> getViews() { Set<String> viewSet = Sets.newHashSet(); // Look for files with ".view.drill" extension. List<DotDrillFile> files; try { files = DotDrillUtil.getDotDrills(getFS(), new Path(config.getLocation()), DotDrillType.VIEW); for (DotDrillFile f : files) { viewSet.add(f.getBaseName()); } } catch (UnsupportedOperationException e) { logger.debug("The filesystem for this workspace does not support this operation.", e); } catch (AccessControlException e) { if (!schemaConfig.getIgnoreAuthErrors()) { logger.debug(e.getMessage()); throw UserException .permissionError(e) .message("Not authorized to list view tables in schema [%s]", getFullSchemaName()) .build(logger); } } catch (Exception e) { logger.warn("Failure while trying to list .view.drill files in workspace [{}]", getFullSchemaName(), e); } return viewSet; }
createSourceData(); UserGroupInformation tmpUser = UserGroupInformation.createRemoteUser("guest"); cluster.getFileSystem().setPermission(new Path(SOURCE_PATH + "/src/file"), new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ)); cluster.getFileSystem().setPermission(new Path(TARGET_PATH), } catch (IOException e) { LOG.error("Exception encountered ", e); Assert.fail("Test failed: " + e.getMessage()); throw new RuntimeException("Test ought to fail here");
/**
 * Asserts that {@code user} is denied {@code access} on {@code path}, and that
 * the resulting AccessControlException message names both the user and the
 * parent directory of the path.
 *
 * @param user   the user to run the permission check as
 * @param path   the absolute path being probed
 * @param access the FsAction expected to be denied
 * @throws IOException on unexpected filesystem errors
 */
private void assertPermissionDenied(UserGroupInformation user, String path,
    FsAction access) throws IOException {
  try {
    INodesInPath iip = dir.getINodesInPath(path, true);
    // Invoke the FSDirectory permission checker directly; SUPERUSER and
    // SUPERGROUP identify the fsowner/supergroup, user is the caller under
    // test. Argument order of checkPermission is significant — do not reflow.
    dir.getPermissionChecker(SUPERUSER, SUPERGROUP, user).checkPermission(iip,
        false, null, null, access, null, false);
    fail("expected AccessControlException for user + " + user + ", path = " +
        path + ", access = " + access);
  } catch (AccessControlException e) {
    assertTrue("Permission denied messages must carry the username",
        e.getMessage().contains(user.getUserName().toString()));
    assertTrue("Permission denied messages must carry the path parent",
        e.getMessage().contains(
            new Path(path).getParent().toUri().getPath()));
  }
}
final Path fpath = new Path("/p4/file"); DataOutputStream out = rootFs.create(fpath); out.writeBytes("dhruba: " + fpath); assertTrue(rootFs.exists(fpath)); fs = USER1.doAs(new PrivilegedExceptionAction<FileSystem>() { @Override public FileSystem run() throws Exception { final Path nfpath = new Path("/p4/file/nonexisting"); assertFalse(rootFs.exists(nfpath)); } catch (AccessControlException e) { assertTrue("Permission denied messages must carry file path", e.getMessage().contains(fpath.getName())); assertTrue("Permission denied messages must specify existing_file is not " + "a directory, when checked on /existing_file/non_existing_name", e.getMessage().contains("is not a directory")); assertFalse("Permission denied messages must not carry full file path," + "since the user does not have permission on /p4: " + e.getMessage(), e.getMessage().contains(fpath.getName())); assertFalse("Permission denied messages must not specify /p4" + " is not a directory: " + e.getMessage(), e.getMessage().contains("is not a directory"));
Path rootDir = new Path("/BSS"); Path user1Dir = new Path("/BSS/user1"); Path user1File = new Path("/BSS/user1/test"); fs.mkdirs(user1Dir); fs.setPermission(user1Dir, new FsPermission((short) 0755)); fs.setOwner(user1Dir, USER1.getShortUserName(), GROUP2_NAME); fs.setOwner(user1File, USER1.getShortUserName(), GROUP1_NAME); fs.setPermission(user1File, new FsPermission((short) 0600)); fail("User2 should not be allowed to delete user1's dir."); } catch (AccessControlException e) { e.printStackTrace(); assertTrue("Permission denied messages must carry the username", e.getMessage().contains(USER2_NAME));
/**
 * Authorization gate for getBlockLocalPathInfo(): the caller must be
 * Kerberos-authenticated and listed in the configured local-path-access
 * user set.
 *
 * @throws IOException (as AccessControlException) if the caller is not
 *         authorized
 */
private void checkBlockLocalPathAccess() throws IOException {
  checkKerberosAuthMethod("getBlockLocalPathInfo()");
  final String currentUser =
      UserGroupInformation.getCurrentUser().getShortUserName();
  if (usersWithLocalPathAccess.contains(currentUser)) {
    return;
  }
  throw new AccessControlException(
      "Can't continue with getBlockLocalPathInfo() "
          + "authorization. The user " + currentUser
          + " is not configured in "
          + DFSConfigKeys.DFS_BLOCK_LOCAL_PATH_ACCESS_USER_KEY);
}
/**
 * Walks the tree rooted at {@code root} breadth-first and asserts that every
 * regular file is unreadable even by the HDFS superuser (open() must fail
 * with an AccessControlException mentioning the superuser restriction).
 */
private void verifyFilesUnreadablebyHDFS(MiniDFSCluster cluster, Path root) throws Exception {
  DistributedFileSystem fs = cluster.getFileSystem();
  Queue<Path> pending = new LinkedList<>();
  pending.add(root);
  while (!pending.isEmpty()) {
    Path current = pending.poll();
    FileStatus stat = fs.getFileStatus(current);
    if (stat.isDirectory()) {
      // Enqueue children and keep walking.
      for (FileStatus child : fs.listStatus(current)) {
        pending.add(child.getPath());
      }
    } else {
      try {
        LOG.warn("\n\n ##Testing path [" + current + "]\n\n");
        fs.open(current);
        Assert.fail("Super user should not be able to read ["
            + UserGroupInformation.getCurrentUser() + "] [" + current.getName() + "]");
      } catch (AccessControlException e) {
        Assert.assertTrue(e.getMessage().contains(
            "superuser is not allowed to perform this operation"));
      } catch (Exception e) {
        Assert.fail("Should get an AccessControlException here");
      }
    }
  }
}
/**
 * Grants RPC admin access to callers allowed by the admin ACL, or to the
 * ZKFC's own login user; everyone else is rejected with an
 * AccessControlException that is also logged at WARN.
 */
@Override
protected void checkRpcAdminAccess() throws IOException, AccessControlException {
  UserGroupInformation caller = UserGroupInformation.getCurrentUser();
  UserGroupInformation zkfcUgi = UserGroupInformation.getLoginUser();
  // The ZKFC daemon itself is always allowed, regardless of the ACL.
  boolean isZkfcUser =
      caller.getShortUserName().equals(zkfcUgi.getShortUserName());
  if (adminAcl.isUserAllowed(caller) || isZkfcUser) {
    LOG.info("Allowed RPC access from " + caller + " at " + Server.getRemoteAddress());
    return;
  }
  String msg = "Disallowed RPC access from " + caller + " at "
      + Server.getRemoteAddress() + ". Not listed in " + DFSConfigKeys.DFS_ADMIN;
  LOG.warn(msg);
  throw new AccessControlException(msg);
}
/** Ensure the authentication method is kerberos */ private void checkKerberosAuthMethod(String msg) throws IOException { // User invoking the call must be same as the datanode user if (!UserGroupInformation.isSecurityEnabled()) { return; } if (UserGroupInformation.getCurrentUser().getAuthenticationMethod() != AuthenticationMethod.KERBEROS) { throw new AccessControlException("Error in " + msg + "Only kerberos based authentication is allowed."); } }
/**
 * Checks whether the current user can access a path with the requested
 * permissions. Returns normally when access is granted; otherwise throws
 * an {@link AccessControlException}.
 *
 * @param path Path to check
 * @param mode type of access to check
 * @throws AccessControlException if access is denied
 * @throws java.io.FileNotFoundException if the path does not exist
 * @throws IOException see specific implementation
 */
@Override
public void access(final Path path, FsAction mode) throws IOException {
  statistics.incrementReadOps(1);
  // Delegate the actual check to the ADL client using the store-relative path.
  final boolean granted =
      adlClient.checkAccess(toRelativeFilePath(path), mode.SYMBOL);
  if (!granted) {
    throw new AccessControlException("Access Denied : " + path.toString());
  }
}
String fullName = UserGroupInformation.getCurrentUser().getUserName(); if (LOG.isDebugEnabled()) LOG.debug("Kerberos principal name is " + fullName); throw new AccessControlException( "Server does not support SASL " + authMethod);
} else { user.setAuthenticationMethod(authMethod); && (!protocolUser.getUserName().equals(user.getUserName()))) { if (authMethod == AuthMethod.TOKEN) { new AccessControlException("Authenticated user (" + user + ") doesn't match what the client claims to be (" + protocolUser + ")"));
throws AccessControlException, IOException { FsPermission perm = stat.getPermission(); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); String user = ugi.getShortUserName(); if (user.equals(stat.getOwner())) { if (perm.getUserAction().implies(mode)) { return; } else if (ugi.getGroups().contains(stat.getGroup())) { if (perm.getGroupAction().implies(mode)) { return; throw new AccessControlException(String.format( "Permission denied: user=%s, path=\"%s\":%s:%s:%s%s", user, stat.getPath(), stat.getOwner(), stat.getGroup(), stat.isDirectory() ? "d" : "-", perm));