Codota Logo
AccessControlList
Code IndexAdd Codota to your IDE (free)

How to use
AccessControlList
in
org.apache.hadoop.security.authorize

Best Java code snippets using org.apache.hadoop.security.authorize.AccessControlList (Showing top 20 results out of 540)

Refine searchRefine arrow

  • UserGroupInformation
  • Configuration
  • Common ways to obtain AccessControlList
private void myMethod () {
AccessControlList a =
  • Codota IconServletContext servletContext;String name;(AccessControlList) servletContext.getAttribute(name)
  • Codota IconString aclString;new AccessControlList(aclString)
  • Codota IconConfiguration conf;String name;String defaultValue;new AccessControlList(conf.get(name, defaultValue))
  • Smart code suggestions by Codota
}
origin: org.apache.hadoop/hadoop-common

 new IdentityHashMap<Class<?>, MachineList[]>();
String defaultAcl = conf.get(
  CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_ACL,
  AccessControlList.WILDCARD_ACL_VALUE);
String defaultBlockedAcl = conf.get(
 CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_BLOCKED_ACL, "");
String defaultMachineList = conf.get(defaultServiceHostsKey,
 MachineList.WILDCARD_VALUE);
String defaultBlockedMachineList= conf.get(
 for (Service service : services) {
  AccessControlList acl =
    new AccessControlList(
      conf.get(service.getServiceKey(),
        defaultAcl)
    );
  AccessControlList blockedAcl =
    new AccessControlList(
    conf.get(service.getServiceKey() + BLOCKED,
    defaultBlockedAcl));
origin: apache/hbase

@Test
public void testRequiresAuthorizationAccess() throws Exception {
  Configuration conf = new Configuration();
  ServletContext ctx = Mockito.mock(ServletContext.class);
  Mockito.when(ctx.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
  HttpServletRequest req = Mockito.mock(HttpServletRequest.class);
  HttpServletResponse resp = Mockito.mock(HttpServletResponse.class);

  // Admin-only instrumentation access is disabled by default, so access is allowed.
  Assert.assertTrue(HttpServer.isInstrumentationAccessAllowed(ctx, req, resp));

  // Turn on the admin-only requirement and install an ACL that rejects every user.
  conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN, true);
  conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
  AccessControlList denyAllAcl = Mockito.mock(AccessControlList.class);
  Mockito.when(denyAllAcl.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
  Mockito.when(ctx.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(denyAllAcl);
  Assert.assertFalse(HttpServer.isInstrumentationAccessAllowed(ctx, req, resp));
}
origin: org.apache.hadoop/hadoop-common

/**
 * Serializes this ACL by writing its compact String form
 * (the same form accepted by the String constructor) to the stream.
 */
@Override
public void write(DataOutput out) throws IOException {
  Text.writeString(out, getAclString());
}
origin: org.apache.hadoop/hadoop-common

/**
 * Remove user from the names of users allowed for this service.
 *
 * @param user The user name
 * @throws IllegalArgumentException if the wildcard value is passed
 */
public void removeUser(String user) {
  if (isWildCardACLValue(user)) {
    throw new IllegalArgumentException("User " + user + " can not be removed");
  }
  // A wildcard ACL tracks no individual users, so there is nothing to remove.
  if (isAllAllowed()) {
    return;
  }
  users.remove(user);
}
origin: org.apache.hadoop/hadoop-hdfs

this.conf = conf;
Configuration confForInfoServer = new Configuration(conf);
confForInfoServer.setInt(HttpServer2.HTTP_MAX_THREADS_KEY,
  HTTP_MAX_THREADS);
confForInfoServer.setInt(HttpServer2.HTTP_SELECTOR_COUNT_KEY,
  HTTP_SELECTOR_THREADS);
confForInfoServer.setInt(HttpServer2.HTTP_ACCEPTOR_COUNT_KEY,
  .setName("datanode")
  .setConf(confForInfoServer)
  .setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")))
  .hostName(getHostnameForSpnegoPrincipal(confForInfoServer))
  .addEndpoint(URI.create("http://localhost:" + proxyPort))
origin: org.apache.hadoop/hadoop-mapred

/**
 * Builds the ACLs manager: records the daemon owner, constructs the admin
 * ACL from configuration (always including the owner), and wires in the
 * job- and queue-level ACL managers.
 */
ACLsManager(Configuration conf, JobACLsManager jobACLsManager,
    QueueManager queueManager) throws IOException {
  this.jobACLsManager = jobACLsManager;
  this.queueManager = queueManager;
  aclsEnabled = conf.getBoolean(MRConfig.MR_ACLS_ENABLED, false);

  mrOwner = UserGroupInformation.getCurrentUser();
  adminAcl = new AccessControlList(conf.get(MRConfig.MR_ADMINS, " "));
  // The daemon owner is always an admin.
  adminAcl.addUser(mrOwner.getShortUserName());

  // Honor the deprecated supergroup key, but steer users to MR_ADMINS.
  String supergroup = conf.get(MRConfig.MR_SUPERGROUP);
  if (supergroup != null) {
    LOG.warn(MRConfig.MR_SUPERGROUP + " is deprecated. Use "
      + MRConfig.MR_ADMINS + " instead");
    adminAcl.addGroup(supergroup);
  }
}
origin: apache/hbase

@Ignore
public void testAuthorizationOfDefaultServlets() throws Exception {
 Configuration conf = new Configuration();
 conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
   true);
 conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
   true);
 conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
   .setACL(new AccessControlList("userA,userB groupC,groupD")).build();
 myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
 myServer.start();
origin: com.github.jiayuhan-it/hadoop-yarn-server-resourcemanager

/**
 * Builds the YARN admin ACL from configuration; the daemon user is
 * always added so the RM can administer itself.
 */
private AccessControlList getAdminAclList(Configuration conf) {
  String aclSpec = conf.get(YarnConfiguration.YARN_ADMIN_ACL,
      YarnConfiguration.DEFAULT_YARN_ADMIN_ACL);
  AccessControlList acl = new AccessControlList(aclSpec);
  acl.addUser(daemonUser.getShortUserName());
  return acl;
}
origin: ch.cern.hadoop/hadoop-mapreduce-client-hs

/**
 * Reloads the admin ACL from a freshly created configuration.
 * The caller must already be an admin; success is audit-logged.
 */
@Override
public void refreshAdminAcls() throws IOException {
  UserGroupInformation user = checkAcls("refreshAdminAcls");

  Configuration freshConf = createConf();
  String aclSpec = freshConf.get(JHAdminConfig.JHS_ADMIN_ACL,
      JHAdminConfig.DEFAULT_JHS_ADMIN_ACL);
  adminAcl = new AccessControlList(aclSpec);

  HSAuditLogger.logSuccess(user.getShortUserName(), "refreshAdminAcls",
      HISTORY_ADMIN_SERVER);
}
origin: org.apache.hadoop/hadoop-common

  try {
   clientPrincipal = SecurityUtil.getServerPrincipal(
     conf.get(clientKey), addr);
  } catch (IOException e) {
   throw (AuthorizationException) new AuthorizationException(
if((clientPrincipal != null && !clientPrincipal.equals(user.getUserName())) || 
  acls.length != 2  || !acls[0].isUserAllowed(user) || acls[1].isUserAllowed(user)) {
 String cause = clientPrincipal != null ?
   ": this service is only accessible by " + clientPrincipal :
origin: org.apache.hadoop/hadoop-hdfs

  .setConf(conf).setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")))
  .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
  .setUsernameConfKey(spnegoUserNameKey)
  .setKeytabConfKey(getSpnegoKeytabKey(conf, spnegoKeytabFileKey));
if (UserGroupInformation.isSecurityEnabled()) {
 LOG.info("Starting web server as: "
   + SecurityUtil.getServerPrincipal(conf.get(spnegoUserNameKey),
     httpAddr.getHostName()));
origin: ch.cern.hadoop/hadoop-mapreduce-client-hs

/**
 * Resolves the calling user and verifies membership in the admin ACL.
 * Failures (unknown caller or unauthorized user) are audit-logged before
 * the exception is thrown; a successful check is info-logged.
 *
 * @param method the admin operation being attempted, for logging
 * @return the authorized caller
 * @throws IOException if the current user cannot be determined
 * @throws AccessControlException if the caller is not in the admin ACL
 */
private UserGroupInformation checkAcls(String method) throws IOException {
  final UserGroupInformation caller;
  try {
    caller = UserGroupInformation.getCurrentUser();
  } catch (IOException ioe) {
    LOG.warn("Couldn't get current user", ioe);
    HSAuditLogger.logFailure("UNKNOWN", method, adminAcl.toString(),
        HISTORY_ADMIN_SERVER, "Couldn't get current user");
    throw ioe;
  }

  String shortName = caller.getShortUserName();
  if (adminAcl.isUserAllowed(caller)) {
    LOG.info("HS Admin: " + method + " invoked by user " + shortName);
    return caller;
  }

  String denial =
      "User " + shortName + " doesn't have permission to call '" + method + "'";
  LOG.warn(denial);
  HSAuditLogger.logFailure(shortName, method, adminAcl.toString(),
      HISTORY_ADMIN_SERVER, AuditConstants.UNAUTHORIZED_USER);
  throw new AccessControlException(denial);
}
origin: org.apache.hadoop/hadoop-hdfs

/**
 * Grants RPC admin access to users in the admin ACL or to the ZKFC's
 * own login user; everyone else is logged and rejected.
 */
@Override
protected void checkRpcAdminAccess() throws IOException, AccessControlException {
  UserGroupInformation caller = UserGroupInformation.getCurrentUser();
  UserGroupInformation zkfcUser = UserGroupInformation.getLoginUser();

  boolean isZkfcItself =
      caller.getShortUserName().equals(zkfcUser.getShortUserName());
  if (!adminAcl.isUserAllowed(caller) && !isZkfcItself) {
    String msg = "Disallowed RPC access from " + caller + " at " +
        Server.getRemoteAddress() + ". Not listed in " + DFSConfigKeys.DFS_ADMIN;
    LOG.warn(msg);
    throw new AccessControlException(msg);
  }
  LOG.info("Allowed RPC access from " + caller + " at " + Server.getRemoteAddress());
}
origin: org.apache.hadoop/hadoop-common

/**
 * Authorizes a proxy-user (impersonation) call: the real user must have an
 * ACL entry permitting impersonation of {@code user}, and the connection
 * must originate from a host in the real user's configured machine list.
 *
 * @param user the (possibly proxied) effective user; must not be null
 * @param remoteAddress the address the connection came from
 * @throws IllegalArgumentException if {@code user} is null
 * @throws AuthorizationException if impersonation or the source host
 *         is not permitted
 */
@Override
public void authorize(UserGroupInformation user,
    String remoteAddress) throws AuthorizationException {

  if (user == null) {
    throw new IllegalArgumentException("user is null.");
  }

  UserGroupInformation realUser = user.getRealUser();
  // No real user means this is not a proxied call; nothing to check.
  if (realUser == null) {
    return;
  }

  AccessControlList acl = proxyUserAcl.get(configPrefix +
      realUser.getShortUserName());
  if (acl == null || !acl.isUserAllowed(user)) {
    throw new AuthorizationException("User: " + realUser.getUserName()
      + " is not allowed to impersonate " + user.getUserName());
  }

  // Renamed local from "MachineList" — the original shadowed the type name,
  // which is confusing and violates Java naming conventions.
  MachineList machineList = proxyHosts.get(
      getProxySuperuserIpConfKey(realUser.getShortUserName()));
  if (machineList == null || !machineList.includes(remoteAddress)) {
    throw new AuthorizationException("Unauthorized connection for super-user: "
      + realUser.getUserName() + " from IP " + remoteAddress);
  }
}


origin: org.apache.hadoop/hadoop-mapred

/**
 * Rereads the config to get hosts and exclude list file names.
 * Rereads the files to update the hosts and exclude lists.
 * Restricted to MR admins; both outcomes are audit-logged.
 */
public synchronized void refreshNodes() throws IOException {
  UserGroupInformation callerUgi = UserGroupInformation.getCurrentUser();
  String user = callerUgi.getShortUserName();

  // Only MR admins may refresh the node lists.
  if (!aclsManager.isMRAdmin(callerUgi)) {
    AuditLogger.logFailure(user, Constants.REFRESH_NODES,
        aclsManager.getAdminsAcl().toString(), Constants.JOBTRACKER,
        Constants.UNAUTHORIZED_USER);
    throw new AccessControlException(user +
        " is not authorized to refresh nodes.");
  }

  AuditLogger.logSuccess(user, Constants.REFRESH_NODES, Constants.JOBTRACKER);
  // Delegate to the actual refresh implementation.
  refreshHosts();
}
origin: io.hops/hadoop-mapreduce-client-hs

/**
 * Asks the job history service to refresh its loaded-job cache.
 * The caller must pass the admin ACL check; both the unsupported-operation
 * failure and success paths are audit-logged.
 */
@Override
public void refreshLoadedJobCache() throws IOException {
  final String op = "refreshLoadedJobCache";
  UserGroupInformation user = checkAcls(op);
  try {
    jobHistoryService.refreshLoadedJobCache();
  } catch (UnsupportedOperationException e) {
    HSAuditLogger.logFailure(user.getShortUserName(), op,
        adminAcl.toString(), HISTORY_ADMIN_SERVER, e.getMessage());
    throw e;
  }
  HSAuditLogger.logSuccess(user.getShortUserName(), op, HISTORY_ADMIN_SERVER);
}
origin: org.apache.hadoop/hadoop-mapreduce-client-core

assertEquals(
  firstSubQueue.getAcls().get("mapred.queue.first.acl-submit-job")
    .toString(),
  "Users [user1, user2] and members of the groups [group1, group2] are allowed");
Queue secondSubQueue = iterator.next();
when(mockUGI.getShortUserName()).thenReturn("user1");
String[] groups = { "group1" };
when(mockUGI.getGroupNames()).thenReturn(groups);
assertTrue(manager.hasAccess("first", QueueACL.SUBMIT_JOB, mockUGI));
assertFalse(manager.hasAccess("second", QueueACL.SUBMIT_JOB, mockUGI));
assertFalse(manager.hasAccess("first", QueueACL.ADMINISTER_JOBS, mockUGI));
when(mockUGI.getShortUserName()).thenReturn("user3");
assertTrue(manager.hasAccess("first", QueueACL.ADMINISTER_JOBS, mockUGI));
conf.unset(DeprecatedQueueConfigurationParser.MAPRED_QUEUE_NAMES_KEY);
QueueManager.dumpConfiguration(writer, f.getAbsolutePath(), conf);
String result = writer.toString();
origin: apache/hbase

/**
 * Get the admin ACLs from the given ServletContext and check if the given
 * user is in the ACL.
 *
 * @param servletContext the context containing the admin ACL.
 * @param remoteUser the remote user to check for.
 * @return true if the user is present in the ACL, false if no ACL is set or
 *         the user is not present
 */
public static boolean userHasAdministratorAccess(ServletContext servletContext,
    String remoteUser) {
  Object aclAttr = servletContext.getAttribute(ADMINS_ACL);
  AccessControlList adminsAcl = (AccessControlList) aclAttr;
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(remoteUser);
  if (adminsAcl == null) {
    return false;
  }
  return adminsAcl.isUserAllowed(ugi);
}
origin: org.apache.hadoop/hadoop-kms

/**
 * Decides whether {@code ugi} may perform {@code opType} using the per-key
 * ACL map. If no ACL is configured for the operation, access is denied.
 *
 * @param keyAcl per-operation ACLs for a single key
 * @param ugi the user requesting access
 * @param opType the key operation being attempted
 * @return true iff an ACL exists for {@code opType} and allows {@code ugi}
 */
private boolean checkKeyAccess(Map<KeyOpType, AccessControlList> keyAcl,
    UserGroupInformation ugi, KeyOpType opType) {
  AccessControlList acl = keyAcl.get(opType);
  if (acl == null) {
    // If no acl is specified for this operation,
    // deny access
    LOG.debug("No ACL available for key, denying access for {}", opType);
    return false;
  } else {
    if (LOG.isDebugEnabled()) {
      // BUG FIX: the original appended ugi.getShortUserName() to the format
      // string with '+', so the first '{}' consumed opType and the final
      // placeholder was never filled. Pass the user name as a parameter.
      LOG.debug("Checking user [{}] for: {}: {}", ugi.getShortUserName(),
          opType.toString(), acl.getAclString());
    }
    return acl.isUserAllowed(ugi);
  }
}
origin: org.apache.hadoop/hadoop-common-test

/**
 * Verifies that add/remove of users and groups on a wildcard ("*") ACL
 * is a no-op: the ACL keeps allowing everyone and never records names.
 */
public void testAddRemoveToWildCardACL() {
  AccessControlList wildcardAcl = new AccessControlList(" * ");
  assertTrue(wildcardAcl.isAllAllowed());

  UserGroupInformation alienUser = UserGroupInformation.createUserForTesting(
      "drwho@APACHE.ORG", new String[] { "aliens" });
  UserGroupInformation tardisUser = UserGroupInformation.createUserForTesting(
      "drwho2@APACHE.ORG", new String[] { "tardis" });

  // Adding a user must not narrow a wildcard ACL, nor record the name.
  wildcardAcl.addUser("drwho");
  assertTrue(wildcardAcl.isAllAllowed());
  assertFalse(wildcardAcl.getAclString().contains("drwho"));

  // Same for groups.
  wildcardAcl.addGroup("tardis");
  assertTrue(wildcardAcl.isAllAllowed());
  assertFalse(wildcardAcl.getAclString().contains("tardis"));

  // Removals are likewise ignored; everyone remains allowed.
  wildcardAcl.removeUser("drwho");
  assertTrue(wildcardAcl.isAllAllowed());
  assertUserAllowed(alienUser, wildcardAcl);

  wildcardAcl.removeGroup("tardis");
  assertTrue(wildcardAcl.isAllAllowed());
  assertUserAllowed(tardisUser, wildcardAcl);
}
org.apache.hadoop.security.authorize.AccessControlList

Javadoc

Class representing a configured access control list.

Most used methods

  • <init>
    Construct a new ACL from a String representation of users and groups. The arguments are comma separated.
  • isUserAllowed
  • getAclString
    Returns the access control list as a String that can be used for building a new instance by sending
  • addUser
    Add user to the names of users allowed for this service.
  • isAllAllowed
  • toString
    Returns a descriptive String of the users and groups that are part of this ACL. Use #getAclString() to get the serialized form.
  • getGroups
  • addGroup
    Add group to the names of groups allowed for this service.
  • isUserInList
    Checks if a user represented by the provided UserGroupInformation is a member of the Access Control List.
  • buildACL
    Build ACL from the given two Strings. The Strings contain comma separated values.
  • getGroupsString
    Returns comma-separated concatenated single String of the set 'groups'
  • getString
    Returns comma-separated concatenated single String of all strings of the given set
  • getGroupsString,
  • getString,
  • getUsersString,
  • isWildCardACLValue,
  • readFields,
  • write,
  • getUsers,
  • removeGroup,
  • removeUser

Popular in Java

  • Reading from database using SQL prepared statement
  • getSystemService (Context)
  • setContentView (Activity)
  • onCreateOptionsMenu (Activity)
  • FileReader (java.io)
    A specialized Reader that reads from a file in the file system. All read requests made by calling me
  • Charset (java.nio.charset)
    A charset is a named mapping between Unicode characters and byte sequences. Every Charset can decode
  • Queue (java.util)
    A collection designed for holding elements prior to processing. Besides basic java.util.Collection o
  • AtomicInteger (java.util.concurrent.atomic)
    An int value that may be updated atomically. See the java.util.concurrent.atomic package specificati
  • Stream (java.util.stream)
    A sequence of elements supporting sequential and parallel aggregate operations. The following exampl
  • Table (org.hibernate.mapping)
    A relational table
Codota Logo
  • Products

    Search for Java codeSearch for JavaScript codeEnterprise
  • IDE Plugins

    IntelliJ IDEAWebStormAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogCodota Academy Plugin user guide Terms of usePrivacy policyJava Code IndexJavascript Code Index
Get Codota for your IDE now