/**
 * Creates a new user exception builder for permission errors, with no cause attached.
 *
 * @see com.dremio.exec.proto.UserBitShared.DremioPBError.ErrorType#PERMISSION
 * @return user exception builder of type PERMISSION
 */
public static Builder permissionError() {
  // Delegate to the cause-accepting overload with a null cause.
  return permissionError(null);
}
/**
 * Lists table names in the given database after checking SHOW TABLES authorization.
 * On an authorization failure, either returns an empty list (when errors are ignored)
 * or surfaces a PERMISSION user exception.
 */
@Override
public List<String> getTableNames(final String dbName, boolean ignoreAuthzErrors) throws TException {
  try {
    authorizer.authorizeShowTables(dbName);
  } catch (final HiveAccessControlException e) {
    if (!ignoreAuthzErrors) {
      throw UserException.permissionError(e).build(logger);
    }
    // Caller asked to swallow authorization errors: act as if there are no tables.
    return Collections.emptyList();
  }
  return super.getTableNames(dbName, ignoreAuthzErrors);
}
/**
 * Lists database names after checking SHOW DATABASES authorization.
 * On an authorization failure, either returns an empty list (when errors are ignored)
 * or surfaces a PERMISSION user exception.
 */
@Override
public List<String> getDatabases(boolean ignoreAuthzErrors) throws TException {
  try {
    authorizer.authorizeShowDatabases();
  } catch (final HiveAccessControlException e) {
    if (!ignoreAuthzErrors) {
      throw UserException.permissionError(e).build(logger);
    }
    // Caller asked to swallow authorization errors: act as if there are no databases.
    return Collections.emptyList();
  }
  return super.getDatabases(ignoreAuthzErrors);
}
/**
 * Fetches table metadata after checking read authorization on the table.
 * On an authorization failure, either returns {@code null} (when errors are ignored)
 * or surfaces a PERMISSION user exception.
 */
@Override
Table getTable(final String dbName, final String tableName, boolean ignoreAuthzErrors) throws TException {
  try {
    authorizer.authorizeReadTable(dbName, tableName);
  } catch (final HiveAccessControlException e) {
    if (ignoreAuthzErrors) {
      // Caller asked to swallow authorization errors: behave as if the table is absent.
      return null;
    }
    throw UserException.permissionError(e).build(logger);
  }
  return super.getTable(dbName, tableName, ignoreAuthzErrors);
}
/**
 * Obtains a Hive metastore delegation token for the proxy user via the process-user
 * client, then installs it in the proxy user's {@link UserGroupInformation} and marks
 * the token signature in the proxy user's {@link HiveConf}.
 *
 * @param proxyUserHiveConf  HiveConf of the proxy user; receives the token signature setting
 * @param proxyUGI           proxy user's UserGroupInformation; receives the token
 * @param processHiveClient  Hive client running as the process user, used to mint the token
 * @throws UserException (PERMISSION) if token generation fails, with hints about the
 *         hadoop.proxyuser.* impersonation settings to check
 */
protected static void getAndSetDelegationToken(final HiveConf proxyUserHiveConf,
    final UserGroupInformation proxyUGI, final HiveClient processHiveClient) {
  checkNotNull(processHiveClient, "process user Hive client required");
  checkNotNull(proxyUserHiveConf, "Proxy user HiveConf required");
  checkNotNull(proxyUGI, "Proxy user UserGroupInformation required");

  // Single signature string used both for the UGI token and the conf key, so they always agree.
  final String tokenSignature = "DremioDelegationTokenForHiveMetaStoreServer";
  try {
    final String delegationToken = processHiveClient.getDelegationToken(proxyUGI.getUserName());
    Utils.setTokenStr(proxyUGI, delegationToken, tokenSignature);
    proxyUserHiveConf.set("hive.metastore.token.signature", tokenSignature);
  } catch (Exception e) {
    final String processUsername = ImpersonationUtil.getProcessUserUGI().getShortUserName();
    throw UserException.permissionError(e)
        .message("Failed to generate Hive metastore delegation token for user %s. " +
            "Check Hadoop services (including metastore) have correct proxy user impersonation settings (%s, %s) " +
            "and services are restarted after applying those settings.",
            proxyUGI.getUserName(),
            String.format("hadoop.proxyuser.%s.hosts", processUsername),
            String.format("hadoop.proxyuser.%s.groups", processUsername)
        )
        .addContext("Proxy user", proxyUGI.getUserName())
        .build(logger);
  }
}
/**
 * Make sure the <i>givenPath</i> refers to an entity under the given <i>basePath</i>. Idea is to
 * avoid using ".." to refer to entities outside the base path.
 *
 * <p>Note: a bare prefix match is not enough — base "/data" must not admit "/data2". The check
 * therefore requires either an exact match or a prefix that ends at a path-separator boundary.
 *
 * @param basePath  root the access must stay within
 * @param givenPath path being accessed
 * @throws UserException (PERMISSION) when givenPath lies outside basePath
 */
public static void verifyNoAccessOutsideBase(Path basePath, Path givenPath) {
  final String givenPathNormalized = Path.getPathWithoutSchemeAndAuthority(givenPath).toString();
  final String basePathNormalized = Path.getPathWithoutSchemeAndAuthority(basePath).toString();
  // Exact match, or prefix terminated by a separator. Handles a base that already ends with "/".
  final String basePrefix =
      basePathNormalized.endsWith("/") ? basePathNormalized : basePathNormalized + "/";
  final boolean underBase =
      givenPathNormalized.equals(basePathNormalized) || givenPathNormalized.startsWith(basePrefix);
  if (!underBase) {
    throw UserException.permissionError()
        .message("Not allowed to access files outside of the source root")
        .addContext("Source root", basePathNormalized)
        .addContext("Requested to path", givenPathNormalized)
        .build(logger);
  }
}
/**
 * Renders a function call for pushdown to Elasticsearch. Fails with a RuntimeException for
 * functions with no Elastic equivalent, and with a PERMISSION user exception when the
 * pushdown would require scripts but scripts are disabled. Marks the render as
 * script-requiring before delegating to the function's renderer.
 */
@Override
public FunctionRender visitCall(RexCall call) {
  final String functionName = call.getOperator().getName().toLowerCase();
  final ElasticFunction function = ElasticFunctions.getFunction(call);
  if (function == null) {
    throw new RuntimeException("Unknown function, " + functionName + ", encountered while trying to pushdown to elasticsearch.");
  }
  if (!renderer.isScriptsEnabled()) {
    throw UserException.permissionError()
        .message("Scripts must be enabled to allow for complex expression pushdowns.")
        .build(logger);
  }
  requiresScripts = true;
  return function.render(renderer, call);
}
if (!schemaConfig.getIgnoreAuthErrors()) { logger.debug(e.getMessage()); throw UserException.permissionError(e) .message("Not authorized to list or query tables in schema %s", tableSchemaPath) .build(logger); if (!schemaConfig.getIgnoreAuthErrors()) { logger.debug(e.getMessage()); throw UserException.permissionError(e) .message("Not authorized to read view [%s] in schema %s", tableSchemaPath.get(tableSchemaPath.size() - 1), tableSchemaPath.subList(0, tableSchemaPath.size() - 1)) .build(logger);
throw (UserException) cause; throw UserException.permissionError(cause) .message("Access denied reading dataset %s.", namespaceKey.toString()) .build(logger);
addContextAndThrow(UserException.permissionError().message("Unauthorized to connect to Elasticsearch cluster. " + "Please make sure that the username and the password provided are correct."), contextWithAlias); addContextAndThrow(UserException.permissionError().message(unauthorizedMsg), contextWithAlias);
throw UserException.permissionError().message("No permission to requested path.").build(logger);
@Override public FunctionRender visitLiteral(RexLiteral literal) { if (!renderer.isScriptsEnabled()) { throw UserException.permissionError().message("Scripts must be enabled to allow for complex expression pushdowns.").build(logger);
/**
 * Verifies the requesting user may read the given dataset, consulting the permissions cache
 * under the read lock. Throws a PERMISSION user exception when access is denied.
 */
public void checkAccess(NamespaceKey key, DatasetConfig datasetConfig, final MetadataRequestOptions options) {
  try (AutoCloseableLock lock = readLock()) {
    checkState();
    final boolean allowed = permissionsCache.hasAccess(
        options.getSchemaConfig().getUserName(), key, datasetConfig, options.getStatsCollector());
    if (allowed) {
      return;
    }
    throw UserException.permissionError()
        .message("Access denied reading dataset %s.", key)
        .build(logger);
  }
}
if (!retrievalOptions.ignoreAuthzErrors()) { logger.debug(e.getMessage()); throw UserException.permissionError(e) .message("Not authorized to read table %s at path ", datasetPath) .build(logger);
/**
 * Maps an exception raised during SQL handling to the exception the caller should see.
 * UserExceptions pass through untouched; validation/permission/unsupported errors are
 * rethrown as built UserExceptions; IO and conversion failures become QueryInputException;
 * anything else is either rethrown as a plan error (when {@code coerceToPlan}) or returned
 * as-is. Note the instanceof chain is order-sensitive and preserved exactly.
 */
public static Exception coerceException(Logger logger, String sql, Exception e, boolean coerceToPlan) {
  // Already a user-facing exception: hand it back unchanged.
  if (e instanceof UserException) {
    return e;
  }
  if (e instanceof ValidationException) {
    throw validationError(sql, (ValidationException) e).build(logger);
  }
  if (e instanceof AccessControlException) {
    throw UserException.permissionError(e)
        .addContext(SQL_QUERY_CONTEXT, sql)
        .build(logger);
  }
  if (e instanceof SqlUnsupportedException) {
    throw UserException.unsupportedError(e)
        .addContext(SQL_QUERY_CONTEXT, sql)
        .build(logger);
  }
  if (e instanceof IOException || e instanceof RelConversionException) {
    return new QueryInputException("Failure handling SQL.", e);
  }
  if (coerceToPlan) {
    throw planError(sql, e).build(logger);
  }
  return e;
} }
} catch (AccessControlException e) { throw UserException .permissionError(e) .build(logger); } catch(SqlUnsupportedException e) {
.permissionError(e) .message("Unauthorized to drop table") .build(logger);