/**
 * Runs a DROP DATABASE request past every registered metastore authorization
 * provider, using the input/output privilege sets declared by
 * {@link HiveOperation#DROPDATABASE}.
 *
 * @param context pre-event carrying the database about to be dropped
 * @throws InvalidOperationException when a provider denies the operation
 * @throws MetaException when a provider fails with a generic Hive error
 */
private void authorizeDropDatabase(PreDropDatabaseEvent context)
    throws InvalidOperationException, MetaException {
  try {
    for (HiveMetastoreAuthorizationProvider provider : tAuthorizers.get()) {
      // A fresh Database copy is handed to each provider, as in the original.
      provider.authorize(new Database(context.getDatabase()),
          HiveOperation.DROPDATABASE.getInputRequiredPrivileges(),
          HiveOperation.DROPDATABASE.getOutputRequiredPrivileges());
    }
  } catch (AuthorizationException e) {
    throw invalidOperationException(e);
  } catch (HiveException e) {
    throw metaException(e);
  }
}
/**
 * Returns the display name of the current command's operation, or
 * {@code null} when no command type has been set.
 */
public String getCommandType() {
  return commandType == null ? null : commandType.getOperationName();
}
/** * test that all enums in {@link HiveOperation} match one in @{link HiveOperationType} */ @Test public void checkHiveOperationTypeMatch(){ for (HiveOperation op : HiveOperation.values()) { try { HiveOperationType.valueOf(op.name()); } catch(IllegalArgumentException ex) { // if value is null or not found, exception would get thrown fail("Unable to find corresponding type in HiveOperationType for " + op + " : " + ex ); } } }
// NOTE(review): incomplete excerpt — braces are unbalanced and several argument lists
// are orphaned (e.g. "null, op.getOutputRequiredPrivileges());" with no call opener),
// so this cannot be compiled or safely restructured as shown. It appears to dispatch
// authorization by operation kind (CREATEDATABASE, CTAS/CREATETABLE, IMPORT) and then
// per write/read entity — confirm against the full source file before editing.
if (op.equals(HiveOperation.CREATEDATABASE)) { authorizer.authorize( op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges()); } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.CREATETABLE)) { authorizer.authorize( db.getDatabase(SessionState.get().getCurrentDatabase()), null, HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges()); } else { if (op.equals(HiveOperation.IMPORT)) { ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem; if (!isa.existsTable()) { authorizer.authorize( db.getDatabase(SessionState.get().getCurrentDatabase()), null, HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges()); if (!op.equals(HiveOperation.IMPORT)){ null, op.getOutputRequiredPrivileges()); if (part != null) { authorizer.authorize(write.getPartition(), null, op.getOutputRequiredPrivileges()); continue; op.getOutputRequiredPrivileges()); authorizer.authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null); continue;
// NOTE(review): incomplete excerpt — the enclosing for-loop header and surrounding call
// are cut off. It appears to resolve a HiveOperation whose string form matches
// "operation" and store it in commandType, then pass the operation name (or null)
// onward — confirm against the full source before editing.
org.apache.hadoop.hive.ql.plan.HiveOperation.values()) { if (op.toString().equals(operation.toString())) { commandType = op; semanticAnalyzer, null, commandType != null ? commandType.getOperationName() : null );
// NOTE(review): incomplete excerpt — if-bodies are unbalanced. It appears to check the
// subject's access for input and output hierarchies via authProvider.hasAccess and throw
// AuthorizationException on denial, with URI/QUERY/CTAS carve-outs in the "!found"
// conditions — confirm against the full source before editing.
LOG.debug("Going to authorize statement " + hiveOp.name() + " for subject " + subject.getName()); if (!authProvider.hasAccess(subject, inputHierarchy, entry.getValue(), activeRoleSet)) { throw new AuthorizationException("User " + subject.getName() + " does not have privileges for " + hiveOp.name()); if (!found && !key.equals(AuthorizableType.URI) && !(hiveOp.equals(HiveOperation.QUERY)) && !(hiveOp.equals(HiveOperation.CREATETABLE_AS_SELECT))) { if (!authProvider.hasAccess(subject, outputHierarchy, requiredOutputPrivileges.get(key), activeRoleSet)) { throw new AuthorizationException("User " + subject.getName() + " does not have privileges for " + hiveOp.name()); if(!found && !(key.equals(AuthorizableType.URI)) && !(hiveOp.equals(HiveOperation.QUERY))) {
/**
 * Maps a {@link HiveOperation} to the {@link HiveOperationType} constant
 * that shares its enum name.
 */
private static HiveOperationType getHiveOperationType(HiveOperation op) {
  String enumName = op.name();
  return HiveOperationType.valueOf(enumName);
}
private static void setupOperationMap() { //Populate OPERATION_MAP - string to HiveOperation mapping for (HiveOperation hiveOperation : HiveOperation.values()) { OPERATION_MAP.put(hiveOperation.getOperationName(), hiveOperation); } }
// NOTE(review): incomplete excerpt — the if-bodies are not closed. It appears to reject
// a plan's operation when it is disallowed inside an open transaction
// (OP_NOT_ALLOWED_IN_TXN) or requires an open transaction that does not exist
// (OP_NOT_ALLOWED_WITHOUT_TXN) — confirm against the full source before editing.
if (txnManager.isTxnOpen() && !plan.getOperation().isAllowedInTransaction()) { assert !txnManager.getAutoCommit() : "didn't expect AC=true"; return rollback(new CommandProcessorResponse(12, ErrorMsg.OP_NOT_ALLOWED_IN_TXN, null, plan.getOperationName(), Long.toString(txnManager.getCurrentTxnId()))); if(!txnManager.isTxnOpen() && plan.getOperation().isRequiresOpenTransaction()) { return rollback(new CommandProcessorResponse(12, ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN, null, plan.getOperationName()));
// NOTE(review): incomplete excerpt — opens two if-blocks that are never closed here.
// Appears to break out early when an operation normally disallowed in an explicit
// transaction is specially permitted by allowOperationInATransaction — confirm
// against the full source before editing.
if(!queryPlan.getOperation().isAllowedInTransaction() && isExplicitTransaction) { if(allowOperationInATransaction(queryPlan)) { break;
// NOTE(review): incomplete excerpt (appears verbatim twice in this collation) — braces
// are unbalanced and several argument lists are orphaned, so it cannot be compiled or
// safely restructured as shown. It appears to dispatch authorization by operation kind
// (CREATEDATABASE, CTAS/CREATETABLE, IMPORT) and then per write/read entity — confirm
// against the full source file before editing.
if (op.equals(HiveOperation.CREATEDATABASE)) { authorizer.authorize( op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges()); } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.CREATETABLE)) { authorizer.authorize( db.getDatabase(SessionState.get().getCurrentDatabase()), null, HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges()); } else { if (op.equals(HiveOperation.IMPORT)) { ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem; if (!isa.existsTable()) { authorizer.authorize( db.getDatabase(SessionState.get().getCurrentDatabase()), null, HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges()); if (!op.equals(HiveOperation.IMPORT)){ null, op.getOutputRequiredPrivileges()); if (part != null) { authorizer.authorize(write.getPartition(), null, op.getOutputRequiredPrivileges()); continue; op.getOutputRequiredPrivileges()); authorizer.authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null); continue;
// NOTE(review): incomplete excerpt (duplicated elsewhere in this collation) — the
// enclosing for-loop header and surrounding call are cut off. Appears to resolve a
// HiveOperation matching "operation" by string form, store it in commandType, and pass
// the operation name (or null) onward — confirm against the full source before editing.
org.apache.hadoop.hive.ql.plan.HiveOperation.values()) { if (op.toString().equals(operation.toString())) { commandType = op; semanticAnalyzer, null, commandType != null ? commandType.getOperationName() : null );
// NOTE(review): incomplete excerpt — the hasAccess call openers for the two
// "stmtAuthPrivileges.getGrantOption(), activeRoleSet))" argument tails are cut off and
// braces are unbalanced. Appears to be a grant-option-aware variant of the statement
// authorization check, throwing AuthorizationException on denial with URI/QUERY/CTAS
// carve-outs — confirm against the full source before editing.
LOG.debug("Going to authorize statement " + hiveOp.name() + " for subject " + subject.getName()); stmtAuthPrivileges.getGrantOption(), activeRoleSet)) { throw new AuthorizationException("User " + subject.getName() + " does not have privileges for " + hiveOp.name()); if (!found && !key.equals(AuthorizableType.URI) && !(hiveOp.equals(HiveOperation.QUERY)) && !(hiveOp.equals(HiveOperation.CREATETABLE_AS_SELECT))) { stmtAuthPrivileges.getGrantOption(), activeRoleSet)) { throw new AuthorizationException("User " + subject.getName() + " does not have privileges for " + hiveOp.name()); if(!found && !(key.equals(AuthorizableType.URI)) && !(hiveOp.equals(HiveOperation.QUERY))) {
/**
 * Resolves the {@link HiveOperationType} constant whose name matches the
 * given {@link HiveOperation}.
 */
private static HiveOperationType getHiveOperationType(HiveOperation hiveOp) {
  return HiveOperationType.valueOf(hiveOp.name());
}
/**
 * Runs an ALTER DATABASE request past every registered metastore authorization
 * provider, checking it against the privilege sets declared by
 * {@link HiveOperation#ALTERDATABASE_LOCATION}. The check targets the database's
 * pre-alter state.
 *
 * @param context pre-event carrying the old (pre-alter) database definition
 * @throws InvalidOperationException when a provider denies the operation
 * @throws MetaException when a provider fails with a generic Hive error
 */
private void authorizeAlterDatabase(PreAlterDatabaseEvent context)
    throws InvalidOperationException, MetaException {
  try {
    for (HiveMetastoreAuthorizationProvider provider : tAuthorizers.get()) {
      // A fresh Database copy is handed to each provider, as in the original.
      provider.authorize(new Database(context.getOldDatabase()),
          HiveOperation.ALTERDATABASE_LOCATION.getInputRequiredPrivileges(),
          HiveOperation.ALTERDATABASE_LOCATION.getOutputRequiredPrivileges());
    }
  } catch (AuthorizationException e) {
    throw invalidOperationException(e);
  } catch (HiveException e) {
    throw metaException(e);
  }
}
// NOTE(review): incomplete excerpt — braces are unbalanced and an argument tail
// ("op.getOutputRequiredPrivileges());") is orphaned. Appears to authorize CTAS/CREATE
// TABLE against the current database, write entities per partition, and read entities
// with optional column-level checks via the query's ParseContext — confirm against the
// full source before editing.
Hive db = sem.getDb(); if (op != null) { if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.CREATETABLE)) { ss.getAuthorizer().authorize( db.getDatabase(db.getCurrentDatabase()), null, HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges()); if (part != null) { ss.getAuthorizer().authorize(write.getPartition(), null, op.getOutputRequiredPrivileges()); continue; op.getOutputRequiredPrivileges()); if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.QUERY)) { SemanticAnalyzer querySem = (SemanticAnalyzer) sem; ParseContext parseCtx = querySem.getParseContext(); if (cols != null && cols.size() > 0) { ss.getAuthorizer().authorize(read.getPartition().getTable(), read.getPartition(), cols, op.getInputRequiredPrivileges(), null); } else { ss.getAuthorizer().authorize(read.getPartition(), op.getInputRequiredPrivileges(), null); if (cols != null && cols.size() > 0) {
// getOperationName(): returns the display name of the current operation, or null when
// no operation is set.
// NOTE(review): the trailing "public HiveOperation getOperation() {" is the truncated
// start of the next accessor — its body lies outside this excerpt.
public String getOperationName() { return operation == null ? null : operation.getOperationName(); } public HiveOperation getOperation() {
// NOTE(review): incomplete excerpt — "object", "jsonUser", "out", and "work" are
// declared outside this view. Appears to emit CURRENT_USER and OPERATION entries into a
// JSON object, adding OPERATION to the object only when formatted output is requested —
// confirm against the full source before editing.
object.put("CURRENT_USER", jsonUser); Object jsonOperation = toJson("OPERATION", operation.name(), out, work); if (work.isFormatted()) { object.put("OPERATION", jsonOperation);