protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException { PrivilegeObjectDesc subject = new PrivilegeObjectDesc(); ASTNode child = (ASTNode) ast.getChild(0); ASTNode gchild = (ASTNode)child.getChild(0); if (child.getType() == HiveParser.TOK_TABLE_TYPE) { subject.setTable(true); String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild); subject.setObject(BaseSemanticAnalyzer.getDotName(qualified)); } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) { throw new SemanticException("Hive authorization does not support the URI or SERVER objects"); } else { subject.setTable(false); subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText())); } //if partition spec node is present, set partition spec for (int i = 1; i < child.getChildCount(); i++) { gchild = (ASTNode) child.getChild(i); if (gchild.getType() == HiveParser.TOK_PARTSPEC) { subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild)); } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) { subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild)); } } return subject; }
/**
 * Parses the privilege target and, when it is a table, records the table (or the
 * specific partition when a partition spec is present) as a no-lock DDL write entity.
 */
private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast,
    HashSet<WriteEntity> outputs) throws SemanticException {
  PrivilegeObjectDesc subject = parsePrivObject(ast);
  if (subject.getTable()) {
    Table table = getTable(subject.getObject());
    if (subject.getPartSpec() == null) {
      outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK));
    } else {
      Partition partition = getPartition(table, subject.getPartSpec());
      outputs.add(new WriteEntity(partition, WriteEntity.WriteType.DDL_NO_LOCK));
    }
  }
  return subject;
}
@Override public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc) throws HiveException { // null means ALL for show grants, GLOBAL for grant/revoke HivePrivilegeObjectType objectType = null; String[] dbTable; List<String> partSpec = null; List<String> columns = null; if (privSubjectDesc == null) { dbTable = new String[] {null, null}; } else { if (privSubjectDesc.getTable()) { dbTable = Utilities.getDbTableName(privSubjectDesc.getObject()); } else { dbTable = new String[] {privSubjectDesc.getObject(), null}; } if (privSubjectDesc.getPartSpec() != null) { partSpec = new ArrayList<String>(privSubjectDesc.getPartSpec().values()); } columns = privSubjectDesc.getColumns(); objectType = AuthorizationUtils.getPrivObjectType(privSubjectDesc); } return new HivePrivilegeObject(objectType, dbTable[0], dbTable[1], partSpec, columns, null); }
/**
 * Parses the privilege object from the AST and, for table subjects, records the
 * table (or one partition of it) as a write entity.
 *
 * NOTE(review): the child loop starts at index 0, so the object-name node itself
 * also flows through the loop body; confirm this is intentional.
 */
private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast,
    HashSet<WriteEntity> outputs) throws SemanticException {
  PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
  subject.setObject(unescapeIdentifier(ast.getChild(0).getText()));
  if (ast.getChildCount() > 1) {
    for (int i =0;i< ast.getChildCount();i++) {
      ASTNode astChild = (ASTNode) ast.getChild(i);
      if (astChild.getToken().getType() == HiveParser.TOK_PARTSPEC) {
        subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(astChild));
      } else {
        // NOTE(review): getChild(0) was already read above and is non-null here,
        // so this marks the subject as a table whenever any non-partspec child
        // is seen — verify this matches the grammar's intent.
        subject.setTable(ast.getChild(0) != null);
      }
    }
  }
  try {
    if (subject.getTable()) {
      Table tbl = db.getTable(subject.getObject());
      if (subject.getPartSpec() != null) {
        // Third argument false: do not create the partition if it is missing.
        Partition part = db.getPartition(tbl, subject.getPartSpec(), false);
        outputs.add(new WriteEntity(part));
      } else {
        outputs.add(new WriteEntity(tbl));
      }
    }
  } catch (HiveException e) {
    // Surface metastore lookup failures as semantic errors, preserving the cause.
    throw new SemanticException(e);
  }
  return subject;
}
// Default constructor. The superclass marks subjects as tables by default;
// reset the flag so callers must opt in explicitly.
public SentryHivePrivilegeObjectDesc() {
  // reset table type which is on by default
  super.setTable(false);
}
@Override public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { PrincipalDesc principalDesc = null; PrivilegeObjectDesc privHiveObj = null; ASTNode param = null; if (ast.getChildCount() > 0) { param = (ASTNode) ast.getChild(0); principalDesc = AuthorizationParseUtils.getPrincipalDesc(param); if (principalDesc != null) { param = (ASTNode) ast.getChild(1); // shift one } } if (param != null) { if (param.getType() == HiveParser.TOK_RESOURCE_ALL) { privHiveObj = new PrivilegeObjectDesc(); } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) { privHiveObj = parsePrivObject(param); } } ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj); return TaskFactory.get(new DDLWork(inputs, outputs, showGrant)); }
/**
 * Builds the REVOKE task. Rejects partition-scoped revokes and any principal
 * that is not a role, since Sentry supports neither.
 */
@Override
public Task<? extends Serializable> createRevokeTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privileges = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  List<PrincipalDesc> principals = analyzePrincipalListDef((ASTNode) ast.getChild(1));
  // The privilege object is optional (third child when present).
  PrivilegeObjectDesc privilegeObj =
      ast.getChildCount() > 2 ? analyzePrivilegeObject((ASTNode) ast.getChild(2)) : null;
  if (privilegeObj != null && privilegeObj.getPartSpec() != null) {
    throw new SemanticException(SentryHiveConstants.PARTITION_PRIVS_NOT_SUPPORTED);
  }
  // Only roles may appear in REVOKE; reject users/groups explicitly.
  for (PrincipalDesc principal : principals) {
    if (principal.getType() != PrincipalType.ROLE) {
      String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL
          + principal.getType();
      throw new SemanticException(msg);
    }
  }
  RevokeDesc revokeDesc = new RevokeDesc(privileges, principals, privilegeObj);
  return createTask(new DDLWork(inputs, outputs, revokeDesc));
}
/**
 * Maps a parsed {@link PrivilegeObjectDesc} onto the authorization API's
 * {@link HivePrivilegeObject}.
 *
 * @param privSubjectDesc parsed subject; null means ALL for SHOW GRANT and
 *                        GLOBAL for GRANT/REVOKE, yielding an all-null object
 */
@Override
public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc)
    throws HiveException {
  // null means ALL for show grants, GLOBAL for grant/revoke
  HivePrivilegeObjectType objectType = null;
  String[] dbTable;
  List<String> partSpec = null;
  List<String> columns = null;
  if (privSubjectDesc == null) {
    dbTable = new String[] {null, null};
  } else {
    if (privSubjectDesc.getTable()) {
      // Table subject: split "db.table" into its two components.
      dbTable = Utilities.getDbTableName(privSubjectDesc.getObject());
    } else {
      // Non-table subject: object name fills the db slot (presumably a database name).
      dbTable = new String[] {privSubjectDesc.getObject(), null};
    }
    if (privSubjectDesc.getPartSpec() != null) {
      // Only the partition values (not the keys) are carried over.
      partSpec = new ArrayList<String>(privSubjectDesc.getPartSpec().values());
    }
    columns = privSubjectDesc.getColumns();
    objectType = AuthorizationUtils.getPrivObjectType(privSubjectDesc);
  }
  return new HivePrivilegeObject(objectType, dbTable[0], dbTable[1], partSpec, columns, null);
}
// Default constructor. The superclass marks subjects as tables by default;
// reset the flag so callers must opt in explicitly.
public SentryHivePrivilegeObjectDesc() {
  // reset table type which is on by default
  super.setTable(false);
}
@Override public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { PrincipalDesc principalDesc = null; PrivilegeObjectDesc privHiveObj = null; ASTNode param = null; if (ast.getChildCount() > 0) { param = (ASTNode) ast.getChild(0); principalDesc = AuthorizationParseUtils.getPrincipalDesc(param); if (principalDesc != null) { param = (ASTNode) ast.getChild(1); // shift one } } if (param != null) { if (param.getType() == HiveParser.TOK_RESOURCE_ALL) { privHiveObj = new PrivilegeObjectDesc(); } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) { privHiveObj = parsePrivObject(param); } } ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj); return TaskFactory.get(new DDLWork(inputs, outputs, showGrant), conf); }
/**
 * Parses a privilege-object AST node into a {@link PrivilegeObjectDesc}.
 * The first child identifies the object kind and name; later children may carry
 * a partition spec (TOK_PARTSPEC) and/or a column list (TOK_TABCOLNAME).
 *
 * @throws SemanticException for URI/SERVER objects, which are not supported
 */
protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException {
  PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
  ASTNode child = (ASTNode) ast.getChild(0);
  ASTNode gchild = (ASTNode)child.getChild(0);
  if (child.getType() == HiveParser.TOK_TABLE_TYPE) {
    subject.setTable(true);
    // Tables may be db-qualified; normalize to the dotted "db.table" form.
    String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild);
    subject.setObject(BaseSemanticAnalyzer.getDotName(qualified));
  } else if (child.getType() == HiveParser.TOK_URI_TYPE
      || child.getType() == HiveParser.TOK_SERVER_TYPE) {
    throw new SemanticException("Hive authorization does not support the URI or SERVER objects");
  } else {
    subject.setTable(false);
    subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText()));
  }
  //if partition spec node is present, set partition spec
  for (int i = 1; i < child.getChildCount(); i++) {
    gchild = (ASTNode) child.getChild(i);
    if (gchild.getType() == HiveParser.TOK_PARTSPEC) {
      subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild));
    } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) {
      subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild));
    }
  }
  return subject;
}
/**
 * Parses the privilege target and, when it is a table, registers the table (or
 * the specific partition when a partition spec is present) as a DDL write entity.
 */
private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast,
    HashSet<WriteEntity> outputs) throws SemanticException {
  PrivilegeObjectDesc subject = parsePrivObject(ast);
  if (subject.getTable()) {
    Table tbl = getTable(subject.getObject());
    if (subject.getPartSpec() != null) {
      Partition part = getPartition(tbl, subject.getPartSpec());
      // DDL_NO_LOCK: presumably no lock is needed for this metadata-only change.
      outputs.add(new WriteEntity(part, WriteEntity.WriteType.DDL_NO_LOCK));
    } else {
      outputs.add(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
    }
  }
  return subject;
}
public static HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc) throws HiveException { // null means ALL for show grants, GLOBAL for grant/revoke HivePrivilegeObjectType objectType = null; String[] dbTable; List<String> partSpec = null; List<String> columns = null; if (privSubjectDesc == null) { dbTable = new String[] {null, null}; } else { if (privSubjectDesc.getTable()) { dbTable = Utilities.getDbTableName(privSubjectDesc.getObject()); } else { dbTable = new String[] {privSubjectDesc.getObject(), null}; } if (privSubjectDesc.getPartSpec() != null) { partSpec = new ArrayList<String>(privSubjectDesc.getPartSpec().values()); } columns = privSubjectDesc.getColumns(); objectType = getPrivObjectType(privSubjectDesc); } return new HivePrivilegeObject(objectType, dbTable[0], dbTable[1], partSpec, columns, null); }
/**
 * Builds the SHOW GRANT task. Both the principal and the privilege object are
 * optional in the AST: child 0 may be a principal; when it is, the object (if
 * any) shifts to child 1.
 */
@Override
public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  PrincipalDesc principalDesc = null;
  PrivilegeObjectDesc privHiveObj = null;
  ASTNode param = null;
  if (ast.getChildCount() > 0) {
    param = (ASTNode) ast.getChild(0);
    principalDesc = AuthorizationParseUtils.getPrincipalDesc(param);
    if (principalDesc != null) {
      param = (ASTNode) ast.getChild(1); // shift one
    }
  }
  if (param != null) {
    if (param.getType() == HiveParser.TOK_RESOURCE_ALL) {
      // Bare resource-all token: presumably an empty descriptor stands for every resource.
      privHiveObj = new PrivilegeObjectDesc();
    } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
      privHiveObj = parsePrivObject(param);
    }
  }
  ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc,
      privHiveObj);
  return TaskFactory.get(new DDLWork(inputs, outputs, showGrant), conf);
}
protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException { PrivilegeObjectDesc subject = new PrivilegeObjectDesc(); ASTNode child = (ASTNode) ast.getChild(0); ASTNode gchild = (ASTNode)child.getChild(0); if (child.getType() == HiveParser.TOK_TABLE_TYPE) { subject.setTable(true); String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild); subject.setObject(BaseSemanticAnalyzer.getDotName(qualified)); } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) { throw new SemanticException("Hive authorization does not support the URI or SERVER objects"); } else { subject.setTable(false); subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText())); } //if partition spec node is present, set partition spec for (int i = 1; i < child.getChildCount(); i++) { gchild = (ASTNode) child.getChild(i); if (gchild.getType() == HiveParser.TOK_PARTSPEC) { subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild)); } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) { subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild)); } } return subject; }
public static void grantUserTable(String privStr, PrivilegeType privType, QueryState queryState, Hive db) throws Exception { DDLWork work = AuthorizationTestUtil.analyze("GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, queryState, db); GrantDesc grantDesc = work.getGrantDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); //check privileges for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) { Assert.assertEquals(privType, privilege.getPrivilege().getPriv()); } //check other parts for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.USER, principal.getType()); Assert.assertEquals(USER, principal.getName()); } Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable()); Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject()); }
/**
 * Parses the privilege target and, when it is a table, records the table (or the
 * specific partition when a partition spec is present) as a no-lock DDL write entity.
 */
private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast,
    HashSet<WriteEntity> outputs) throws SemanticException {
  PrivilegeObjectDesc desc = parsePrivObject(ast);
  if (desc.getTable()) {
    Table targetTable = getTable(desc.getObject());
    if (desc.getPartSpec() == null) {
      outputs.add(new WriteEntity(targetTable, WriteEntity.WriteType.DDL_NO_LOCK));
    } else {
      Partition targetPart = getPartition(targetTable, desc.getPartSpec());
      outputs.add(new WriteEntity(targetPart, WriteEntity.WriteType.DDL_NO_LOCK));
    }
  }
  return desc;
}
ASTNode child = (ASTNode) ast.getChild(1); if (child.getToken().getType() == HiveParser.TOK_PRIV_OBJECT_COL) { privHiveObj = new PrivilegeObjectDesc(); privHiveObj.setObject(unescapeIdentifier(child.getChild(0).getText())); if (child.getChildCount() > 1) { for (int i = 1; i < child.getChildCount(); i++) { ASTNode grandChild = (ASTNode) child.getChild(i); if (grandChild.getToken().getType() == HiveParser.TOK_PARTSPEC) { privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(grandChild)); } else if (grandChild.getToken().getType() == HiveParser.TOK_TABCOLNAME) { cols = getColumnNames((ASTNode) grandChild); } else { privHiveObj.setTable(child.getChild(i) != null);