@Override public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { PrincipalDesc principalDesc = null; PrivilegeObjectDesc privHiveObj = null; ASTNode param = null; if (ast.getChildCount() > 0) { param = (ASTNode) ast.getChild(0); principalDesc = AuthorizationParseUtils.getPrincipalDesc(param); if (principalDesc != null) { param = (ASTNode) ast.getChild(1); // shift one } } if (param != null) { if (param.getType() == HiveParser.TOK_RESOURCE_ALL) { privHiveObj = new PrivilegeObjectDesc(); } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) { privHiveObj = parsePrivObject(param); } } ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj); return TaskFactory.get(new DDLWork(inputs, outputs, showGrant)); }
@Override public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { PrincipalDesc principalDesc = null; PrivilegeObjectDesc privHiveObj = null; ASTNode param = null; if (ast.getChildCount() > 0) { param = (ASTNode) ast.getChild(0); principalDesc = AuthorizationParseUtils.getPrincipalDesc(param); if (principalDesc != null) { param = (ASTNode) ast.getChild(1); // shift one } } if (param != null) { if (param.getType() == HiveParser.TOK_RESOURCE_ALL) { privHiveObj = new PrivilegeObjectDesc(); } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) { privHiveObj = parsePrivObject(param); } } ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj); return TaskFactory.get(new DDLWork(inputs, outputs, showGrant), conf); }
protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException { PrivilegeObjectDesc subject = new PrivilegeObjectDesc(); ASTNode child = (ASTNode) ast.getChild(0); ASTNode gchild = (ASTNode)child.getChild(0); if (child.getType() == HiveParser.TOK_TABLE_TYPE) { subject.setTable(true); String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild); subject.setObject(BaseSemanticAnalyzer.getDotName(qualified)); } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) { throw new SemanticException("Hive authorization does not support the URI or SERVER objects"); } else { subject.setTable(false); subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText())); } //if partition spec node is present, set partition spec for (int i = 1; i < child.getChildCount(); i++) { gchild = (ASTNode) child.getChild(i); if (gchild.getType() == HiveParser.TOK_PARTSPEC) { subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild)); } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) { subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild)); } } return subject; }
protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException { PrivilegeObjectDesc subject = new PrivilegeObjectDesc(); ASTNode child = (ASTNode) ast.getChild(0); ASTNode gchild = (ASTNode)child.getChild(0); if (child.getType() == HiveParser.TOK_TABLE_TYPE) { subject.setTable(true); String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild); subject.setObject(BaseSemanticAnalyzer.getDotName(qualified)); } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) { throw new SemanticException("Hive authorization does not support the URI or SERVER objects"); } else { subject.setTable(false); subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText())); } //if partition spec node is present, set partition spec for (int i = 1; i < child.getChildCount(); i++) { gchild = (ASTNode) child.getChild(i); if (gchild.getType() == HiveParser.TOK_PARTSPEC) { subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild)); } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) { subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild)); } } return subject; }
@Override public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { PrincipalDesc principalDesc = null; PrivilegeObjectDesc privHiveObj = null; ASTNode param = null; if (ast.getChildCount() > 0) { param = (ASTNode) ast.getChild(0); principalDesc = AuthorizationParseUtils.getPrincipalDesc(param); if (principalDesc != null) { param = (ASTNode) ast.getChild(1); // shift one } } if (param != null) { if (param.getType() == HiveParser.TOK_RESOURCE_ALL) { privHiveObj = new PrivilegeObjectDesc(); } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) { privHiveObj = parsePrivObject(param); } } ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj); return TaskFactory.get(new DDLWork(inputs, outputs, showGrant), conf); }
/**
 * Analyzes a privilege-object AST node and records the referenced
 * table/partition as a write entity.
 *
 * @param ast     privilege-object parse tree; child 0 is the object name,
 *                later children may carry a partition spec
 * @param outputs collection the resolved write entity is added to
 * @return descriptor for the privilege object
 * @throws SemanticException if metastore lookup of the table or
 *         partition fails
 */
private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast,
    HashSet<WriteEntity> outputs) throws SemanticException {
  PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
  subject.setObject(unescapeIdentifier(ast.getChild(0).getText()));
  if (ast.getChildCount() > 1) {
    // NOTE(review): the loop starts at index 0, so the object-name child
    // is revisited and falls into the else branch, which sets the table
    // flag (always true, since child 0 exists here). Looks like legacy
    // behavior — confirm before tightening.
    for (int idx = 0; idx < ast.getChildCount(); idx++) {
      ASTNode element = (ASTNode) ast.getChild(idx);
      if (element.getToken().getType() == HiveParser.TOK_PARTSPEC) {
        subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(element));
      } else {
        subject.setTable(ast.getChild(0) != null);
      }
    }
  }
  try {
    if (subject.getTable()) {
      Table table = db.getTable(subject.getObject());
      if (subject.getPartSpec() == null) {
        outputs.add(new WriteEntity(table));
      } else {
        Partition partition = db.getPartition(table, subject.getPartSpec(), false);
        outputs.add(new WriteEntity(partition));
      }
    }
  } catch (HiveException e) {
    // Surface metastore failures as analysis errors, preserving the cause.
    throw new SemanticException(e);
  }
  return subject;
}
ASTNode child = (ASTNode) ast.getChild(1); if (child.getToken().getType() == HiveParser.TOK_PRIV_OBJECT_COL) { privHiveObj = new PrivilegeObjectDesc(); privHiveObj.setObject(unescapeIdentifier(child.getChild(0).getText())); if (child.getChildCount() > 1) {
protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException { PrivilegeObjectDesc subject = new PrivilegeObjectDesc(); ASTNode child = (ASTNode) ast.getChild(0); ASTNode gchild = (ASTNode)child.getChild(0); if (child.getType() == HiveParser.TOK_TABLE_TYPE) { subject.setTable(true); String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild); subject.setObject(BaseSemanticAnalyzer.getDotName(qualified)); } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) { throw new SemanticException("Hive authorization does not support the URI or SERVER objects"); } else { subject.setTable(false); subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText())); } //if partition spec node is present, set partition spec for (int i = 1; i < child.getChildCount(); i++) { gchild = (ASTNode) child.getChild(i); if (gchild.getType() == HiveParser.TOK_PARTSPEC) { subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild)); } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) { subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild)); } } return subject; }