private void assertCombineInputFormat(Tree numerator, String message) throws SemanticException {
  String inputFormat = conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez") ?
      HiveConf.getVar(conf, HiveConf.ConfVars.HIVETEZINPUTFORMAT) :
      HiveConf.getVar(conf, HiveConf.ConfVars.HIVEINPUTFORMAT);
  if (!inputFormat.equals(CombineHiveInputFormat.class.getName())) {
    throw new SemanticException(generateErrorMessage((ASTNode) numerator,
        message + " sampling is not supported in " + inputFormat));
  }
}
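// Every call site in this collection routes through generateErrorMessage, which prefixes
// the message with the position of the offending token so the error points back into the
// query text. Below is a minimal, self-contained sketch of that general pattern; the Node
// stand-in and ErrorMessages class are invented for illustration and are NOT Hive's actual
// ASTNode or BaseSemanticAnalyzer implementation.
final class Node {
    final int line;     // 1-based line of the offending token
    final int charPos;  // 0-based character position within the line
    final String text;  // token text, echoed back to the user

    Node(int line, int charPos, String text) {
        this.line = line;
        this.charPos = charPos;
        this.text = text;
    }
}

final class ErrorMessages {
    // Prefix a semantic error with the token position so the user can locate
    // the offending clause; fall back to the bare message for a null node.
    static String generateErrorMessage(Node node, String message) {
        if (node == null) {
            return message;
        }
        return "Line " + node.line + ":" + node.charPos + " " + message
            + " near token '" + node.text + "'";
    }

    public static void main(String[] args) {
        Node n = new Node(3, 17, "TABLESAMPLE");
        System.out.println(generateErrorMessage(n, "Unsupported TABLESAMPLE clause"));
    }
}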
Table target = qb.getMetaData().getDestTableForAlias(dest);
Partition partition = target == null ? qb.getMetaData().getDestPartitionForAlias(dest) : null;
if (target == null && partition == null) {
  throw new SemanticException(generateErrorMessage(selExprList,
      "No table/partition found in QB metadata for dest='" + dest + "'"));
}
throw new SemanticException(generateErrorMessage(selExprList,
    "Expected " + targetTableSchema.size() + " columns for " + dest +
        (target != null ? "/" + target.getCompleteName() :
            (partition != null ? "/" + partition.getCompleteName() : "")) + ...
if (pos == null) {
  throw new SemanticException(generateErrorMessage((ASTNode) child.getChild(j),
      ErrorMsg.HIVE_GROUPING_SETS_EXPR_NOT_IN_GROUPBY.getErrorCodedMsg()));
private void processQueryWindowClause(WindowingSpec spec, ASTNode node)
    throws SemanticException {
  ASTNode nameNode = (ASTNode) node.getChild(0);
  ASTNode wsNode = (ASTNode) node.getChild(1);
  if (spec.getWindowSpecs() != null && spec.getWindowSpecs().containsKey(nameNode.getText())) {
    throw new SemanticException(generateErrorMessage(nameNode,
        "Duplicate definition of window " + nameNode.getText() + " is not allowed"));
  }
  WindowSpec ws = processWindowSpec(wsNode);
  spec.addWindowSpec(nameNode.getText(), ws);
}
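// The guard above is a uniqueness check against the registry of named window specs.
// A small self-contained sketch of the same idea with a plain HashMap; WindowRegistry
// and its methods are illustrative names, not Hive API.
import java.util.HashMap;
import java.util.Map;

final class WindowRegistry {
    private final Map<String, String> specs = new HashMap<>(); // window name -> spec text

    // Reject a second definition of the same window name, mirroring the
    // containsKey guard in processQueryWindowClause above.
    void define(String name, String spec) {
        if (specs.containsKey(name)) {
            throw new IllegalArgumentException(
                "Duplicate definition of window " + name + " is not allowed");
        }
        specs.put(name, spec);
    }

    public static void main(String[] args) {
        WindowRegistry r = new WindowRegistry();
        r.define("w1", "PARTITION BY a ORDER BY b");
        try {
            r.define("w1", "PARTITION BY c");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // duplicate rejected
        }
    }
}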
private DynamicPartitionCtx checkDynPart(QB qb, QBMetaData qbm, Table dest_tab,
    Map<String, String> partSpec, String dest) throws SemanticException {
  List<FieldSchema> parts = dest_tab.getPartitionKeys();
  if (parts == null || parts.isEmpty()) {
    return null; // table is not partitioned
  }
  if (partSpec == null || partSpec.size() == 0) { // user did NOT specify partition
    throw new SemanticException(generateErrorMessage(qb.getParseInfo().getDestForClause(dest),
        ErrorMsg.NEED_PARTITION_ERROR.getMsg()));
  }
  DynamicPartitionCtx dpCtx = qbm.getDPCtx(dest);
  if (dpCtx == null) {
    dest_tab.validatePartColumnNames(partSpec, false);
    dpCtx = new DynamicPartitionCtx(partSpec,
        conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME),
        conf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTSPERNODE));
    qbm.setDPCtx(dest, dpCtx);
  }
  if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING)) { // allow DP
    throw new SemanticException(generateErrorMessage(qb.getParseInfo().getDestForClause(dest),
        ErrorMsg.DYNAMIC_PARTITION_DISABLED.getMsg()));
  }
  if (dest_tab.getNumBuckets() > 0) {
    dpCtx.setNumBuckets(dest_tab.getNumBuckets());
  }
  return dpCtx;
}
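// checkDynPart encodes a short decision tree: unpartitioned tables opt out, a missing
// partition clause is an error, and dynamic partitioning must be enabled before a context
// is returned. A hedged, self-contained sketch of just that control flow with plain
// collections follows; PartResult and check are illustrative names, not Hive types.
import java.util.List;
import java.util.Map;

final class DynPartCheck {
    enum PartResult { NOT_PARTITIONED, NEED_PARTITION_ERROR, DP_DISABLED_ERROR, OK }

    // Mirror the order of the guards in checkDynPart above.
    static PartResult check(List<String> partitionKeys,
                            Map<String, String> partSpec,
                            boolean dynamicPartitioningEnabled) {
        if (partitionKeys == null || partitionKeys.isEmpty()) {
            return PartResult.NOT_PARTITIONED;      // table is not partitioned
        }
        if (partSpec == null || partSpec.isEmpty()) {
            return PartResult.NEED_PARTITION_ERROR; // no partition clause at all
        }
        if (!dynamicPartitioningEnabled) {
            return PartResult.DP_DISABLED_ERROR;    // DP requested but disabled
        }
        return PartResult.OK;
    }

    public static void main(String[] args) {
        // Partitioned table, partition clause present, DP switched off.
        System.out.println(check(List.of("ds"), Map.of("ds", ""), false)); // DP_DISABLED_ERROR
    }
}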
tblDesc.getProperties().setProperty(serdeConstants.LINE_DELIM, lineDelim);
// Only a literal newline, or its ASCII code "10", is accepted as the line delimiter.
if (!lineDelim.equals("\n") && !lineDelim.equals("10")) {
  throw new SemanticException(generateErrorMessage(rowChild,
      ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg()));
private PTFInputSpec processPTFSource(QB qb, ASTNode inputNode) throws SemanticException {
  PTFInputSpec qInSpec = null;
  int type = inputNode.getType();
  String alias;
  switch (type) {
  case HiveParser.TOK_TABREF:
    alias = processTable(qb, inputNode);
    qInSpec = new PTFQueryInputSpec();
    ((PTFQueryInputSpec) qInSpec).setType(PTFQueryInputType.TABLE);
    ((PTFQueryInputSpec) qInSpec).setSource(alias);
    break;
  case HiveParser.TOK_SUBQUERY:
    alias = processSubQuery(qb, inputNode);
    qInSpec = new PTFQueryInputSpec();
    ((PTFQueryInputSpec) qInSpec).setType(PTFQueryInputType.SUBQUERY);
    ((PTFQueryInputSpec) qInSpec).setSource(alias);
    break;
  case HiveParser.TOK_PTBLFUNCTION:
    qInSpec = processPTFChain(qb, inputNode);
    break;
  default:
    throw new SemanticException(generateErrorMessage(inputNode, "Unknown input type to PTF"));
  }
  qInSpec.setAstNode(inputNode);
  return qInSpec;
}
if (!lineDelim.equals("\n") && !lineDelim.equals("10")) {
  throw new SemanticException(SemanticAnalyzer.generateErrorMessage(rowChild,
      ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg()));
public static SubQueryType get(ASTNode opNode) throws SemanticException {
  if (opNode == null) {
    return SCALAR;
  }
  switch (opNode.getType()) {
  // opNode's type is always either KW_EXISTS or KW_IN, never NOTEXISTS or NOTIN;
  // to detect negation we have to check its grandparent's parent for KW_NOT.
  case HiveParser.KW_EXISTS:
    if (opNode.getParent().getParent().getParent() != null
        && opNode.getParent().getParent().getParent().getType() == HiveParser.KW_NOT) {
      return NOT_EXISTS;
    }
    return EXISTS;
  case HiveParser.TOK_SUBQUERY_OP_NOTEXISTS:
    return NOT_EXISTS;
  case HiveParser.KW_IN:
    if (opNode.getParent().getParent().getParent() != null
        && opNode.getParent().getParent().getParent().getType() == HiveParser.KW_NOT) {
      return NOT_IN;
    }
    return IN;
  case HiveParser.TOK_SUBQUERY_OP_NOTIN:
    return NOT_IN;
  default:
    throw new SemanticException(SemanticAnalyzer.generateErrorMessage(opNode,
        "Operator not supported in SubQuery use."));
  }
}
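// Because the grammar wires KW_NOT in as an ancestor of the operator token rather than
// emitting a distinct token, the method above distinguishes NOT IN / NOT EXISTS by
// climbing three parents. A toy reproduction of that ancestor walk; Tok, its fields,
// and negatedByAncestor are invented for illustration, not Hive's parser types.
final class Tok {
    static final int KW_NOT = 1;
    static final int KW_IN = 2;

    final int type;
    Tok parent;

    Tok(int type) { this.type = type; }

    // Attach a child and return it, so trees can be built by chaining.
    Tok add(Tok child) { child.parent = this; return child; }
}

final class SubQueryKind {
    // Walk three ancestors up from the operator token and test for KW_NOT,
    // mirroring the opNode.getParent().getParent().getParent() chain above.
    static boolean negatedByAncestor(Tok op) {
        Tok p = op;
        for (int i = 0; i < 3 && p != null; i++) {
            p = p.parent;
        }
        return p != null && p.type == Tok.KW_NOT;
    }

    public static void main(String[] args) {
        // KW_NOT -> (op node) -> (op node) -> KW_IN, as the grammar nests it.
        Tok not = new Tok(Tok.KW_NOT);
        Tok in = not.add(new Tok(0)).add(new Tok(0)).add(new Tok(Tok.KW_IN));
        System.out.println(negatedByAncestor(in)); // true -> NOT_IN
    }
}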
if ((numChildren != 2) && (numChildren != 3)
    && join.getToken().getType() != HiveParser.TOK_UNIQUEJOIN) {
  throw new SemanticException(generateErrorMessage(join, "Join with multiple children"));
}
String inputAlias = ptfInvocationSpec.getFunction().getAlias();
if (inputAlias == null) {
  throw new SemanticException(generateErrorMessage(child,
      "PTF invocation in a Join must have an alias"));
throw new CalciteSubquerySemanticException(SemanticAnalyzer.generateErrorMessage(sqNode,
    ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
        "Currently SubQuery expressions are only allowed as " +
        "Where and Having Clause predicates")));
int child_count = ptf.getChildCount();
if (child_count < 2) {
  throw new SemanticException(generateErrorMessage(ptf,
      "Not enough Children " + child_count));