public List<RexNode> getValuesInOrder(List<RexNodeRef> columns) throws SemanticException {
  List<RexNode> ret = new ArrayList<>();
  for (RexNodeRef rexInputRef : columns) {
    Constraint constraint = constraints.get(rexInputRef);
    if (constraint == null) {
      throw new SemanticException("Unable to find constraint which was earlier added.");
    }
    ret.add(constraint.exprNode);
  }
  return ret;
}
private String buildTriggerExpression(ASTNode ast) throws SemanticException {
  if (ast.getType() != HiveParser.TOK_TRIGGER_EXPRESSION || ast.getChildCount() == 0) {
    throw new SemanticException("Invalid trigger expression.");
  }
  StringBuilder builder = new StringBuilder();
  for (int i = 0; i < ast.getChildCount(); ++i) {
    builder.append(ast.getChild(i).getText()); // Don't strip quotes.
    builder.append(' ');
  }
  builder.deleteCharAt(builder.length() - 1);
  return builder.toString();
}
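// The loop above appends each child token's text plus a space, then trims the
// trailing space. A minimal standalone sketch of the same joining pattern, with
// plain strings standing in for AST child tokens (illustrative, not Hive code):
import java.util.Arrays;
import java.util.List;

class TriggerExpressionJoinSketch {
  public static void main(String[] args) {
    List<String> tokens = Arrays.asList("BYTES_READ", ">", "'1GB'");
    String expression = String.join(" ", tokens); // equivalent to append-then-trim
    System.out.println(expression); // prints: BYTES_READ > '1GB'
  }
}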
public static Operator<?> getSingleParent(Operator<?> current, Operator<?> terminal)
    throws SemanticException {
  if (current == terminal) {
    return null;
  }
  List<Operator<?>> parents = current.getParentOperators();
  if (parents == null || parents.isEmpty()) {
    if (terminal != null) {
      throw new SemanticException("Failed to meet terminal operator");
    }
    return null;
  }
  if (parents.size() == 1) {
    return parents.get(0);
  }
  if (terminal != null && parents.contains(terminal)) {
    return terminal;
  }
  throw new SemanticException("Met multiple parent operators");
}
/**
 * This constructor is used for partitioned CTAS. Basically we pass the names of
 * the partitioned columns, which will all be dynamic partitions since the binding
 * is done after executing the query in the CTAS.
 */
public DynamicPartitionCtx(List<String> partColNames, String defaultPartName, int maxParts)
    throws SemanticException {
  this.partSpec = new LinkedHashMap<>();
  this.spNames = new ArrayList<>();
  this.dpNames = new ArrayList<>();
  for (String colName : partColNames) {
    this.partSpec.put(colName, null);
    this.dpNames.add(colName);
  }
  this.numBuckets = 0;
  this.maxPartsPerNode = maxParts;
  this.defaultPartName = defaultPartName;
  this.numDPCols = dpNames.size();
  this.numSPCols = spNames.size();
  this.spPath = null;
  String confVal;
  try {
    confVal = Hive.get().getMetaConf(ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.varname);
  } catch (HiveException e) {
    throw new SemanticException(e);
  }
  this.whiteListPattern = confVal == null || confVal.isEmpty() ? null : Pattern.compile(confVal);
}
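// The whiteListPattern compiled above is later used to validate dynamic partition
// values. A standalone sketch of that check; the pattern string below is a
// hypothetical example value for METASTORE_PARTITION_NAME_WHITELIST_PATTERN,
// not a Hive default:
import java.util.regex.Pattern;

class PartitionWhitelistSketch {
  public static void main(String[] args) {
    Pattern whiteListPattern = Pattern.compile("[A-Za-z0-9_\\-]*");
    System.out.println(whiteListPattern.matcher("2024-01-01").matches()); // true
    System.out.println(whiteListPattern.matcher("bad/value").matches());  // false: '/' rejected
  }
}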
ASTNode tabColName = (ASTNode) ast.getChild(1);
if (ast.getType() == HiveParser.TOK_INSERT_INTO && tabColName != null
    && tabColName.getType() == HiveParser.TOK_TABCOLNAME) {
  // "INSERT INTO foo(a, b) ...": collect the explicitly listed target columns.
  List<String> targetColNames = new ArrayList<String>();
  for (Node col : tabColName.getChildren()) {
    assert ((ASTNode) col).getType() == HiveParser.Identifier :
        "expected token " + HiveParser.Identifier + " found " + ((ASTNode) col).getType();
    targetColNames.add(((ASTNode) col).getText().toLowerCase());
  }
  String fullTableName = getUnescapedName((ASTNode) ast.getChild(0).getChild(0),
      SessionState.get().getCurrentDatabase());
  qbp.setDestSchemaForClause(ctx_1.dest, targetColNames);
  Set<String> targetColumns = new HashSet<String>();
  targetColumns.addAll(targetColNames);
  if (targetColNames.size() != targetColumns.size()) {
    throw new SemanticException(generateErrorMessage(tabColName,
        "Duplicate column name detected in " + fullTableName + " table schema specification"));
  }
  Table targetTable = null;
  try {
    targetTable = db.getTable(fullTableName, false);
  } catch (HiveException ex) {
    throw new SemanticException(ex);
  }
  if (targetTable == null) {
    throw new SemanticException(generateErrorMessage(ast,
        "Unable to access metadata for table " + fullTableName));
  }
  // Remove names that match regular columns; whatever remains must be a
  // dynamic partition column from the PARTITION clause.
  for (FieldSchema f : targetTable.getCols()) {
    targetColumns.remove(f.getName());
  }
  if (!targetColumns.isEmpty()) {
    List<String> dynamicPartitionColumns = new ArrayList<String>();
    ASTNode tokTab = (ASTNode) ast.getChild(0);
    ASTNode tokPartSpec = (ASTNode) tokTab.getFirstChildWithType(HiveParser.TOK_PARTSPEC);
    if (tokPartSpec != null) {
      for (Node n : tokPartSpec.getChildren()) {
        ASTNode tokPartVal = (ASTNode) n;
        // A TOK_PARTVAL with a single child carries no value: a dynamic partition column.
        if (tokPartVal != null && tokPartVal.getType() == HiveParser.TOK_PARTVAL
            && tokPartVal.getChildCount() == 1) {
          dynamicPartitionColumns.add(tokPartVal.getChild(0).getText());
        }
      }
    }
    targetColumns.removeAll(dynamicPartitionColumns);
    if (!targetColumns.isEmpty()) {
      throw new SemanticException(generateErrorMessage(tabColName,
          "'" + (targetColumns.size() == 1 ? targetColumns.iterator().next() : targetColumns)
          + "' in insert schema specification " + (targetColumns.size() == 1 ? "is" : "are")
          + " not found among regular columns of " + fullTableName
          + " nor dynamic partition columns."));
    }
  }
}
/**
 * Skewed column names and values should match in number.
 *
 * @param skewedColNames
 * @param skewedColValues
 * @throws SemanticException
 */
public static void validateSkewedColNameValueNumberMatch(List<String> skewedColNames,
    List<List<String>> skewedColValues) throws SemanticException {
  for (List<String> colValue : skewedColValues) {
    if (colValue.size() != skewedColNames.size()) {
      throw new SemanticException(
          ErrorMsg.SKEWED_TABLE_SKEWED_COL_NAME_VALUE_MISMATCH_3.getMsg()
          + skewedColNames.size() + " : " + colValue.size());
    }
  }
}
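// Illustrative call (a sketch; values chosen for demonstration, and assuming an
// enclosing context that imports java.util.Arrays and declares throws
// SemanticException): with two skewed columns, every skewed value tuple must
// also have exactly two entries.
List<String> skewedCols = Arrays.asList("key", "ds");
List<List<String>> matching = Arrays.asList(Arrays.asList("1", "2024-01-01"));
validateSkewedColNameValueNumberMatch(skewedCols, matching); // passes: 2 names, 2 values
List<List<String>> mismatched = Arrays.asList(Arrays.asList("1"));
// validateSkewedColNameValueNumberMatch(skewedCols, mismatched); // would throw: "... 2 : 1"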
public void addPartition(String dbName, String tableName, Map<String, String> partSpec)
    throws SemanticException {
  if (dbName == null) {
    throw new SemanticException("db name can not be null");
  }
  String key = getKey(normalizeIdentifier(dbName), normalizeIdentifier(tableName));
  Integer idx = updateMetaDataMap.get(key);
  if (idx == null) {
    throw new SemanticException(
        "add partition to metadata map failed as list is not yet set for table : " + key);
  }
  updateMetaDataList.get(idx).addPartition(partSpec);
}
private String buildTriggerActionExpression(ASTNode ast) throws SemanticException {
  switch (ast.getType()) {
  case HiveParser.KW_KILL:
    return "KILL";
  case HiveParser.KW_MOVE:
    if (ast.getChildCount() != 1) {
      throw new SemanticException("Invalid move to clause in trigger action.");
    }
    String poolPath = poolPath(ast.getChild(0));
    return "MOVE TO " + poolPath;
  default:
    throw new SemanticException("Unknown token in action clause: " + ast.getType());
  }
}
private void processQueryWindowClause(WindowingSpec spec, ASTNode node)
    throws SemanticException {
  ASTNode nameNode = (ASTNode) node.getChild(0);
  ASTNode wsNode = (ASTNode) node.getChild(1);
  if (spec.getWindowSpecs() != null && spec.getWindowSpecs().containsKey(nameNode.getText())) {
    throw new SemanticException(generateErrorMessage(nameNode,
        "Duplicate definition of window " + nameNode.getText() + " is not allowed"));
  }
  WindowSpec ws = processWindowSpec(wsNode);
  spec.addWindowSpec(nameNode.getText(), ws);
}
public static VarcharTypeInfo getVarcharTypeInfo(ASTNode node) throws SemanticException {
  if (node.getChildCount() != 1) {
    throw new SemanticException("Bad params for type varchar");
  }
  String lengthStr = node.getChild(0).getText();
  return TypeInfoFactory.getVarcharTypeInfo(Integer.parseInt(lengthStr));
}
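// The child token is the length literal from "varchar(n)". A standalone sketch
// of the parse-and-bound step (65535 is varchar's maximum length in Hive; the
// bound check itself lives inside the type factory and is inlined here only
// for illustration):
class VarcharLengthSketch {
  public static void main(String[] args) {
    String lengthStr = "50"; // the single child token of varchar(50)
    int length = Integer.parseInt(lengthStr); // non-numeric input throws NumberFormatException
    if (length < 1 || length > 65535) {
      throw new IllegalArgumentException("varchar length " + length + " out of range [1, 65535]");
    }
    System.out.println("varchar(" + length + ")");
  }
}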
public static void writeOutput(List<String> values, Path outputFile, HiveConf hiveConf)
    throws SemanticException {
  DataOutputStream outStream = null;
  try {
    FileSystem fs = outputFile.getFileSystem(hiveConf);
    outStream = fs.create(outputFile);
    outStream.writeBytes(values.get(0) == null ? Utilities.nullStringOutput : values.get(0));
    for (int i = 1; i < values.size(); i++) {
      outStream.write(Utilities.tabCode);
      outStream.writeBytes(values.get(i) == null ? Utilities.nullStringOutput : values.get(i));
    }
    outStream.write(Utilities.newLineCode);
  } catch (IOException e) {
    throw new SemanticException(e);
  } finally {
    IOUtils.closeStream(outStream);
  }
}
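// A minimal standalone sketch of the row layout produced above: tab-separated
// fields with nulls rendered as a placeholder, newline-terminated. Plain java.io
// stands in for the Hadoop FileSystem, and "NULL" is assumed as the null
// placeholder string:
import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

class WriteOutputSketch {
  public static void main(String[] args) throws IOException {
    List<String> values = Arrays.asList("db1", null, "tbl1");
    try (DataOutputStream out = new DataOutputStream(new FileOutputStream("row.txt"))) {
      out.writeBytes(values.get(0) == null ? "NULL" : values.get(0));
      for (int i = 1; i < values.size(); i++) {
        out.write('\t'); // corresponds to Utilities.tabCode
        out.writeBytes(values.get(i) == null ? "NULL" : values.get(i));
      }
      out.write('\n'); // corresponds to Utilities.newLineCode
    }
    // row.txt now contains: db1<TAB>NULL<TAB>tbl1<NEWLINE>
  }
}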
public DynamicPartitionCtx(Map<String, String> partSpec, String defaultPartName, int maxParts)
    throws SemanticException {
  this.partSpec = partSpec;
  this.spNames = new ArrayList<String>();
  this.dpNames = new ArrayList<String>();
  this.numBuckets = 0;
  this.maxPartsPerNode = maxParts;
  this.defaultPartName = defaultPartName;
  for (Map.Entry<String, String> me : partSpec.entrySet()) {
    if (me.getValue() == null) {
      dpNames.add(me.getKey());
    } else {
      spNames.add(me.getKey());
    }
  }
  this.numDPCols = dpNames.size();
  this.numSPCols = spNames.size();
  if (this.numSPCols > 0) {
    this.spPath = Warehouse.makeDynamicPartName(partSpec);
  } else {
    this.spPath = null;
  }
  String confVal;
  try {
    confVal = Hive.get().getMetaConf(ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.varname);
  } catch (HiveException e) {
    throw new SemanticException(e);
  }
  this.whiteListPattern = confVal == null || confVal.isEmpty() ? null : Pattern.compile(confVal);
}
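// Illustrative partSpec for this constructor (a sketch): a null value marks a
// dynamic partition column, a non-null value a static one, mirroring the split
// in the loop above:
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class PartSpecSplitSketch {
  public static void main(String[] args) {
    Map<String, String> partSpec = new LinkedHashMap<>();
    partSpec.put("ds", "2024-01-01"); // static: value supplied in the query
    partSpec.put("hr", null);         // dynamic: value bound at execution time
    List<String> dpNames = new ArrayList<>();
    List<String> spNames = new ArrayList<>();
    for (Map.Entry<String, String> me : partSpec.entrySet()) {
      if (me.getValue() == null) {
        dpNames.add(me.getKey());
      } else {
        spNames.add(me.getKey());
      }
    }
    System.out.println("dynamic=" + dpNames + " static=" + spNames); // dynamic=[hr] static=[ds]
  }
}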
@Override
public List<String> partitions(ImportTableDesc tblDesc) throws SemanticException {
  List<String> partitions = new ArrayList<>();
  try {
    for (Partition partition : metadata.getPartitions()) {
      String partName = Warehouse.makePartName(tblDesc.getPartCols(), partition.getValues());
      partitions.add(partName);
    }
  } catch (MetaException e) {
    throw new SemanticException(e);
  }
  return partitions;
}
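// Warehouse.makePartName renders "col=value" pairs joined by '/'. A standalone
// sketch of the resulting shape (the real implementation also escapes special
// characters in names and values):
import java.util.Arrays;
import java.util.List;

class PartNameSketch {
  public static void main(String[] args) {
    List<String> cols = Arrays.asList("ds", "hr");
    List<String> vals = Arrays.asList("2024-01-01", "10");
    StringBuilder name = new StringBuilder();
    for (int i = 0; i < cols.size(); i++) {
      if (i > 0) {
        name.append('/');
      }
      name.append(cols.get(i)).append('=').append(vals.get(i));
    }
    System.out.println(name); // ds=2024-01-01/hr=10
  }
}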
protected RexNode convert(ExprNodeColumnDesc col) throws SemanticException {
  // If this is correlated we need to make a RexCorrelVariable (with id and type);
  // id and type should be retrieved from outerRR.
  InputCtx ic = getInputCtx(col);
  if (ic == null) {
    // We have a correlated column: build its data type from the outer row resolver.
    RelDataType rowType = TypeConverter.getType(cluster, this.outerRR, null);
    if (this.outerNameToPosMap.get(col.getColumn()) == null) {
      throw new SemanticException(ErrorMsg.INVALID_COLUMN_NAME.getMsg(col.getColumn()));
    }
    int pos = this.outerNameToPosMap.get(col.getColumn());
    CorrelationId colCorr = new CorrelationId(this.correlatedId);
    RexNode corExpr = cluster.getRexBuilder().makeCorrel(rowType, colCorr);
    return cluster.getRexBuilder().makeFieldAccess(corExpr, pos);
  }
  int pos = ic.hiveNameToPosMap.get(col.getColumn());
  return cluster.getRexBuilder().makeInputRef(
      ic.calciteInpDataType.getFieldList().get(pos).getType(), pos + ic.offsetInCalciteSchema);
}
private static void validateCheckExprAST(ASTNode checkExpr) throws SemanticException {
  if (checkExpr == null) {
    return;
  }
  if (checkExpr.getType() == HiveParser.TOK_SUBQUERY_EXPR) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg(
        "Subqueries are not allowed in Check Constraints"));
  }
  for (int i = 0; i < checkExpr.getChildCount(); i++) {
    validateCheckExprAST((ASTNode) checkExpr.getChild(i));
  }
}
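// A minimal standalone sketch of the same pre-order walk; a tiny Node type
// stands in for ASTNode and an int tag for HiveParser.TOK_SUBQUERY_EXPR
// (all names here are hypothetical, for illustration only):
import java.util.ArrayList;
import java.util.List;

class CheckExprWalkSketch {
  static final int TOK_SUBQUERY_EXPR = 1;
  static final int TOK_OTHER = 0;

  static class Node {
    final int type;
    final List<Node> children = new ArrayList<>();
    Node(int type) { this.type = type; }
  }

  // Reject the node itself, then recurse into every child, exactly as above.
  static void validate(Node n) {
    if (n == null) {
      return;
    }
    if (n.type == TOK_SUBQUERY_EXPR) {
      throw new IllegalStateException("Subqueries are not allowed in Check Constraints");
    }
    for (Node child : n.children) {
      validate(child);
    }
  }

  public static void main(String[] args) {
    Node root = new Node(TOK_OTHER);
    root.children.add(new Node(TOK_SUBQUERY_EXPR));
    try {
      validate(root);
    } catch (IllegalStateException e) {
      System.out.println("rejected: " + e.getMessage()); // subquery found one level down
    }
  }
}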
public static CharTypeInfo getCharTypeInfo(ASTNode node) throws SemanticException {
  if (node.getChildCount() != 1) {
    throw new SemanticException("Bad params for type char");
  }
  String lengthStr = node.getChild(0).getText();
  return TypeInfoFactory.getCharTypeInfo(Integer.parseInt(lengthStr));
}
public DynamicPartitionCtx(Table tbl, Map<String, String> partSpec, String defaultPartName,
    int maxParts) throws SemanticException {
  this.partSpec = partSpec;
  this.spNames = new ArrayList<String>();
  this.dpNames = new ArrayList<String>();
  this.numBuckets = 0;
  this.maxPartsPerNode = maxParts;
  this.defaultPartName = defaultPartName;
  for (Map.Entry<String, String> me : partSpec.entrySet()) {
    if (me.getValue() == null) {
      dpNames.add(me.getKey());
    } else {
      spNames.add(me.getKey());
    }
  }
  this.numDPCols = dpNames.size();
  this.numSPCols = spNames.size();
  if (this.numSPCols > 0) {
    this.spPath = Warehouse.makeDynamicPartName(partSpec);
  } else {
    this.spPath = null;
  }
  String confVal;
  try {
    confVal = Hive.get().getMetaConf(ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.varname);
  } catch (HiveException e) {
    throw new SemanticException(e);
  }
  this.whiteListPattern = confVal == null || confVal.isEmpty() ? null : Pattern.compile(confVal);
}
/**
 * Generates the map-side group-by pipeline GB-RS-GB1-RS-GB2
 * (GroupBy-ReduceSink-GroupBy-ReduceSink-GroupBy), used when there is skew on
 * the group-by keys or a distinct UDAF is present.
 *
 * @param inputOpAf
 * @param aggRel
 * @param gbInfo
 * @return
 * @throws SemanticException
 */
private static OpAttr genMapSideGBSkewGBKeysOrDistUDAFPresent(OpAttr inputOpAf,
    HiveAggregate aggRel, GBInfo gbInfo) throws SemanticException {
  // 1. Sanity check: an additional MR job for grouping sets is incompatible with skew handling.
  if (gbInfo.grpSetRqrAdditionalMRJob) {
    String errorMsg = "The number of rows per input row due to grouping sets is "
        + gbInfo.grpSets.size();
    throw new SemanticException(
        ErrorMsg.HIVE_GROUPING_SETS_THRESHOLD_NOT_ALLOWED_WITH_SKEW.getMsg(errorMsg));
  }
  // 2. Generate the GB-RS-GB-RS-GB pipeline.
  return genGBRSGBRSGBOpPipeLine(inputOpAf, aggRel, gbInfo);
}