/**
 * Renders a human-readable reason describing why this node was not vectorized,
 * formatted according to the kind of issue recorded in {@code vectorizerNodeIssue}.
 */
@Override
public String toString() {
  // Shared fragments; String.valueOf keeps the original concatenation semantics
  // even if getType() were to return null.
  final String issueText = (issue == null) ? "unknown" : issue;
  final String operatorName =
      (operator == null) ? "Unknown" : String.valueOf(operator.getType());
  switch (vectorizerNodeIssue) {
  case NODE_ISSUE:
    return issueText;
  case OPERATOR_ISSUE:
    return operatorName + " operator: " + issueText;
  case EXPRESSION_ISSUE:
    return expressionTitle + " expression for " + operatorName + " operator: " + issueText;
  default:
    return "Unknown " + vectorizerNodeIssue;
  }
}
}
/**
 * Builds the reason string for this vectorization issue, varying the message
 * shape by the issue category.
 */
@Override
public String toString() {
  String result;
  // Precompute the common pieces once. opLabel stays an Object so that string
  // concatenation applies String.valueOf, exactly as the inline ternary did.
  String detail = (issue != null) ? issue : "unknown";
  Object opLabel = (operator != null) ? operator.getType() : "Unknown";
  switch (vectorizerNodeIssue) {
  case NODE_ISSUE:
    result = detail;
    break;
  case OPERATOR_ISSUE:
    result = opLabel + " operator: " + detail;
    break;
  case EXPRESSION_ISSUE:
    result = expressionTitle + " expression for " + opLabel + " operator: " + detail;
    break;
  default:
    result = "Unknown " + vectorizerNodeIssue;
    break;
  }
  return result;
}
}
/**
 * Records a "not supported" issue for the given operator, preferring the
 * {@code @Explain} display name declared on its descriptor class when present.
 *
 * @param op the operator that cannot be vectorized
 */
private void setOperatorNotSupported(Operator<? extends OperatorDesc> op) {
  final Annotation annotation =
      AnnotationUtils.getAnnotation(op.getConf().getClass(), Explain.class);
  if (annotation != null) {
    // Use the friendly display name from the @Explain annotation.
    setNodeIssue(((Explain) annotation).displayName() + " (" + op.getType() + ") not supported");
  } else {
    setNodeIssue("Operator " + op.getType() + " not supported");
  }
}
/**
 * Flags the given operator as unsupported for vectorization. The message uses
 * the descriptor's {@code @Explain} display name when the annotation exists,
 * otherwise a generic form based on the operator type.
 *
 * @param op the unsupported operator
 */
private void setOperatorNotSupported(Operator<? extends OperatorDesc> op) {
  OperatorDesc conf = op.getConf();
  Annotation explainAnnotation = AnnotationUtils.getAnnotation(conf.getClass(), Explain.class);
  String message;
  if (explainAnnotation == null) {
    message = "Operator " + op.getType() + " not supported";
  } else {
    message = ((Explain) explainAnnotation).displayName()
        + " (" + op.getType() + ") not supported";
  }
  setNodeIssue(message);
}
/** * Creates job feature list: consists of a tasktag and a set of operators * * @param ops * @param taskTagId * @return */ private String[] getFeatures(List<Operator<?>> ops, int taskTagId) { if (ops == null) { return EMPTY_ARR; } Set<String> features = Sets.newHashSet(); for (Operator<?> op : ops) { OperatorType opType = op.getType(); // some operators are discarded if (!skipType(opType)) { features.add(opType.toString()); } } // if taskTag is other than 'NO_TAG', include it in the feature list if (taskTagId == Task.NO_TAG) { return features.toArray(new String[features.size()]); } String[] result = features.toArray(new String[features.size() + 1]); result[result.length - 1] = TaskTag.get(taskTagId); return result; }
/**
 * Reports whether {@code op} is a GROUP BY operator whose vectorized form
 * emits rows rather than vectorized batches.
 *
 * @param op the operator to inspect
 * @return true when op is a GROUP BY that does not produce vector output;
 *         false otherwise
 * @throws SemanticException declared for callers; not thrown directly here
 */
private Boolean isVectorizedGroupByThatOutputsRows(Operator<? extends OperatorDesc> op)
    throws SemanticException {
  if (!op.getType().equals(OperatorType.GROUPBY)) {
    return false;
  }
  GroupByDesc groupByDesc = (GroupByDesc) op.getConf();
  VectorGroupByDesc vectorDesc = (VectorGroupByDesc) groupByDesc.getVectorDesc();
  // Rows are output exactly when the vectorized GROUP BY does NOT keep vector output.
  return !vectorDesc.isVectorOutput();
}
new org.apache.hadoop.hive.ql.plan.api.Operator(); operator.setOperatorId(op.getOperatorId()); operator.setOperatorType(op.getType()); task.addToOperatorList(operator);
new org.apache.hadoop.hive.ql.plan.api.Operator(); operator.setOperatorId(op.getOperatorId()); operator.setOperatorType(op.getType()); task.addToOperatorList(operator);
} else { throw new SemanticException("View " + alias + " is corresponding to " + operator.getType().name() + ", rather than a SelectOperator.");
boolean validateReduceWorkOperator(Operator<? extends OperatorDesc> op) { boolean ret; switch (op.getType()) { case MAPJOIN: ret = validateGroupByOperator((GroupByOperator) op, true, true); } else { setNodeIssue("Operator " + op.getType() + " not enabled (" + HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_GROUPBY_ENABLED.name() + "=true IS false)"); ret = false;
} else { throw new SemanticException("View " + alias + " is corresponding to " + operator.getType().name() + ", rather than a SelectOperator.");
boolean validateMapWorkOperator(Operator<? extends OperatorDesc> op, MapWork mWork, boolean isTezOrSpark) { boolean ret; switch (op.getType()) { case MAPJOIN: if (op instanceof MapJoinOperator) {
switch (op.getType()) { case TABLESCAN: vectorOp = vectorizeTableScanOperator(op, vContext);
switch (op.getType()) { case MAPJOIN:
public Boolean nonVectorizableChildOfGroupBy(Operator<? extends OperatorDesc> op) { Operator<? extends OperatorDesc> currentOp = op; while (currentOp.getParentOperators().size() > 0) { currentOp = currentOp.getParentOperators().get(0); if (currentOp.getType().equals(OperatorType.GROUPBY)) { GroupByDesc desc = (GroupByDesc)currentOp.getConf(); boolean isVectorOutput = desc.getVectorDesc().isVectorOutput(); if (isVectorOutput) { // This GROUP BY does vectorize its output. return false; } return true; } } return false; }
new org.apache.hadoop.hive.ql.plan.api.Operator(); operator.setOperatorId(op.getOperatorId()); operator.setOperatorType(op.getType()); task.addToOperatorList(operator);
new org.apache.hadoop.hive.ql.plan.api.Operator(); operator.setOperatorId(op.getOperatorId()); operator.setOperatorType(op.getType()); task.addToOperatorList(operator);
Operator<? extends OperatorDesc> vectorOp = null; switch (op.getType()) { case MAPJOIN:
boolean validateMapWorkOperator(Operator<? extends OperatorDesc> op, MapWork mWork, boolean isTez) { boolean ret = false; switch (op.getType()) { case MAPJOIN: if (op instanceof MapJoinOperator) {
boolean validateReduceWorkOperator(Operator<? extends OperatorDesc> op) { boolean ret = false; switch (op.getType()) { case MAPJOIN: