/**
 * Reports whether the given operator is a vectorized GROUP BY that emits
 * rows, i.e. a GROUP BY whose {@code VectorGroupByDesc} does NOT vectorize
 * its output.
 *
 * @param op the operator to inspect
 * @return true only for a GROUP BY operator with {@code isVectorOutput() == false};
 *         false for every other operator type
 * @throws SemanticException declared for caller compatibility
 */
private Boolean isVectorizedGroupByThatOutputsRows(Operator<? extends OperatorDesc> op)
    throws SemanticException {
  // Enum constants are singletons, so identity comparison is the Java idiom
  // here (and, unlike equals(), cannot NPE if getType() ever returned null).
  if (op.getType() == OperatorType.GROUPBY) {
    GroupByDesc desc = (GroupByDesc) op.getConf();
    return !((VectorGroupByDesc) desc.getVectorDesc()).isVectorOutput();
  }
  return false;
}
case 2: // OPERATOR_TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.operatorType = org.apache.hadoop.hive.ql.plan.api.OperatorType.findByValue(iprot.readI32()); struct.setOperatorTypeIsSet(true); } else {
/**
 * Hash code consistent with equals(): hashes a list of (presence flag,
 * value) pairs covering every field, so equal structs hash equally.
 */
@Override
public int hashCode() {
  List<Object> hashElements = new ArrayList<Object>();

  // Optional fields contribute their value only when actually set.
  boolean hasOperatorId = isSetOperatorId();
  hashElements.add(hasOperatorId);
  if (hasOperatorId) {
    hashElements.add(operatorId);
  }

  boolean hasOperatorType = isSetOperatorType();
  hashElements.add(hasOperatorType);
  if (hasOperatorType) {
    // Enums hash by their Thrift integer value, not object identity.
    hashElements.add(operatorType.getValue());
  }

  boolean hasOperatorAttributes = isSetOperatorAttributes();
  hashElements.add(hasOperatorAttributes);
  if (hasOperatorAttributes) {
    hashElements.add(operatorAttributes);
  }

  boolean hasOperatorCounters = isSetOperatorCounters();
  hashElements.add(hasOperatorCounters);
  if (hasOperatorCounters) {
    hashElements.add(operatorCounters);
  }

  // Primitive fields are always present; the constant flag keeps the list
  // layout identical to the optional-field pattern above.
  hashElements.add(true);
  hashElements.add(done);
  hashElements.add(true);
  hashElements.add(started);

  return hashElements.hashCode();
}
/** * Creates job feature list: consists of a tasktag and a set of operators * * @param ops * @param taskTagId * @return */ private String[] getFeatures(List<Operator<?>> ops, int taskTagId) { if (ops == null) { return EMPTY_ARR; } Set<String> features = Sets.newHashSet(); for (Operator<?> op : ops) { OperatorType opType = op.getType(); // some operators are discarded if (!skipType(opType)) { features.add(opType.toString()); } } // if taskTag is other than 'NO_TAG', include it in the feature list if (taskTagId == Task.NO_TAG) { return features.toArray(new String[features.size()]); } String[] result = features.toArray(new String[features.size() + 1]); result[result.length - 1] = TaskTag.get(taskTagId); return result; }
} else { throw new SemanticException("View " + alias + " is corresponding to " + operator.getType().name() + ", rather than a SelectOperator.");
// Split the keyword stream at OperatorType (enum) tokens: consecutive
// non-enum keywords are accumulated into one space-joined string that is
// flushed to newArray just before the enum token itself is appended.
// Fixes: removed a stray '*' after the loop brace and replaced the
// non-Java keyword 'bool' with 'boolean' — the original did not compile.
String beforeEnum = "";
for (int index = 0; index < keywords.length; index++) {
  boolean isEnum = false;
  for (OperatorType opr : OperatorType.values()) {
    if (keywords[index].equals(opr.toString())) {
      isEnum = true;
    }
  }
  if (isEnum) {
    if (!beforeEnum.equals("")) {
      newArray.add(beforeEnum);
    }
    newArray.add(keywords[index]);
  } else {
    beforeEnum = beforeEnum + " " + keywords[index];
  }
}
// NOTE(review): beforeEnum is never reset after being flushed, so text that
// preceded an earlier enum token is re-added before each later enum, and any
// trailing non-enum keywords after the last enum are silently dropped.
// Confirm with the caller whether that accumulation is intended.
} else { throw new SemanticException("View " + alias + " is corresponding to " + operator.getType().name() + ", rather than a SelectOperator.");
// Thrift-generated-style hashCode: hashes a list of (presence flag, value)
// pairs for every field, so that structs equal per equals() hash equally.
@Override public int hashCode() {
  List<Object> list = new ArrayList<Object>();
  // optional field: contributes its value only when set
  boolean present_operatorId = true && (isSetOperatorId());
  list.add(present_operatorId);
  if (present_operatorId) list.add(operatorId);
  boolean present_operatorType = true && (isSetOperatorType());
  list.add(present_operatorType);
  // enums are hashed by their Thrift integer value
  if (present_operatorType) list.add(operatorType.getValue());
  boolean present_operatorAttributes = true && (isSetOperatorAttributes());
  list.add(present_operatorAttributes);
  if (present_operatorAttributes) list.add(operatorAttributes);
  boolean present_operatorCounters = true && (isSetOperatorCounters());
  list.add(present_operatorCounters);
  if (present_operatorCounters) list.add(operatorCounters);
  // primitive fields are unconditionally present (flag is constant true)
  boolean present_done = true;
  list.add(present_done);
  if (present_done) list.add(done);
  boolean present_started = true;
  list.add(present_started);
  if (present_started) list.add(started);
  return list.hashCode();
}
case 2: // OPERATOR_TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.operatorType = org.apache.hadoop.hive.ql.plan.api.OperatorType.findByValue(iprot.readI32()); struct.setOperatorTypeIsSet(true); } else {
if (!(this_present_operatorType && that_present_operatorType)) return false; if (!this.operatorType.equals(that.operatorType)) return false;
oprot.writeI32(struct.operatorType.getValue()); oprot.writeFieldEnd();
struct.operatorType = org.apache.hadoop.hive.ql.plan.api.OperatorType.findByValue(iprot.readI32()); struct.setOperatorTypeIsSet(true);
if (!(this_present_operatorType && that_present_operatorType)) return false; if (!this.operatorType.equals(that.operatorType)) return false;
oprot.writeI32(struct.operatorType.getValue()); oprot.writeFieldEnd();
struct.operatorType = org.apache.hadoop.hive.ql.plan.api.OperatorType.findByValue(iprot.readI32()); struct.setOperatorTypeIsSet(true);
public Boolean nonVectorizableChildOfGroupBy(Operator<? extends OperatorDesc> op) { Operator<? extends OperatorDesc> currentOp = op; while (currentOp.getParentOperators().size() > 0) { currentOp = currentOp.getParentOperators().get(0); if (currentOp.getType().equals(OperatorType.GROUPBY)) { GroupByDesc desc = (GroupByDesc)currentOp.getConf(); boolean isVectorOutput = desc.getVectorDesc().isVectorOutput(); if (isVectorOutput) { // This GROUP BY does vectorize its output. return false; } return true; } } return false; }
oprot.writeI32(struct.operatorType.getValue());
case 2: // OPERATOR_TYPE if (field.type == TType.I32) { this.operatorType = OperatorType.findByValue(iprot.readI32()); } else { TProtocolUtil.skip(iprot, field.type);
if (!(this_present_operatorType && that_present_operatorType)) return false; if (!this.operatorType.equals(that.operatorType)) return false;
oprot.writeI32(struct.operatorType.getValue());