/**
 * Creates a {@link FieldReference} from a name that has already been unquoted.
 * (The method name is historical: the name may have been quoted in the original
 * SQL, but any quoting must be stripped before calling this.)
 *
 * @param safeString the unquoted field reference text
 * @return the field reference expression
 */
public static FieldReference getWithQuotedRef(CharSequence safeString) {
  // Position is unknown here; the final 'false' flag skips validation —
  // presumably because the caller guarantees the name is already safe (confirm against ctor).
  final ExpressionPosition pos = ExpressionPosition.UNKNOWN;
  return new FieldReference(safeString, pos, false);
}
/**
 * Deserializes a JSON string value into a {@link FieldReference}, stripping any
 * backtick quoting from the parsed text before constructing the reference.
 *
 * @param jp   the JSON parser positioned at the value to read
 * @param ctxt the Jackson deserialization context
 * @return the parsed, unquoted field reference
 * @throws IOException if reading or parsing the token fails
 */
@Override
public FieldReference deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
  // JsonProcessingException extends IOException, so declaring it separately was redundant.
  String ref = this._parseString(jp, ctxt);
  // Drop backtick quoting so the stored path is the raw, unquoted name.
  ref = ref.replace("`", "");
  // Final 'true' flag presumably enables path validation — confirm against the FieldReference ctor.
  return new FieldReference(ref, ExpressionPosition.UNKNOWN, true);
}
}
/**
 * Builds a {@link FieldReference} for a name that is already unquoted.
 * Despite the method name, callers must strip any SQL quoting first.
 *
 * @param safeString the unquoted field reference text
 * @return the field reference expression
 */
public static FieldReference getWithQuotedRef(CharSequence safeString) {
  // 'false' skips validation of the reference — assumed safe by the caller
  // (NOTE(review): confirm flag semantics against the FieldReference constructor).
  final boolean validate = false;
  return new FieldReference(safeString, ExpressionPosition.UNKNOWN, validate);
}
/**
 * Returns the field reference naming the range-partition expression column.
 * Excluded from JSON serialization because it is derived, not configured.
 */
@JsonIgnore
@Override
public FieldReference getPartitionFieldRef() {
  final FieldReference partitionRef = new FieldReference(RANGE_PARTITION_EXPR_NAME);
  return partitionRef;
}
/**
 * Reads a JSON string and turns it into a {@link FieldReference}, removing any
 * backtick quoting from the text first.
 *
 * @param jp   the JSON parser positioned at the value to read
 * @param ctxt the Jackson deserialization context
 * @return the parsed, unquoted field reference
 * @throws IOException if reading or parsing the token fails
 */
@Override
public FieldReference deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
  // JsonProcessingException is a subclass of IOException; listing both was redundant.
  String ref = this._parseString(jp, ctxt);
  // Strip backticks so the stored path is the unquoted name.
  ref = ref.replace("`", "");
  // Final 'true' flag presumably enables path validation — confirm against the FieldReference ctor.
  return new FieldReference(ref, ExpressionPosition.UNKNOWN, true);
}
}
/** * Create a distribution hash expression. * * @param fields Distribution fields * @param rowType Row type * @return */ public static LogicalExpression getHashExpression(List<DistributionField> fields, RelDataType rowType) { assert fields.size() > 0; final List<String> childFields = rowType.getFieldNames(); // If we already included a field with hash - no need to calculate hash further down if ( childFields.contains(HASH_EXPR_NAME)) { return new FieldReference(HASH_EXPR_NAME); } final List<LogicalExpression> expressions = new ArrayList<LogicalExpression>(childFields.size()); for(int i =0; i < fields.size(); i++){ expressions.add(new FieldReference(childFields.get(fields.get(i).getFieldId()), ExpressionPosition.UNKNOWN)); } final LogicalExpression distSeed = ValueExpressions.getInt(DIST_SEED); return createHashBasedPartitionExpression(expressions, distSeed, HASH_HELPER_LOGICALEXPRESSION); } }
public static LogicalExpression toDrill(AggregateCall call, List<String> fn, DrillImplementor implementor) { List<LogicalExpression> args = Lists.newArrayList(); for(Integer i : call.getArgList()) { args.add(new FieldReference(fn.get(i))); } // for count(1). if (args.isEmpty()) { args.add(new ValueExpressions.LongExpression(1L)); } return FunctionCallFactory.createExpression(call.getAggregation().getName().toLowerCase(), ExpressionPosition.UNKNOWN, args); }
protected LogicalExpression toDrill(AggregateCall call, List<String> fn) { List<LogicalExpression> args = Lists.newArrayList(); for (Integer i : call.getArgList()) { args.add(new FieldReference(fn.get(i))); } // for count(1). if (args.isEmpty()) { args.add(new ValueExpressions.LongExpression(1l)); } LogicalExpression expr = new FunctionCall(call.getAggregation().getName().toLowerCase(), args, ExpressionPosition.UNKNOWN); return expr; } }
/**
 * Wraps {@code inputOp} in a Project that renames each input field to the
 * corresponding output field (lists are zipped positionally).
 *
 * @param implementor  the Drill implementor (unused here but part of the signature)
 * @param inputOp      operator whose output is being renamed
 * @param inputFields  source field names
 * @param outputFields target field names, same length as {@code inputFields}
 * @return a Project operator performing the rename
 */
private static LogicalOperator rename(DrillImplementor implementor, LogicalOperator inputOp, List<String> inputFields, List<String> outputFields) {
  final Project.Builder projectBuilder = Project.builder();
  projectBuilder.setInput(inputOp);
  for (Pair<String, String> fieldPair : Pair.zip(inputFields, outputFields)) {
    // Output name (right of the pair) first, source field (left) second.
    projectBuilder.addExpr(new FieldReference(fieldPair.right), new FieldReference(fieldPair.left));
  }
  return projectBuilder.build();
}
/**
 * Builds one pass-through NamedExpression per incoming field, skipping the
 * column configured on this operator's pop config.
 *
 * @return the list of pass-through expressions for all other columns
 */
private List<NamedExpression> getExpressionList() {
  final List<NamedExpression> passThroughExprs = Lists.newArrayList();
  for (MaterializedField field : incoming.getSchema()) {
    final String name = field.getName();
    // The operator's own column is produced separately; don't copy it through.
    if (name.equals(popConfig.getColumn().getRootSegmentPath())) {
      continue;
    }
    passThroughExprs.add(new NamedExpression(SchemaPath.getSimplePath(name), new FieldReference(name)));
  }
  return passThroughExprs;
}
/**
 * Translates a Calcite {@link RelCollation} into Drill {@link Ordering}s,
 * resolving each collation's field index against the row type's field names.
 *
 * @param collation the Calcite collation to translate
 * @param rowType   row type supplying field names for the collation indices
 * @return the equivalent list of Drill orderings
 */
public static List<Ordering> getOrdering(RelCollation collation, RelDataType rowType) {
  final List<String> fieldNames = rowType.getFieldNames();
  final List<Ordering> orderings = Lists.newArrayList();
  for (RelFieldCollation fieldCollation : collation.getFieldCollations()) {
    final FieldReference fieldRef = new FieldReference(
        fieldNames.get(fieldCollation.getFieldIndex()), ExpressionPosition.UNKNOWN, false);
    orderings.add(new Ordering(fieldCollation.getDirection(), fieldRef, fieldCollation.nullDirection));
  }
  return orderings;
}
/**
 * Rebuilds the transfer pair for the unnest column and re-initializes the
 * unnest operator with it, then refreshes the unnest vector.
 *
 * @return the freshly created transfer pair
 * @throws SchemaChangeException if re-initializing the unnest operator fails
 */
private TransferPair resetUnnestTransferPair() throws SchemaChangeException {
  final FieldReference unnestColumn = new FieldReference(popConfig.getColumn());
  final TransferPair pair = getUnnestFieldTransferPair(unnestColumn);
  final List<TransferPair> transfers = Lists.newArrayList(pair);
  logger.debug("Added transfer for unnest expression.");
  // Tear down the previous state before handing the new transfer list to unnest.
  unnest.close();
  unnest.setup(context, incoming, this, transfers);
  setUnnestVector();
  return pair;
}
protected LogicalExpression toDrill(AggregateCall call, List<String> fn) { DrillParseContext context = new DrillParseContext(PrelUtil.getSettings(getCluster())); List<LogicalExpression> args = Lists.newArrayList(); for (Integer i : call.getArgList()) { final int indexInConstants = i - fn.size(); if (i < fn.size()) { args.add(new FieldReference(fn.get(i))); } else { final RexLiteral constant = constants.get(indexInConstants); LogicalExpression expr = DrillOptiq.toDrill(context, getInput(), constant); args.add(expr); } } // for count(1). if (args.isEmpty()) { args.add(new ValueExpressions.LongExpression(1l)); } return new FunctionCall(call.getAggregation().getName().toLowerCase(), args, ExpressionPosition.UNKNOWN); }
private void setupHashTable() throws SchemaChangeException { final List<Comparator> comparators = Lists.newArrayListWithExpectedSize(conditions.size()); conditions.forEach(cond->comparators.add(JoinUtils.checkAndReturnSupportedJoinComparator(cond))); if ( skipHashTableBuild ) { return; } // Setup the hash table configuration object List<NamedExpression> leftExpr = new ArrayList<>(conditions.size()); // Create named expressions from the conditions for (int i = 0; i < conditions.size(); i++) { leftExpr.add(new NamedExpression(conditions.get(i).getLeft(), new FieldReference("probe_side_" + i))); } // Set the left named expression to be null if the probe batch is empty. if (leftUpstream != IterOutcome.OK_NEW_SCHEMA && leftUpstream != IterOutcome.OK) { leftExpr = null; } else { if (probeBatch.getSchema().getSelectionVectorMode() != BatchSchema.SelectionVectorMode.NONE) { final String errorMsg = new StringBuilder().append("Hash join does not support probe batch with selection vectors. ").append("Probe batch has selection mode = ").append (probeBatch.getSchema().getSelectionVectorMode()).toString(); throw new SchemaChangeException(errorMsg); } } final HashTableConfig htConfig = new HashTableConfig((int) context.getOptions().getOption(ExecConstants.MIN_HASH_TABLE_SIZE), true, HashTable.DEFAULT_LOAD_FACTOR, rightExpr, leftExpr, comparators, joinControl.asInt()); // Create the chained hash table baseHashTable = new ChainedHashTable(htConfig, context, allocator, buildBatch, probeBatch, null); if (enableRuntimeFilter) { setupHash64(htConfig); } }
/**
 * Implements this semi-join as a {@link LogicalSemiJoin} over the implemented
 * left and right inputs, with one equality condition per key pair.
 *
 * @param implementor the Drill implementor driving the conversion
 * @return the logical semi-join operator
 */
@Override
public LogicalOperator implement(DrillImplementor implementor) {
  final List<String> fields = new ArrayList<>();
  fields.addAll(getInput(0).getRowType().getFieldNames());
  fields.addAll(getInput(1).getRowType().getFieldNames());
  Preconditions.checkArgument(DrillJoinRel.isUnique(fields));

  final int leftCount = left.getRowType().getFieldCount();
  final List<String> leftFields = fields.subList(0, leftCount);
  final List<String> rightFields = fields.subList(leftCount, leftCount + right.getRowType().getFieldCount());

  final LogicalOperator leftOp = DrillJoinRel.implementInput(implementor, 0, 0, left, this, fields);
  final LogicalOperator rightOp = DrillJoinRel.implementInput(implementor, 1, leftCount, right, this, fields);

  // NOTE(review): the original constructed and configured a Join.Builder here
  // but never used it; that dead code has been removed.
  final List<JoinCondition> conditions = Lists.newArrayList();
  for (Pair<Integer, Integer> pair : Pair.zip(leftKeys, rightKeys)) {
    conditions.add(new JoinCondition(DrillJoinRel.EQUALITY_CONDITION,
        new FieldReference(leftFields.get(pair.left)),
        new FieldReference(rightFields.get(pair.right))));
  }
  return new LogicalSemiJoin(leftOp, rightOp, conditions, joinType);
}
/**
 * Implements this join as a logical Join operator: implements both inputs,
 * then adds one equality condition per (leftKey, rightKey) pair, resolving
 * key indices against this relation's combined output field names.
 *
 * @param implementor the Drill implementor driving the conversion
 * @return the built logical join operator
 */
@Override
public LogicalOperator implement(DrillImplementor implementor) {
  final List<String> allFields = getRowType().getFieldNames();
  assert isUnique(allFields);

  // Output fields are the left input's names followed by the right input's.
  final int leftFieldCount = left.getRowType().getFieldCount();
  final List<String> leftNames = allFields.subList(0, leftFieldCount);
  final List<String> rightNames = allFields.subList(leftFieldCount, allFields.size());

  final LogicalOperator leftOp = implementInput(implementor, 0, 0, left);
  final LogicalOperator rightOp = implementInput(implementor, 1, leftFieldCount, right);

  final Join.Builder joinBuilder = Join.builder();
  joinBuilder.type(joinType);
  joinBuilder.left(leftOp);
  joinBuilder.right(rightOp);
  for (Pair<Integer, Integer> keyPair : Pair.zip(leftKeys, rightKeys)) {
    joinBuilder.addCondition(EQUALITY_CONDITION,
        new FieldReference(leftNames.get(keyPair.left)),
        new FieldReference(rightNames.get(keyPair.right)));
  }
  return joinBuilder.build();
}
private List<NamedExpression> getExpressionList() { if (popConfig.getExprs() != null) { return popConfig.getExprs(); } final List<NamedExpression> exprs = Lists.newArrayList(); for (final MaterializedField field : incoming.getSchema()) { String fieldName = field.getName(); if (Types.isComplex(field.getType()) || Types.isRepeated(field.getType())) { final LogicalExpression convertToJson = FunctionCallFactory.createConvert(ConvertExpression.CONVERT_TO, "JSON", SchemaPath.getSimplePath(fieldName), ExpressionPosition.UNKNOWN); final String castFuncName = FunctionReplacementUtils.getCastFunc(MinorType.VARCHAR); final List<LogicalExpression> castArgs = Lists.newArrayList(); castArgs.add(convertToJson); //input_expr // implicitly casting to varchar, since we don't know actual source length, cast to undefined length, which will preserve source length castArgs.add(new ValueExpressions.LongExpression(Types.MAX_VARCHAR_LENGTH, null)); final FunctionCall castCall = new FunctionCall(castFuncName, castArgs, ExpressionPosition.UNKNOWN); exprs.add(new NamedExpression(castCall, new FieldReference(fieldName))); } else { exprs.add(new NamedExpression(SchemaPath.getSimplePath(fieldName), new FieldReference(fieldName))); } } return exprs; }
/**
 * Implements this window relation as a logical Window operator: for each
 * window group it adds the orderings, the partition-by ("within") keys, and
 * one aggregation per aggregate call, then attaches the implemented input.
 *
 * @param implementor the Drill implementor driving the conversion
 * @return the built logical Window operator
 */
@Override
public LogicalOperator implement(DrillImplementor implementor) {
  final LogicalOperator inputOp = implementor.visitChild(this, 0, getInput());
  final org.apache.drill.common.logical.data.Window.Builder windowBuilder =
      new org.apache.drill.common.logical.data.Window.Builder();
  final List<String> outputFields = getRowType().getFieldNames();
  final List<String> inputFields = getInput().getRowType().getFieldNames();

  for (Group window : groups) {
    // Order keys are named after this relation's output fields.
    for (RelFieldCollation orderKey : window.orderKeys.getFieldCollations()) {
      windowBuilder.addOrdering(new Order.Ordering(
          orderKey.getDirection(),
          new FieldReference(outputFields.get(orderKey.getFieldIndex()))));
    }
    // Partition-by keys reference the input's fields.
    for (int group : BitSets.toIter(window.keys)) {
      final FieldReference partitionRef = new FieldReference(inputFields.get(group), ExpressionPosition.UNKNOWN);
      windowBuilder.addWithin(partitionRef, partitionRef);
    }
    // Aggregate outputs follow the partition keys in the output row type.
    final int groupCardinality = window.keys.cardinality();
    for (Ord<AggregateCall> aggCall : Ord.zip(window.getAggregateCalls(this))) {
      final FieldReference aggRef = new FieldReference(outputFields.get(groupCardinality + aggCall.i));
      windowBuilder.addAggregation(aggRef, toDrill(aggCall.e, inputFields));
    }
  }
  windowBuilder.setInput(inputOp);
  return windowBuilder.build();
}
/**
 * Implements this aggregate as a logical GroupingAggregate: one key per group
 * field (named after the input), and one expression per aggregate call (named
 * after the output field that follows the group keys).
 *
 * @param implementor the Drill implementor driving the conversion
 * @return the built grouping-aggregate operator
 */
@Override
public LogicalOperator implement(DrillImplementor implementor) {
  final GroupingAggregate.Builder builder = GroupingAggregate.builder();
  builder.setInput(implementor.visitChild(this, 0, getInput()));
  final List<String> childFields = getInput().getRowType().getFieldNames();
  final List<String> fields = getRowType().getFieldNames();

  for (int group : BitSets.toIter(groupSet)) {
    final FieldReference fr = new FieldReference(childFields.get(group), ExpressionPosition.UNKNOWN);
    builder.addKey(fr, fr);
  }

  // Hoisted loop invariant: the original recomputed groupSet.cardinality()
  // on every iteration. Aggregate outputs start right after the group keys.
  final int groupCount = groupSet.cardinality();
  for (Ord<AggregateCall> aggCall : Ord.zip(aggCalls)) {
    final FieldReference ref = new FieldReference(fields.get(groupCount + aggCall.i));
    builder.addExpr(ref, toDrill(aggCall.e, childFields, implementor));
  }
  return builder.build();
}
/**
 * Implements this sort as a logical Order operator, translating each field
 * collation into an ordering over the input's field names.
 *
 * @param implementor the Drill implementor driving the conversion
 * @return the built logical Order operator
 */
@Override
public LogicalOperator implement(DrillImplementor implementor) {
  final Order.Builder orderBuilder = Order.builder();
  orderBuilder.setInput(implementor.visitChild(this, 0, getInput()));
  final List<String> inputFieldNames = getInput().getRowType().getFieldNames();
  for (RelFieldCollation fc : this.collation.getFieldCollations()) {
    orderBuilder.addOrdering(
        fc.getDirection(),
        new FieldReference(inputFieldNames.get(fc.getFieldIndex())),
        fc.nullDirection);
  }
  return orderBuilder.build();
}