@Override public void implementRewrite(RewriteImplementor implementor) { Map<String, RelDataType> rewriteFields = this.context.rewriteFields; for (Map.Entry<String, RelDataType> rewriteField : rewriteFields.entrySet()) { String fieldName = rewriteField.getKey(); RelDataTypeField field = rowType.getField(fieldName, true, false); if (field != null) { RelDataType fieldType = field.getType(); rewriteField.setValue(fieldType); } } // add dynamic field to the table scan if join not exist if (!this.context.hasJoin && !this.context.dynamicFields.isEmpty()) { Map<TblColRef, RelDataType> dynFields = this.context.dynamicFields; List<TblColRef> newCols = Lists.newArrayList(this.columnRowType.getAllColumns()); List<RelDataTypeField> newFieldList = Lists.newArrayList(this.rowType.getFieldList()); int paramIndex = this.rowType.getFieldList().size(); for (TblColRef fieldCol : dynFields.keySet()) { newCols.add(fieldCol); RelDataType fieldType = dynFields.get(fieldCol); RelDataTypeField newField = new RelDataTypeFieldImpl(fieldCol.getName(), paramIndex++, fieldType); newFieldList.add(newField); } // rebuild row type RelDataTypeFactory.FieldInfoBuilder fieldInfo = getCluster().getTypeFactory().builder(); fieldInfo.addAll(newFieldList); this.rowType = getCluster().getTypeFactory().createStructType(fieldInfo); this.columnRowType = new ColumnRowType(newCols); } }
/** * Apply any data format conversion expressions. */ private RexNode createColumnFormatConversion(final DrillScanRel hiveScanRel, final DrillScanRel nativeScanRel, final String colName, final RexBuilder rb) { final RelDataType outputType = hiveScanRel.getRowType().getField(colName, false, false).getType(); final RelDataTypeField inputField = nativeScanRel.getRowType().getField(colName, false, false); final RexInputRef inputRef = rb.makeInputRef(inputField.getType(), inputField.getIndex()); if (outputType.getSqlTypeName() == SqlTypeName.TIMESTAMP) { // TIMESTAMP is stored as INT96 by Hive in ParquetFormat. Use convert_fromTIMESTAMP_IMPALA UDF to convert // INT96 format data to TIMESTAMP // TODO: Remove this conversion once "store.parquet.reader.int96_as_timestamp" will be true by default return rb.makeCall(INT96_TO_TIMESTAMP, inputRef); } return inputRef; }
for (final RelDataTypeField field : table.getRowType().getFieldList()) { final RelDataTypeField targetField = logicalTargetRowType.getField(field.getName(), true, false); switch (strategies.get(field.getIndex())) { case NOT_NULLABLE:
@Override
public String apply(String input) {
  // Resolve the requested name case-insensitively against the table's row type.
  final RelDataTypeField resolved = type.getField(input, false, false);
  if (resolved != null) {
    return resolved.getName();
  }
  // Build a quoted, comma-separated list of the fields that do exist,
  // so the error tells the user what was available.
  final String availableFields = FluentIterable.from(type.getFieldNames())
      .transform(SqlUtils.QUOTER)
      .join(Joiner.on(", "));
  throw UserException.validationError()
      .message("Unable to find field %s in table %s. Available fields were: %s.",
          input,
          SqlUtils.quotedCompound(path),
          availableFields)
      .build(logger);
}
for (Map.Entry<String, RelDataType> rewriteField : this.context.rewriteFields.entrySet()) { String fieldName = rewriteField.getKey(); if (this.rowType.getField(fieldName, true, false) == null) { RelDataType fieldType = rewriteField.getValue(); RelDataTypeField newField = new RelDataTypeFieldImpl(fieldName, paramIndex++, fieldType);
/**
 * Create a cast for partition column. Partition column is output as "VARCHAR" in native parquet reader. Cast it
 * appropriate type according the partition type in HiveScan.
 *
 * @param hiveScanRel   Hive scan supplying the declared partition type
 * @param nativeScanRel native scan supplying the directory column's index
 * @param outputColName partition column name in the Hive scan's row type
 * @param dirColName    directory column name in the native scan's row type
 * @param rb            builder used to construct the resulting expression
 * @return expression producing the partition value in its declared type
 */
private RexNode createPartitionColumnCast(final DrillScanRel hiveScanRel, final DrillScanRel nativeScanRel,
    final String outputColName, final String dirColName, final RexBuilder rb) {
  final RelDataType targetType = hiveScanRel.getRowType().getField(outputColName, false, false).getType();
  final RelDataTypeField dirField = nativeScanRel.getRowType().getField(dirColName, false, false);
  // The native reader emits partition values as VARCHAR, so the input ref is typed VARCHAR
  // regardless of the declared partition type.
  final RexInputRef varcharRef =
      rb.makeInputRef(rb.getTypeFactory().createSqlType(SqlTypeName.VARCHAR), dirField.getIndex());

  // CHAR targets get a right-trim instead of a cast; everything else is cast to the declared type.
  return targetType.getSqlTypeName() == SqlTypeName.CHAR
      ? rb.makeCall(RTRIM, varcharRef)
      : rb.makeCast(targetType, varcharRef);
}
}
int rowIDPos = tableScan.getTable().getRowType().getField( VirtualColumn.ROWID.getName(), false, false).getIndex(); RexNode rowIDFieldAccess = rexBuilder.makeFieldAccess(
RelDataTypeField field = getInput().getRowType().getField(func.getRewriteFieldName(), true, false); if (newArgList.isEmpty()) { newArgList.add(field.getIndex());
/** * Returns the "extra" field in a row type whose presence signals that * fields will come into existence just by asking for them. * * @param rowType Row type * @return The "extra" field, or null */ public static RelDataTypeField extra(RelDataType rowType) { // Even in a case-insensitive connection, the name must be precisely // "_extra". return rowType.getField("_extra", true, false); }
/** * Returns the "extra" field in a row type whose presence signals that * fields will come into existence just by asking for them. * * @param rowType Row type * @return The "extra" field, or null */ public static RelDataTypeField extra(RelDataType rowType) { // Even in a case-insensitive connection, the name must be precisely // "_extra". return rowType.getField("_extra", true, false); }
/**
 * Looks up {@code columnName} in {@code rowType} with a case-sensitive match,
 * never eliding record fields.
 */
@Override
public RelDataTypeField field(RelDataType rowType, String columnName) {
  final RelDataTypeField match = rowType.getField(columnName, true, false);
  return match;
}
/**
 * Resolves {@code fieldName} in {@code rowType} using this instance's
 * configured case sensitivity; record fields are never elided.
 */
public RelDataTypeField field(RelDataType rowType, String fieldName) {
  final boolean elideRecord = false;
  return rowType.getField(fieldName, caseSensitive, elideRecord);
}
/**
 * Field lookup honoring this instance's {@code caseSensitive} setting.
 * Record elision is always off.
 */
public RelDataTypeField field(RelDataType rowType, String fieldName) {
  final RelDataTypeField found = rowType.getField(fieldName, caseSensitive, false);
  return found;
}
@Deprecated // to be removed before 2.0 public static RelDataTypeField lookupField(boolean caseSensitive, final RelDataType rowType, String columnName) { return rowType.getField(columnName, caseSensitive, false); }
@Deprecated // to be removed before 2.0 public static RelDataTypeField lookupField(boolean caseSensitive, final RelDataType rowType, String columnName) { return rowType.getField(columnName, caseSensitive, false); }
@Override
public Integer apply(String input) {
  // Use checkNotNull's template-args overload so the error message is only
  // formatted on the failure path, not eagerly on every successful lookup.
  return Preconditions.checkNotNull(
      inputRowType.getField(input, false, false),
      "Partition column '%s' could not be resolved in the table's column lists", input)
      .getIndex();
} }).toList();
/**
 * Delegates the field lookup, with all flags passed through unchanged,
 * to the wrapped row type.
 */
public RelDataTypeField getField(String fieldName, boolean caseSensitive, boolean elideRecord) {
  final RelDataTypeField result = delegate.getField(fieldName, caseSensitive, elideRecord);
  return result;
}
// Pure pass-through: forwards the lookup (name, case sensitivity, record
// elision) to the wrapped delegate row type without modification.
public RelDataTypeField getField(String fieldName, boolean caseSensitive, boolean elideRecord) { return delegate.getField(fieldName, caseSensitive, elideRecord); }
/**
 * Resolves the configured partition columns to their ordinal positions in the
 * input row type.
 *
 * @return indices of the partition columns, in declaration order
 * @throws IllegalArgumentException if a partition column is not present in the input row type
 */
private List<Integer> resolvePartitionKeys() {
  final RelDataType inputRowType = getInput().getRowType();
  final List<String> partitionCols = getCreateTableEntry().getPartitionColumns();
  // Presize: one index per partition column.
  final List<Integer> keys = Lists.newArrayListWithCapacity(partitionCols.size());
  for (final String col : partitionCols) {
    // Case-sensitive lookup, no record elision.
    final RelDataTypeField field = inputRowType.getField(col, false, false);
    // Template-args overload defers message formatting to the failure path,
    // instead of String.format-ing eagerly on every (successful) iteration.
    Preconditions.checkArgument(field != null,
        "partition col %s could not be resolved in table's column lists!", col);
    keys.add(field.getIndex());
  }
  return keys;
}
@Override public RelNode visit(TableScan tableScan) { if (!(tableScan instanceof IncrementallyUpdateable)) { return tableScan; } final RelNode newScan = updateScan((IncrementallyUpdateable) tableScan); // build new filter to apply refresh condition. final RexBuilder rexBuilder = tableScan.getCluster().getRexBuilder(); final RexNode inputRef = rexBuilder.makeInputRef(newScan, newScan.getRowType().getField(UPDATE_COLUMN, false, false).getIndex()); final RexNode literal = generateLiteral(rexBuilder, tableScan.getCluster().getTypeFactory()); final RexNode condition = tableScan.getCluster().getRexBuilder().makeCall(SqlStdOperatorTable.GREATER_THAN, ImmutableList.of(inputRef, literal)); return LogicalFilter.create(newScan, condition); }