default: throw new AssertionError( "Unexpected new DataMode value '" + field.getDataMode().name() + "'" );
/**
 * Build a writer for a non-projected column. The returned writer accepts
 * and discards all values written to it.
 *
 * @param schema schema of the column
 * @return a "dummy" writer for the column
 * @throws UnsupportedOperationException for composite/undefined types and
 *         unrecognized data modes
 */
public static AbstractObjectWriter buildDummyColumnWriter(ColumnMetadata schema) {
  switch (schema.type()) {
  case GENERIC_OBJECT:
  case LATE:
  case LIST:
  case MAP:
  case UNION:
    // No dummy representation exists for composite or undefined types.
    throw new UnsupportedOperationException(schema.type().toString());
  default:
    break;
  }
  ScalarObjectWriter discardingWriter = new ScalarObjectWriter(
      new DummyScalarWriter(schema));
  switch (schema.mode()) {
  case OPTIONAL:
  case REQUIRED:
    return discardingWriter;
  case REPEATED:
    // Repeated columns wrap the scalar writer in a dummy array writer.
    return new ArrayObjectWriter(
        new DummyArrayWriter(schema, discardingWriter));
  default:
    throw new UnsupportedOperationException(schema.mode().toString());
  }
}
.computeEnumSize(2, mode_.getNumber());
throw new UnsupportedOperationException(String.format(UNSUPPORTED, type.getName(), mode.name())); case ARRAY: break; throw new UnsupportedOperationException(String.format(UNSUPPORTED, type.getName(), mode.name())); throw new UnsupportedOperationException(String.format(UNSUPPORTED, type.getName(), mode.name()));
org.apache.drill.common.types.TypeProtos.DataMode value = org.apache.drill.common.types.TypeProtos.DataMode.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue);
default : throw new IllegalArgumentException("Mode [" + type.getMode().name() + "] not supported.."); default : throw new IllegalArgumentException("Mode [" + type.getMode().name() + "] not supported.."); default : throw new IllegalArgumentException("Mode [" + type.getMode().name() + "] not supported..");
(TypeProtos.DataMode.OPTIONAL.equals(parmType.getMode()) || matchedFuncHolder.getNullHandling() == FunctionTemplate.NullHandling.NULL_IF_NULL)) { argsWithCast.add(new TypedNullConstant(parmType));
default: throw new AssertionError( "Unexpected new DataMode value '" + field.getDataMode().name() + "'" );
.append(type.getMode().name()) .append(")");
break; case 2: builder.setMode(org.apache.drill.common.types.TypeProtos.DataMode.valueOf(input.readEnum())); break; case 3:
.computeEnumSize(2, mode_.getNumber());
field.getType().getMode().name(), type.name()));
.append(metadata.getName()) .append("(type: ") .append(metadata.getType().getMode().name()) .append(" ") .append(metadata.getType().getMinorType().name())
// NOTE: protobuf-generated serializer — field numbers, bitfield masks, and
// write order must not be altered; kept byte-identical, comments only.
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Forces the memoized size to be computed before writing.
  getSerializedSize();
  // Each bit of bitField0_ records whether the corresponding optional
  // field was explicitly set; unset fields are skipped entirely.
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeEnum(1, minorType_.getNumber());
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeEnum(2, mode_.getNumber());
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeInt32(3, width_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    output.writeInt32(4, precision_);
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    output.writeInt32(5, scale_);
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    output.writeInt32(6, timeZone_);
  }
  // Repeated field: every element is written, no presence bit involved.
  for (int i = 0; i < subType_.size(); i++) {
    output.writeEnum(7, subType_.get(i).getNumber());
  }
  // Preserve any fields this runtime did not recognize when parsing.
  getUnknownFields().writeTo(output);
}
/**
 * Build a writer for a non-projected column.
 *
 * @param schema schema of the column
 * @return a "dummy" writer for the column
 * @throws UnsupportedOperationException if the type is composite/undefined
 *         or the mode is unrecognized
 */
public static AbstractObjectWriter buildDummyColumnWriter(ColumnMetadata schema) {
  switch (schema.type()) {
  case GENERIC_OBJECT:
  case LATE:
  case LIST:
  case MAP:
  case UNION:
    // Dummy writers exist only for scalar types.
    throw new UnsupportedOperationException(schema.type().toString());
  default:
    ScalarObjectWriter noOpScalar =
        new ScalarObjectWriter(new DummyScalarWriter(schema));
    switch (schema.mode()) {
    case REPEATED:
      return new ArrayObjectWriter(new DummyArrayWriter(schema, noOpScalar));
    case OPTIONAL:
    case REQUIRED:
      return noOpScalar;
    default:
      throw new UnsupportedOperationException(schema.mode().toString());
    }
  }
}
public static AbstractObjectWriter buildColumnWriter(ColumnMetadata schema, ValueVector vector) { if (vector == null) { return buildDummyColumnWriter(schema); } // Build a writer for a materialized column. assert schema.type() == vector.getField().getType().getMinorType(); assert schema.mode() == vector.getField().getType().getMode(); switch (schema.type()) { case GENERIC_OBJECT: case LATE: case NULL: case LIST: case MAP: case UNION: throw new UnsupportedOperationException(schema.type().toString()); default: switch (schema.mode()) { case OPTIONAL: return nullableScalarWriter(schema, (NullableVector) vector); case REQUIRED: return requiredScalarWriter(schema, vector); case REPEATED: return repeatedScalarWriter(schema, (RepeatedValueVector) vector); default: throw new UnsupportedOperationException(schema.mode().toString()); } } }
/**
 * Convert holder to a string for debugging use.
 */
@Override
public String toString() {
  DebugStringBuilder out = new DebugStringBuilder(this);
  if (isConstant()) {
    out.append("const ");
  }
  // Identity: full type name and holder name.
  out.append(holder.type().fullName());
  out.append(" ");
  out.append(holder.name());
  out.append(", ");
  // Major type: mode followed by minor type.
  out.append(type.getMode().name());
  out.append(" ");
  out.append(type.getMinorType().name());
  out.append(", ");
  // Delegate rendering of the holder and value bodies.
  holder.generate(out.formatter());
  out.append(", ");
  value.generate(out.formatter());
  return out.toString();
}
}
/**
 * Represents RelDataTypeReader content as string, used in query plan json.
 * Example: RelDataTypeReader{columnNames=[col1], columnTypes=[INTERVALYEAR-OPTIONAL]}
 *
 * @return string representation of RelDataTypeReader content
 */
@Override
public String toString() {
  // Render each type as "MINORTYPE-MODE" before assembling the result.
  List<String> renderedTypes = new ArrayList<>(columnTypes.size());
  for (TypeProtos.MajorType columnType : columnTypes) {
    renderedTypes.add(columnType.getMinorType().toString()
        + "-" + columnType.getMode().toString());
  }
  StringBuilder sb = new StringBuilder("RelDataTypeReader{columnNames=");
  sb.append(columnNames);
  sb.append(", columnTypes=");
  sb.append(renderedTypes);
  sb.append("}");
  return sb.toString();
}
}
private static void logFunctionResolutionError(ErrorCollector errorCollector, FunctionCall call) { // add error to collector StringBuilder sb = new StringBuilder(); sb.append("Missing function implementation: "); sb.append("["); sb.append(call.getName()); sb.append("("); boolean first = true; for(LogicalExpression e : call.args) { TypeProtos.MajorType mt = e.getMajorType(); if (first) { first = false; } else { sb.append(", "); } sb.append(mt.getMinorType().name()); sb.append("-"); sb.append(mt.getMode().name()); } sb.append(")"); sb.append("]"); errorCollector.addGeneralError(call.getPosition(), sb.toString()); }
/**
 * Equality check that treats fields with the same set of subtypes as equal,
 * even if the subtypes appear in a different order.
 *
 * @param t1 first type to compare
 * @param t2 second type to compare
 * @return true if the types match on minor type, mode, and subtype set
 */
private boolean majorTypeEqual(MajorType t1, MajorType t2) {
  if (t1.equals(t2)) {
    // Fast path: full structural equality, including subtype order.
    return true;
  }
  // Otherwise compare field-by-field, with subtypes as order-insensitive sets.
  return t1.getMinorType().equals(t2.getMinorType())
      && t1.getMode().equals(t2.getMode())
      && Sets.newHashSet(t1.getSubTypeList())
          .equals(Sets.newHashSet(t2.getSubTypeList()));
}