/**
 * Deserializes a column from its protobuf representation, restoring the
 * column name and decoding the serialized type.
 *
 * @param proto the serialized column descriptor
 */
public Column(ColumnProto proto) {
  this.name = proto.getName();
  this.type = TypeProtobufEncoder.decode(proto.getType());
}
/**
 * Serializes this type into its protobuf representation.
 *
 * @return the encoded {@code TypeProto} for this instance
 */
@Override
public TypeProto getProto() {
  return TypeProtobufEncoder.encode(this);
}
}
/**
 * Reconstructs a {@code Type} from its flattened protobuf form.
 *
 * <p>The element list is laid out bottom-up: the elements for a parameterized
 * type's children always appear before the parent element. Decoding therefore
 * replays the list against a stack — primitive elements are pushed directly,
 * and a parameterized element pops its already-decoded children.
 *
 * @param proto the serialized type
 * @return the single decoded type remaining on the stack
 */
public static Type decode(TypeProto proto) {
  Stack<Type> stack = new Stack<>();
  for (int curIdx = 0; curIdx < proto.getElementsCount(); curIdx++) {
    TypeElement e = proto.getElements(curIdx);
    if (e.hasChildNum()) { // if it is a type-parameterized, that is
      // Children were decoded by earlier iterations; pop exactly childNum of them.
      List<Type> childTypes = popMultiItems(stack, e.getChildNum());
      if (e.getKind() == ARRAY || e.getKind() == MAP) {
        stack.push(createTypeParameterizedType(e, childTypes));
      } else { // record
        assertCondition(e.getKind() == RECORD, "This type must be RECORD type.");
        // A record stores field names parallel to its child types, one per field.
        assertCondition(childTypes.size() == e.getFieldNamesCount(),
            "The number of Field types and names must be equal.");
        ImmutableList.Builder<Field> fields = ImmutableList.builder();
        for (int i = 0; i < childTypes.size(); i++) {
          // Pair each child type with its qualified field name, preserving order.
          fields.add(new Field(QualifiedIdentifier.fromProto(e.getFieldNames(i)), childTypes.get(i)));
        }
        stack.push(Record(fields.build()));
      }
    } else {
      stack.push(createPrimitiveType(e));
    }
  }
  // A well-formed proto leaves exactly the root type on the stack.
  assertCondition(stack.size() == 1, "Stack size has two or more items.");
  return stack.pop();
}
static Type createPrimitiveType(TypeElement element) { assertPrimitiveType(element); if (isValueParameterized(element.getKind())) { return TypeFactory.create(element.getKind(), EMPTY_LIST, element.getValueParamsList(), EMPTY_LIST); } else if (element.getKind() == PROTOBUF) { // TODO - PROTOBUF type should be removed later return new Protobuf(element.getStringParams(0)); } else { return TypeFactory.create(element.getKind()); } }
resultSet = null; org.apache.tajo.type.Type type = TypeProtobufEncoder.decode(columnProto.getType());
typeStr = TypeStringEncoder.encode(TypeProtobufEncoder.decode(columnProto.getType())); } else { throw new UndefinedColumnException(alterColumnProto.getOldColumnName());
for (int i = 0; i < table.getSchema().getFieldsCount(); i++) { ColumnProto col = table.getSchema().getFields(i); org.apache.tajo.type.Type type = TypeProtobufEncoder.decode(col.getType());
aTuple.put(fieldId, DatumFactory.createText(column.getType().toString())); } else if ("type_length".equalsIgnoreCase(colObj.getSimpleName())) { Type type = TypeProtobufEncoder.decode(column.getType()); if (type.isValueParameterized()) { aTuple.put(fieldId, DatumFactory.createInt4(type.getValueParameters().get(0)));
/**
 * Appends a new column to an existing Hive table's storage descriptor.
 *
 * <p>The column's Tajo type is decoded from its protobuf form and translated
 * to the corresponding Hive field type before the table is altered through
 * the Hive metastore client.
 *
 * @param databaseName database containing the table
 * @param tableName    table to alter
 * @param columnProto  the column (name plus encoded type) to append
 * @throws TajoInternalError if the metastore interaction fails for any
 *         reason other than the table being absent
 */
private void addNewColumn(String databaseName, String tableName, CatalogProtos.ColumnProto columnProto) {
  HiveCatalogStoreClientPool.HiveCatalogStoreClient client = null;
  try {
    client = clientPool.getClient();
    Table table = client.getHiveClient().getTable(databaseName, tableName);
    List<FieldSchema> columns = table.getSd().getCols();
    columns.add(new FieldSchema(columnProto.getName(),
        HiveCatalogUtil.getHiveFieldType(TypeProtobufEncoder.decode(columnProto.getType())), ""));
    client.getHiveClient().alter_table(databaseName, tableName, table);
  } catch (NoSuchObjectException ignored) {
    // The table (or database) no longer exists in the metastore; adding a
    // column to a missing table is deliberately treated as a no-op rather
    // than an error. (Was an unexplained empty catch — made intentional.)
  } catch (Exception e) {
    throw new TajoInternalError(e);
  } finally {
    // Always return the pooled client, even on failure paths.
    if (client != null) {
      client.release();
    }
  }
}
break; case CAST: current = new CastEval(context, child, TypeProtobufEncoder.decode(unaryProto.getCastingType())); break; case SIGNED: