/**
 * Resolves the primitive type info for each partition column of {@code tab},
 * in partition-key order.
 */
private static List<PrimitiveTypeInfo> extractPartColTypes(Table tab) {
  List<FieldSchema> partitionColumns = tab.getPartCols();
  List<PrimitiveTypeInfo> typeInfos = new ArrayList<PrimitiveTypeInfo>(partitionColumns.size());
  for (int i = 0; i < partitionColumns.size(); i++) {
    typeInfos.add(TypeInfoFactory.getPrimitiveTypeInfo(partitionColumns.get(i).getType()));
  }
  return typeInfos;
}
/**
 * Appends the name and type of every column of {@code table} (including
 * partition columns, per getAllCols) to the two output lists, index-aligned.
 */
private static void extractColumnInfos(Table table, List<String> colNames, List<String> colTypes) {
  List<FieldSchema> allColumns = table.getAllCols();
  for (int i = 0; i < allColumns.size(); i++) {
    FieldSchema column = allColumns.get(i);
    colNames.add(column.getName());
    colTypes.add(column.getType());
  }
}
/**
 * Returns the current value of the requested field.
 *
 * @param field the field to read
 * @return the field's value (may be null)
 * @throws IllegalStateException if the field is not handled by this struct
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case NAME:
      return getName();
    case TYPE:
      return getType();
    case COMMENT:
      return getComment();
  }
  // Reached only for an enum constant not covered above; name it so the
  // failure is diagnosable instead of throwing a message-less exception.
  throw new IllegalStateException("Unknown field: " + field);
}
/**
 * Converts API field schemas to their metastore model counterparts,
 * lower-casing each column name. Returns null when the input is null.
 */
private List<MFieldSchema> convertToMFieldSchemas(List<FieldSchema> keys) {
  if (keys == null) {
    return null;
  }
  List<MFieldSchema> converted = new ArrayList<>(keys.size());
  for (FieldSchema key : keys) {
    converted.add(new MFieldSchema(key.getName().toLowerCase(), key.getType(), key.getComment()));
  }
  return converted;
}
/**
 * Translates a metastore API FieldSchema into a Column, mapping an
 * empty-or-null comment to an absent Optional.
 */
public static Column fromMetastoreApiFieldSchema(FieldSchema fieldSchema) {
  HiveType hiveType = HiveType.valueOf(fieldSchema.getType());
  Optional<String> comment = Optional.ofNullable(emptyToNull(fieldSchema.getComment()));
  return new Column(fieldSchema.getName(), hiveType, comment);
}
/**
 * Builds a FieldDesc for every input field schema, parsing each type string
 * into its TypeInfo representation.
 *
 * @param fieldSchemas columns to convert; must be non-null
 * @return field descriptors in the same order as the input
 */
private List<FieldDesc> convertSchema(List<FieldSchema> fieldSchemas) {
  // Presize to the known result size to avoid intermediate resizes.
  List<FieldDesc> colDescs = new ArrayList<FieldDesc>(fieldSchemas.size());
  for (FieldSchema fs : fieldSchemas) {
    String colName = fs.getName();
    String typeString = fs.getType();
    colDescs.add(new FieldDesc(colName, TypeInfoUtils.getTypeInfoFromTypeString(typeString)));
  }
  return colDescs;
}
/**
 * Looks up the (lower-cased) type of the named partition key, matching the
 * key name case-insensitively.
 *
 * @throws SemanticException if the table has no partition key with that name
 */
private static String getColTypeOf(Table tbl, String partKey) throws SemanticException {
  for (FieldSchema keySchema : tbl.getPartitionKeys()) {
    if (!partKey.equalsIgnoreCase(keySchema.getName())) {
      continue;
    }
    return keySchema.getType().toLowerCase();
  }
  throw new SemanticException("Unknown partition key : " + partKey);
}
/**
 * Captures the partition spec and indexes each partition column's primitive
 * type info by its lower-cased column name.
 */
public ExpressionBuilder(Table table, Map<String, String> partSpecs) {
  this.partSpecs = partSpecs;
  for (FieldSchema partitionKey : table.getPartitionKeys()) {
    String columnName = partitionKey.getName().toLowerCase();
    partColumnTypesMap.put(columnName, TypeInfoFactory.getPrimitiveTypeInfo(partitionKey.getType()));
  }
}
/** Maps each FieldSchema to a Column with the same name, type, and comment. */
private static List<Column> getColumns(List<FieldSchema> fieldSchemas) {
  List<Column> result = Lists.newArrayListWithCapacity(fieldSchemas.size());
  for (FieldSchema schema : fieldSchemas) {
    Column column = new Column(schema.getName(), schema.getType(), schema.getComment());
    result.add(column);
  }
  return result;
}
/**
 * Renders each field schema as "name type", followed by " comment" when a
 * comment is present. Returns null when the input list is null.
 */
public static List<String> getFieldSchemaString(List<FieldSchema> fl) {
  if (fl == null) {
    return null;
  }
  ArrayList<String> rendered = new ArrayList<String>();
  for (FieldSchema field : fl) {
    StringBuilder line = new StringBuilder(field.getName());
    line.append(' ').append(field.getType());
    if (field.getComment() != null) {
      line.append(' ').append(field.getComment());
    }
    rendered.add(line.toString());
  }
  return rendered;
}
/**
 * Recomputes and persists the statistics of a single partition: reads the
 * current statistics, applies {@code update} to produce the new ones, writes
 * the basic statistics into the partition's parameters, stores the column
 * statistics, and deletes column statistics that the update dropped.
 *
 * NOTE(review): the alter/set/delete steps below are separate metastore calls,
 * so a failure partway through can leave statistics partially updated —
 * presumably acceptable here, but confirm against callers' expectations.
 */
@Override
public synchronized void updatePartitionStatistics(String databaseName, String tableName, String partitionName, Function<PartitionStatistics, PartitionStatistics> update) {
    // Fail fast if the metastore did not return an entry for this partition.
    PartitionStatistics currentStatistics = requireNonNull(
        getPartitionStatistics(databaseName, tableName, ImmutableSet.of(partitionName)).get(partitionName),
        "getPartitionStatistics() returned null");
    PartitionStatistics updatedStatistics = update.apply(currentStatistics);
    List<Partition> partitions = getPartitionsByNames(databaseName, tableName, ImmutableList.of(partitionName));
    // Exactly one partition must match the name we asked for.
    if (partitions.size() != 1) {
        throw new PrestoException(HIVE_METASTORE_ERROR, "Metastore returned multiple partitions for name: " + partitionName);
    }
    Partition originalPartition = getOnlyElement(partitions);
    // Mutate a deep copy so the fetched partition object is left untouched.
    Partition modifiedPartition = originalPartition.deepCopy();
    HiveBasicStatistics basicStatistics = updatedStatistics.getBasicStatistics();
    // Basic statistics (row count etc.) travel inside the partition parameters.
    modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), basicStatistics));
    alterPartitionWithoutStatistics(databaseName, tableName, modifiedPartition);
    // Column name -> HiveType map drawn from the partition's storage descriptor.
    Map<String, HiveType> columns = modifiedPartition.getSd().getCols().stream()
        .collect(toImmutableMap(FieldSchema::getName, schema -> HiveType.valueOf(schema.getType())));
    setPartitionColumnStatistics(databaseName, tableName, partitionName, columns, updatedStatistics.getColumnStatistics(), basicStatistics.getRowCount());
    // Remove per-column statistics that existed before but are absent after the update.
    Set<String> removedStatistics = difference(currentStatistics.getColumnStatistics().keySet(), updatedStatistics.getColumnStatistics().keySet());
    removedStatistics.forEach(column -> deletePartitionColumnStatistics(databaseName, tableName, partitionName, column));
}
/**
 * Builds a column descriptor (with a 1-based position) for each field schema,
 * preserving input order.
 */
public TableSchema(List<FieldSchema> fieldSchemas) {
  int position = 1;
  for (FieldSchema field : fieldSchemas) {
    TypeDescriptor type = new TypeDescriptor(field.getType());
    columns.add(new ColumnDescriptor(field.getName(), field.getComment(), type, position));
    position++;
  }
}
/**
 * Convert a FieldSchema to an HCatFieldSchema.
 * @param fs FieldSchema to convert
 * @return HCatFieldSchema representation of the given FieldSchema
 * @throws HCatException if the field's type cannot be represented in HCat
 */
public static HCatFieldSchema getHCatFieldSchema(FieldSchema fs) throws HCatException {
  String fieldName = fs.getName();
  // Parse the Hive type string (e.g. "array<int>") into its TypeInfo form.
  TypeInfo baseTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(fs.getType());
  return getHCatFieldSchema(fieldName, baseTypeInfo, fs.getComment());
}
/**
 * Builds the metastore model object for {@code type}, converting each nested
 * field (when present) to its MFieldSchema form.
 */
private MType getMType(Type type) {
  List<MFieldSchema> fields = new ArrayList<>();
  List<FieldSchema> apiFields = type.getFields();
  if (apiFields != null) {
    for (FieldSchema apiField : apiFields) {
      fields.add(new MFieldSchema(apiField.getName(), apiField.getType(), apiField.getComment()));
    }
  }
  return new MType(type.getName(), type.getType1(), type.getType2(), fields);
}
/** Renders a single column as a name/type/comment map for unformatted output. */
private Map<String, Object> makeOneColUnformatted(FieldSchema col) {
  // Reassign after each put so this works whether put() is fluent or mutating.
  MapBuilder builder = MapBuilder.create();
  builder = builder.put("name", col.getName());
  builder = builder.put("type", col.getType());
  builder = builder.put("comment", col.getComment());
  return builder.build();
}
/** Asserts that {@code col}'s name and type match this test's expected values. */
void compareCommon(FieldSchema col) {
  String actualName = col.getName();
  String actualType = col.getType();
  Assert.assertEquals(colName, actualName);
  Assert.assertEquals(colType, actualType);
}
/**
 * Compiles {@code query} and returns the declared type of the first column of
 * the resulting CREATE TABLE descriptor, or null when compilation fails.
 *
 * NOTE(review): the Driver created here is never released; if Driver holds
 * resources (sessions, temp files) this could leak in tests — confirm whether
 * a close/destroy call is required.
 */
private String getColumnType(String query) {
  Driver driver = createDriver();
  int rc = driver.compile(query);
  if (rc != 0) {
    // Compilation failed; callers treat null as "no type available".
    return null;
  }
  QueryPlan plan = driver.getPlan();
  // The first root task of the compiled plan is expected to be the DDL task.
  DDLTask task = (DDLTask) plan.getRootTasks().get(0);
  DDLWork work = task.getWork();
  CreateTableDesc spec = work.getCreateTblDesc();
  FieldSchema fs = spec.getCols().get(0);
  return fs.getType();
}
/**
 * Pins current behavior: add_partitions() accepts a partition whose
 * storage-descriptor column carries an invalid type string, and the bogus
 * type is stored verbatim.
 */
@Test
public void testAddPartitionsInvalidColTypeInSd() throws Exception {
  createTable();
  Partition partition = buildPartition(DB_NAME, TABLE_NAME, DEFAULT_YEAR_VALUE);
  // Corrupt the first SD column's type with a string that is not a valid Hive type.
  partition.getSd().getCols().get(0).setType("xyz");
  client.add_partitions(Lists.newArrayList(partition));
  // TODO: Not sure that this is the correct behavior. It doesn't make sense to create the
  // partition with column with invalid type. This should be investigated later.
  Partition part = client.getPartition(DB_NAME, TABLE_NAME, Lists.newArrayList(DEFAULT_YEAR_VALUE));
  Assert.assertNotNull(part);
  // The invalid type string round-trips unchanged through the metastore.
  Assert.assertEquals("xyz", part.getSd().getCols().get(0).getType());
}
/**
 * Pins current behavior: add_partition() (single-partition variant) accepts a
 * partition whose storage-descriptor column carries an invalid type string,
 * and the bogus type is stored verbatim.
 */
@Test
public void testAddPartitionInvalidColTypeInSd() throws Exception {
  createTable();
  Partition partition = buildPartition(DB_NAME, TABLE_NAME, DEFAULT_YEAR_VALUE);
  // Corrupt the first SD column's type with a string that is not a valid Hive type.
  partition.getSd().getCols().get(0).setType("xyz");
  client.add_partition(partition);
  // TODO: Not sure that this is the correct behavior. It doesn't make sense to create the
  // partition with column with invalid type. This should be investigated later.
  Partition part = client.getPartition(DB_NAME, TABLE_NAME, Lists.newArrayList(DEFAULT_YEAR_VALUE));
  Assert.assertNotNull(part);
  // The invalid type string round-trips unchanged through the metastore.
  Assert.assertEquals("xyz", part.getSd().getCols().get(0).getType());
}