/**
 * Builds a page indexer over the table's partition-key column types.
 * For bucketed tables an extra INTEGER slot is appended (presumably the
 * bucket id column — confirm with the caller that feeds the indexer).
 */
public HiveWriterPagePartitioner(
        List<HiveColumnHandle> inputColumns,
        boolean bucketed,
        PageIndexerFactory pageIndexerFactory,
        TypeManager typeManager)
{
    requireNonNull(inputColumns, "inputColumns is null");
    requireNonNull(pageIndexerFactory, "pageIndexerFactory is null");
    requireNonNull(typeManager, "typeManager is null");

    // Copy into an explicit ArrayList: Collectors.toList() makes no guarantee
    // that the returned list is mutable, and we append to it below when the
    // table is bucketed. (Fully qualified to avoid touching the import block.)
    List<Type> partitionColumnTypes = new java.util.ArrayList<>(inputColumns.stream()
            .filter(HiveColumnHandle::isPartitionKey)
            .map(column -> typeManager.getType(column.getTypeSignature()))
            .collect(toList()));
    if (bucketed) {
        partitionColumnTypes.add(INTEGER);
    }
    this.pageIndexer = pageIndexerFactory.createPageIndexer(partitionColumnTypes);
}
/**
 * Resolves the Presto {@code Type} of each handle (which must all be
 * {@code HiveColumnHandle}s) via the shared {@code TYPE_MANAGER}.
 */
public static List<Type> getTypes(List<? extends ColumnHandle> columnHandles)
{
    ImmutableList.Builder<Type> result = ImmutableList.builder();
    columnHandles.forEach(handle ->
            result.add(TYPE_MANAGER.getType(((HiveColumnHandle) handle).getTypeSignature())));
    return result.build();
}
// Resolve the column's Presto Type from its serialized TypeSignature.
Type type = typeManager.getType(column.getTypeSignature());
// Resolve the column's Presto Type from its serialized TypeSignature.
Type type = typeManager.getType(column.getTypeSignature());
// Resolve the column's Presto Type from its serialized TypeSignature.
Type type = typeManager.getType(column.getTypeSignature());
// Fragment of an argument list (enclosing call not visible): resolved Presto Type,
// the column comment if any, and extra-info text derived from partition-key status.
typeManager.getType(handle.getTypeSignature()), columnComment.get(handle.getName()).orElse(null), columnExtraInfo(handle.isPartitionKey()),
private static List<HiveColumnHandle> getPhysicalHiveColumnHandles(List<HiveColumnHandle> columns, boolean useOrcColumnNames, OrcReader reader, Path path) { if (!useOrcColumnNames) { return columns; } verifyFileHasColumnNames(reader.getColumnNames(), path); Map<String, Integer> physicalNameOrdinalMap = buildPhysicalNameOrdinalMap(reader); int nextMissingColumnIndex = physicalNameOrdinalMap.size(); ImmutableList.Builder<HiveColumnHandle> physicalColumns = ImmutableList.builder(); for (HiveColumnHandle column : columns) { Integer physicalOrdinal = physicalNameOrdinalMap.get(column.getName()); if (physicalOrdinal == null) { // if the column is missing from the file, assign it a column number larger // than the number of columns in the file so the reader will fill it with nulls physicalOrdinal = nextMissingColumnIndex; nextMissingColumnIndex++; } physicalColumns.add(new HiveColumnHandle(column.getName(), column.getHiveType(), column.getTypeSignature(), physicalOrdinal, column.getColumnType(), column.getComment())); } return physicalColumns.build(); }
@Test
public void testColumnHandleDeserialize()
        throws Exception
{
    // Round-trip the map-form fixture through JSON and check each field survives.
    String serialized = objectMapper.writeValueAsString(COLUMN_HANDLE_AS_MAP);
    HiveColumnHandle deserialized = objectMapper.readValue(serialized, HiveColumnHandle.class);

    assertEquals(deserialized.getName(), "column");
    assertEquals(deserialized.getTypeSignature(), DOUBLE.getTypeSignature());
    assertEquals(deserialized.getHiveType(), HiveType.HIVE_FLOAT);
    assertEquals(deserialized.getHiveColumnIndex(), -1);
    assertEquals(deserialized.isPartitionKey(), true);
}
// Reject non-REGULAR columns, then record the column's Presto and Hive types at slot i.
checkState(column.getColumnType() == REGULAR, "column type must be regular"); types[i] = typeManager.getType(column.getTypeSignature()); hiveTypes[i] = column.getHiveType();
// Stream-pipeline fragment (source/terminal outside this view): convert each
// handle into connector ColumnMetadata with its resolved Presto Type.
.map(column -> new ColumnMetadata( column.getName(), typeManager.getType(column.getTypeSignature()), column.getComment().orElse(null), column.isHidden()))
// Record partition-key columns' names and resolved Presto types in parallel lists.
if (column.isPartitionKey()) { partitionColumnNames.add(column.getName()); partitionColumnTypes.add(typeManager.getType(column.getTypeSignature()));
// Stream-pipeline fragment: resolve each column's Presto Type and collect.
.map(column -> typeManager.getType(column.getTypeSignature())) .collect(toList());
// Fragment (the "delegate)" closes a call started outside this view):
// resolve the Presto Type of every hive column into columnTypes.
delegate); List<Type> columnTypes = hiveColumns.stream() .map(input -> typeManager.getType(input.getTypeSignature())) .collect(toList());
// Cache the resolved Presto Type at this column's slot in the types array.
Type type = typeManager.getType(column.getTypeSignature()); types[columnIndex] = type;
// For REGULAR (data) columns only: record the resolved type keyed by hive column
// index and build a ColumnReference for predicate evaluation.
for (HiveColumnHandle column : physicalColumns) { if (column.getColumnType() == REGULAR) { Type type = typeManager.getType(column.getTypeSignature()); includedColumns.put(column.getHiveColumnIndex(), type); columnReferences.add(new ColumnReference<>(column, column.getHiveColumnIndex(), type));
// Resolve the Presto Type from the mapping's underlying handle and cache it.
Type type = typeManager.getType(columnMapping.getHiveColumnHandle().getTypeSignature()); types[columnIndex] = type;
/**
 * Maps each handle (required to be a {@code HiveColumnHandle}) to its
 * Presto {@code Type} using the shared {@code TYPE_MANAGER}.
 */
public static List<Type> getTypes(List<? extends ColumnHandle> columnHandles)
{
    ImmutableList.Builder<Type> builder = ImmutableList.builder();
    for (ColumnHandle handle : columnHandles) {
        HiveColumnHandle hiveHandle = (HiveColumnHandle) handle;
        builder.add(TYPE_MANAGER.getType(hiveHandle.getTypeSignature()));
    }
    return builder.build();
}
/**
 * Resolves the Presto {@code Type} for every handle in order; all handles
 * must be {@code HiveColumnHandle}s.
 */
public static List<Type> getTypes(List<? extends ColumnHandle> columnHandles)
{
    ImmutableList.Builder<Type> resolved = ImmutableList.builder();
    for (ColumnHandle columnHandle : columnHandles) {
        TypeSignature signature = ((HiveColumnHandle) columnHandle).getTypeSignature();
        resolved.add(TYPE_MANAGER.getType(signature));
    }
    return resolved.build();
}
}
/**
 * Builds a Parquet tuple-domain predicate from the effective predicate and
 * the non-partition columns' positions in the file schema.
 */
public static ParquetPredicate buildParquetPredicate(
        List<HiveColumnHandle> columns,
        TupleDomain<HiveColumnHandle> effectivePredicate,
        MessageType fileSchema,
        TypeManager typeManager)
{
    ImmutableList.Builder<ColumnReference<HiveColumnHandle>> references = ImmutableList.builder();
    for (HiveColumnHandle handle : columns) {
        // Partition keys are not stored in the Parquet file; skip them.
        if (handle.isPartitionKey()) {
            continue;
        }
        int fieldIndex = lookupParquetColumn(handle, fileSchema);
        Type prestoType = typeManager.getType(handle.getTypeSignature());
        references.add(new ColumnReference<>(handle, fieldIndex, prestoType));
    }
    return new TupleDomainParquetPredicate<>(effectivePredicate, references.build());
}
@Test
public void testColumnHandleDeserialize()
        throws Exception
{
    // Serialize the fixture map, read it back as a handle, and verify the fields.
    String json = objectMapper.writeValueAsString(COLUMN_HANDLE_AS_MAP);
    HiveColumnHandle handle = objectMapper.readValue(json, HiveColumnHandle.class);

    assertEquals(handle.getName(), "column");
    assertEquals(handle.getTypeSignature(), DOUBLE.getTypeSignature());
    assertEquals(handle.getHiveType(), HiveType.HIVE_FLOAT);
    assertEquals(handle.getHiveColumnIndex(), -1);
    assertEquals(handle.isPartitionKey(), true);
}