@JsonValue
@Override
public String toString()
{
    return hiveTypeName.toString();
}

private HiveType(TypeInfo typeInfo)
{
    requireNonNull(typeInfo, "typeInfo is null");
    this.hiveTypeName = new HiveTypeName(typeInfo.getTypeName());
    this.typeInfo = typeInfo;
}

@Override
public boolean equals(Object o)
{
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    HiveType hiveType = (HiveType) o;
    return hiveTypeName.equals(hiveType.hiveTypeName);
}

@Override
public int hashCode()
{
    return hiveTypeName.hashCode();
}
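// A minimal sketch of the value-object contract given by the methods above.
// HiveType.valueOf(String) is assumed as the public factory (the constructor
// shown above is private), and the package path follows prestodb conventions.
import com.facebook.presto.hive.HiveType;

public class HiveTypeContractSketch
{
    public static void main(String[] args)
    {
        HiveType a = HiveType.valueOf("bigint");
        HiveType b = HiveType.valueOf("bigint");
        // equals and hashCode both delegate to hiveTypeName, so two
        // instances built from the same type name are interchangeable.
        System.out.println(a.equals(b));                  // true
        System.out.println(a.hashCode() == b.hashCode()); // true
        // @JsonValue makes Jackson serialize the instance as this string.
        System.out.println(a.toString());                 // bigint
    }
}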
public int getEstimatedSizeInBytes()
{
    int result = INSTANCE_SIZE;
    result += path.length() * Character.BYTES;
    result += sizeOfObjectArray(partitionKeys.size());
    for (HivePartitionKey partitionKey : partitionKeys) {
        result += partitionKey.getEstimatedSizeInBytes();
    }
    result += sizeOfObjectArray(blocks.size());
    for (InternalHiveBlock block : blocks) {
        result += block.getEstimatedSizeInBytes();
    }
    result += partitionName.length() * Character.BYTES;
    result += sizeOfObjectArray(columnCoercions.size());
    for (HiveTypeName hiveTypeName : columnCoercions.values()) {
        result += INTEGER_INSTANCE_SIZE + hiveTypeName.getEstimatedSizeInBytes();
    }
    return result;
}
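// The same retained-size accounting pattern, shown standalone: a fixed
// shallow instance size plus per-element payloads. SizedHolder is a
// hypothetical class used only for illustration; ClassLayout is from
// OpenJDK JOL and SizeOf from airlift, which the method above appears to use.
import java.util.List;
import io.airlift.slice.SizeOf;
import org.openjdk.jol.info.ClassLayout;

public class SizedHolder
{
    private static final int INSTANCE_SIZE = (int) ClassLayout.parseClass(SizedHolder.class).instanceSize();

    private final String name;
    private final List<String> values;

    public SizedHolder(String name, List<String> values)
    {
        this.name = name;
        this.values = values;
    }

    public int getEstimatedSizeInBytes()
    {
        // Shallow object size, plus the string payload, plus the list's
        // backing object array and each element's character payload.
        long result = INSTANCE_SIZE;
        result += name.length() * Character.BYTES;
        result += SizeOf.sizeOfObjectArray(values.size());
        for (String value : values) {
            result += value.length() * Character.BYTES;
        }
        return (int) result;
    }
}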
public static Optional<DecimalType> getDecimalType(HiveType hiveType)
{
    return getDecimalType(hiveType.getHiveTypeName().toString());
}
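// A hedged sketch of what the String-based getDecimalType overload might do;
// the regex and the int[]{precision, scale} result are assumptions made to
// keep the example self-contained, not the actual implementation.
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DecimalTypeNameSketch
{
    // Matches Hive decimal type names such as "decimal(10,2)".
    private static final Pattern DECIMAL_PATTERN = Pattern.compile("decimal\\((\\d+),(\\d+)\\)");

    public static Optional<int[]> parseDecimal(String hiveTypeName)
    {
        Matcher matcher = DECIMAL_PATTERN.matcher(hiveTypeName);
        if (!matcher.matches()) {
            return Optional.empty();
        }
        return Optional.of(new int[] {
                Integer.parseInt(matcher.group(1)),   // precision
                Integer.parseInt(matcher.group(2))}); // scale
    }
}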
private static String toThriftDdl(String structName, List<Column> columns)
{
    // Mimics function in Hive:
    // MetaStoreUtils.getDDLFromFieldSchema(String, List<FieldSchema>)
    StringBuilder ddl = new StringBuilder();
    ddl.append("struct ");
    ddl.append(structName);
    ddl.append(" { ");
    boolean first = true;
    for (Column column : columns) {
        if (first) {
            first = false;
        }
        else {
            ddl.append(", ");
        }
        ddl.append(typeToThriftType(column.getType().getHiveTypeName().toString()));
        ddl.append(' ');
        ddl.append(column.getName());
    }
    ddl.append("}");
    return ddl.toString();
}
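// Worked example of the concatenation above for columns (id bigint, name string),
// with the thrift type names substituted by hand under the assumption that
// typeToThriftType maps bigint -> i64 and string -> string, as in Hive's
// MetaStoreUtils. Note the missing space before the closing brace, which
// mirrors the Hive original.
public class ThriftDdlExample
{
    public static void main(String[] args)
    {
        StringBuilder ddl = new StringBuilder();
        ddl.append("struct ");
        ddl.append("orders");
        ddl.append(" { ");
        ddl.append("i64").append(' ').append("id");
        ddl.append(", ");
        ddl.append("string").append(' ').append("name");
        ddl.append("}");
        System.out.println(ddl); // struct orders { i64 id, string name}
    }
}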
private static boolean areColumnTypesSupported(List<Column> columns)
{
    if (columns == null || columns.isEmpty()) {
        return false;
    }
    for (Column column : columns) {
        String type = column.getType().getHiveTypeName().toString();
        if (column.getType().getTypeInfo() instanceof DecimalTypeInfo) {
            // skip precision and scale when checking decimal types
            type = DECIMAL_TYPE_NAME;
        }
        if (!SUPPORTED_COLUMN_TYPES.contains(type)) {
            return false;
        }
    }
    return true;
}
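// Why the decimal special case matters: Hive decimal type names carry
// precision and scale, so a whitelist keyed on bare names would never match
// them. This sketch uses a string-prefix check and an illustrative whitelist;
// the real code checks instanceof DecimalTypeInfo against a
// SUPPORTED_COLUMN_TYPES set that is not shown above.
import java.util.Set;

public class ColumnTypeCheckSketch
{
    private static final Set<String> SUPPORTED = Set.of("bigint", "string", "decimal");

    public static boolean isSupported(String hiveTypeName)
    {
        // Collapse parameterized decimals so "decimal(10,2)" and
        // "decimal(38,0)" both hit the same whitelist entry.
        String type = hiveTypeName.startsWith("decimal(") ? "decimal" : hiveTypeName;
        return SUPPORTED.contains(type);
    }

    public static void main(String[] args)
    {
        System.out.println(isSupported("decimal(10,2)")); // true
        System.out.println(isSupported("binary"));        // false
    }
}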
public static FieldSchema toMetastoreApiFieldSchema(Column column)
{
    return new FieldSchema(column.getName(), column.getType().getHiveTypeName().toString(), column.getComment().orElse(null));
}
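// The equivalent metastore object for a column "price" of type decimal(10,2)
// with a comment; FieldSchema is the thrift-generated Hive class, and a
// column without a comment would pass null as the third argument.
import org.apache.hadoop.hive.metastore.api.FieldSchema;

public class FieldSchemaExample
{
    public static void main(String[] args)
    {
        FieldSchema schema = new FieldSchema("price", "decimal(10,2)", "unit price");
        System.out.println(schema.getName() + " " + schema.getType() + " " + schema.getComment());
    }
}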
@Override
public void addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment)
{
    Optional<org.apache.hadoop.hive.metastore.api.Table> source = delegate.getTable(databaseName, tableName);
    if (!source.isPresent()) {
        throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
    }
    org.apache.hadoop.hive.metastore.api.Table table = source.get();
    table.getSd().getCols().add(new FieldSchema(columnName, columnType.getHiveTypeName().toString(), columnComment));
    alterTable(databaseName, tableName, table);
}
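// The mutation at the heart of addColumn, isolated: append a FieldSchema to
// the table's storage-descriptor column list. In the real method the Table
// comes from delegate.getTable and is written back via alterTable; this
// sketch only shows the in-memory step on a freshly built thrift Table.
import java.util.ArrayList;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class AddColumnSketch
{
    public static void main(String[] args)
    {
        Table table = new Table();
        StorageDescriptor sd = new StorageDescriptor();
        sd.setCols(new ArrayList<>());
        table.setSd(sd);

        table.getSd().getCols().add(new FieldSchema("new_col", "bigint", "added column"));
        System.out.println(table.getSd().getCols());
    }
}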
// Excerpt from a loop over partition keys: builds a "/"-separated list of
// partition key names and a separator-joined list of their Hive type names.
partString += partKey.getName();
partTypesString += partTypesStringSep;
partTypesString += partKey.getType().getHiveTypeName().toString();
if (partStringSep.length() == 0) {
    partStringSep = "/";
    // remainder of the loop body is truncated in the source
}
private static void assertColumn(Column actual, com.amazonaws.services.glue.model.Column expected)
{
    assertEquals(actual.getName(), expected.getName());
    assertEquals(actual.getType().getHiveTypeName().toString(), expected.getType());
    assertEquals(actual.getComment().get(), expected.getComment());
}

private static void assertColumn(com.amazonaws.services.glue.model.Column actual, Column expected)
{
    assertEquals(actual.getName(), expected.getName());
    assertEquals(actual.getType(), expected.getType().getHiveTypeName().toString());
    assertEquals(actual.getComment(), expected.getComment().get());
}
private static ConnectorPageSource createPageSource(HiveTransactionHandle transaction, HiveClientConfig config, File outputFile)
{
    Properties splitProperties = new Properties();
    splitProperties.setProperty(FILE_INPUT_FORMAT, config.getHiveStorageFormat().getInputFormat());
    splitProperties.setProperty(SERIALIZATION_LIB, config.getHiveStorageFormat().getSerDe());
    splitProperties.setProperty("columns", Joiner.on(',').join(getColumnHandles().stream().map(HiveColumnHandle::getName).collect(toList())));
    splitProperties.setProperty("columns.types", Joiner.on(',').join(getColumnHandles().stream().map(HiveColumnHandle::getHiveType).map(hiveType -> hiveType.getHiveTypeName().toString()).collect(toList())));
    HiveSplit split = new HiveSplit(
            SCHEMA_NAME,
            TABLE_NAME,
            "",
            "file:///" + outputFile.getAbsolutePath(),
            0,
            outputFile.length(),
            outputFile.length(),
            splitProperties,
            ImmutableList.of(),
            ImmutableList.of(),
            OptionalInt.empty(),
            false,
            TupleDomain.all(),
            ImmutableMap.of(),
            Optional.empty(),
            false);
    HivePageSourceProvider provider = new HivePageSourceProvider(config, createTestHdfsEnvironment(config), getDefaultHiveRecordCursorProvider(config), getDefaultHiveDataStreamFactories(config), TYPE_MANAGER);
    return provider.createPageSource(transaction, getSession(config), split, ImmutableList.copyOf(getColumnHandles()));
}
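// What the two serde properties built above look like for a table with
// columns id (bigint) and name (string): parallel comma-joined lists, names
// in "columns" and Hive type names in "columns.types". The values here are
// a sketch, not output captured from the test.
import java.util.Properties;

public class SplitPropertiesExample
{
    public static void main(String[] args)
    {
        Properties splitProperties = new Properties();
        splitProperties.setProperty("columns", "id,name");
        splitProperties.setProperty("columns.types", "bigint,string");
        System.out.println(splitProperties);
    }
}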