this.hiveStorageTimeZone = hiveStorageTimeZone; this.deserializer = getDeserializer(configuration, splitSchema); this.rowInspector = getTableObjectInspector(deserializer);
return null; if (isStructuralType(type)) { Block block = (Block) fieldFromCursor; if (isArrayType(type)) { Type elementType = ((ArrayType) type).getElementType(); return toArrayValue(block, elementType); else if (isMapType(type)) { MapType mapType = (MapType) type; return toMapValue(block, mapType.getKeyType(), mapType.getValueType()); else if (isRowType(type)) { return toRowValue(block, type.getTypeParameters());
/**
 * Parses a TIMESTAMP partition key value into epoch millis in the given zone.
 *
 * @param value the raw partition key string as stored by Hive
 * @param zone the time zone the Hive timestamp is interpreted in
 * @param name the partition column name, used only for the error message
 * @return the parsed timestamp as millis
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE if the value is not a valid Hive timestamp
 */
public static long timestampPartitionKey(String value, DateTimeZone zone, String name)
{
    try {
        return parseHiveTimestamp(value, zone);
    }
    catch (IllegalArgumentException e) {
        // Chain the parse failure as the cause instead of discarding it,
        // so the underlying reason survives in logs and stack traces.
        throw new PrestoException(
                HIVE_INVALID_PARTITION_VALUE,
                format("Invalid partition value '%s' for TIMESTAMP partition key: %s", value, name),
                e);
    }
}
/**
 * Returns a map from column name to column handle for every column of the table.
 *
 * @throws TableNotFoundException if the table does not exist in the metastore
 */
@Override
public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    SchemaTableName tableName = schemaTableName(tableHandle);
    // Resolve the table or fail fast with the connector's standard not-found exception.
    Table table = metastore.getTable(tableName.getSchemaName(), tableName.getTableName())
            .orElseThrow(() -> new TableNotFoundException(tableName));
    ImmutableMap.Builder<String, ColumnHandle> builder = ImmutableMap.builder();
    for (HiveColumnHandle handle : hiveColumnHandles(table)) {
        builder.put(handle.getName(), handle);
    }
    return builder.build();
}
if (isHiveNull(bytes)) { prefilledValue = null; prefilledValue = booleanPartitionKey(columnValue, name); prefilledValue = bigintPartitionKey(columnValue, name); prefilledValue = integerPartitionKey(columnValue, name); prefilledValue = smallintPartitionKey(columnValue, name); prefilledValue = tinyintPartitionKey(columnValue, name); prefilledValue = floatPartitionKey(columnValue, name); prefilledValue = doublePartitionKey(columnValue, name); prefilledValue = varcharPartitionKey(columnValue, name, type); prefilledValue = charPartitionKey(columnValue, name, type); prefilledValue = datePartitionKey(columnValue, name); prefilledValue = timestampPartitionKey(columnValue, hiveStorageTimeZone, name); prefilledValue = shortDecimalPartitionKey(columnValue, (DecimalType) type, name); prefilledValue = longDecimalPartitionKey(columnValue, (DecimalType) type, name);
byte[] bytes = partitionKeyValue.getBytes(UTF_8); if (HiveUtil.isHiveNull(bytes)) { nullsRowDefault[columnIndex] = true; booleans[columnIndex] = booleanPartitionKey(partitionKeyValue, columnName); longs[columnIndex] = bigintPartitionKey(partitionKeyValue, columnName); doubles[columnIndex] = doublePartitionKey(partitionKeyValue, columnName); longs[columnIndex] = timestampPartitionKey(partitionKey.getValue(), hiveStorageTimeZone, columnName); longs[columnIndex] = datePartitionKey(partitionKey.getValue(), columnName);
return getPrimitiveJavaObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale())); else if (isArrayType(type)) { return ObjectInspectorFactory.getStandardListObjectInspector(getJavaObjectInspector(type.getTypeParameters().get(0))); else if (isMapType(type)) { ObjectInspector keyObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(0)); ObjectInspector valueObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(1)); return ObjectInspectorFactory.getStandardMapObjectInspector(keyObjectInspector, valueObjectInspector); else if (isRowType(type)) { return ObjectInspectorFactory.getStandardStructObjectInspector( type.getTypeSignature().getParameters().stream()
return getHiveDecimal(decimalType, block, position); if (isArrayType(type)) { Type elementType = type.getTypeParameters().get(0); if (isMapType(type)) { Type keyType = type.getTypeParameters().get(0); Type valueType = type.getTypeParameters().get(1); if (isRowType(type)) { Block rowBlock = block.getObject(position, Block.class); checkCondition(fieldTypes.size() == rowBlock.getPositionCount(), StandardErrorCode.GENERIC_INTERNAL_ERROR, "Expected row value field count does not match type field count");
parseStringColumn(column); else if (isStructuralType(hiveTypes[column])) { parseObjectColumn(column);
/**
 * Returns the metadata for the table identified by the given handle.
 * Delegates to the {@code SchemaTableName}-based overload after validating the handle.
 */
@Override
public ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    requireNonNull(tableHandle, "tableHandle is null");
    return getTableMetadata(schemaTableName(tableHandle));
}
/**
 * Parses a long-typed column value from the raw byte range [start, start + length).
 * DATE and TIMESTAMP columns are decoded as strings through the Hive parsers;
 * everything else is parsed as a plain long. Updates longs[column] and nulls[column].
 */
private void parseLongColumn(int column, byte[] bytes, int start, int length)
{
    // Hive serializes SQL NULL as the two-byte sequence "\N"; an empty field is also NULL.
    if (length == 0 || (length == 2 && bytes[start] == '\\' && bytes[start + 1] == 'N')) {
        nulls[column] = true;
        return;
    }

    HiveType hiveType = hiveTypes[column];
    if (hiveType.equals(HiveType.HIVE_DATE)) {
        longs[column] = parseHiveDate(new String(bytes, start, length));
    }
    else if (hiveType.equals(HiveType.HIVE_TIMESTAMP)) {
        longs[column] = parseHiveTimestamp(new String(bytes, start, length), hiveStorageTimeZone);
    }
    else {
        longs[column] = parseLong(bytes, start, length);
    }
    nulls[column] = false;
}
@Test
public void testGetThriftDeserializer()
{
    // Build a serde schema describing a Thrift-backed table: the deserializer
    // class, the Thrift struct to deserialize, and the wire protocol.
    Properties serdeProperties = new Properties();
    serdeProperties.setProperty(SERIALIZATION_LIB, ThriftDeserializer.class.getName());
    serdeProperties.setProperty(SERIALIZATION_CLASS, IntString.class.getName());
    serdeProperties.setProperty(SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());

    assertInstanceOf(getDeserializer(new Configuration(false), serdeProperties), ThriftDeserializer.class);
}
/**
 * Converts a Hive partition name (e.g. "ds=2016-01-01/country=US") into a
 * {@link HivePartition}, parsing each extracted value with its column's type.
 *
 * @param tableName the table the partition belongs to
 * @param partitionName the raw Hive partition name
 * @param partitionColumns the partition columns, positionally aligned with the name's values
 * @param partitionColumnTypes the Presto types for those columns, same order
 * @param timeZone zone used when parsing time-based partition values
 */
public static HivePartition parsePartition(
        SchemaTableName tableName,
        String partitionName,
        List<HiveColumnHandle> partitionColumns,
        List<Type> partitionColumnTypes,
        DateTimeZone timeZone)
{
    List<String> partitionValues = extractPartitionValues(partitionName);
    ImmutableMap.Builder<ColumnHandle, NullableValue> values = ImmutableMap.builder();
    int columnCount = partitionColumns.size();
    for (int index = 0; index < columnCount; index++) {
        NullableValue parsed = parsePartitionValue(
                partitionName,
                partitionValues.get(index),
                partitionColumnTypes.get(index),
                timeZone);
        values.put(partitionColumns.get(index), parsed);
    }
    return new HivePartition(tableName, partitionName, values.build());
}
/**
 * Asserts that the column map contains a column with the given name whose type
 * and comment (derived from its partition-key status) match expectations.
 */
private static void assertPrimitiveField(Map<String, ColumnMetadata> map, String name, Type type, boolean partitionKey)
{
    assertTrue(map.containsKey(name));
    ColumnMetadata columnMetadata = map.get(name);
    // The column name is passed as the assertion message for easier failure diagnosis.
    assertEquals(columnMetadata.getType(), type, name);
    assertEquals(columnMetadata.getComment(), annotateColumnComment(null, partitionKey));
}
this.hiveStorageTimeZone = hiveStorageTimeZone; this.deserializer = getDeserializer(splitSchema); this.rowInspector = getTableObjectInspector(deserializer); if (HiveUtil.isHiveNull(bytes)) { nulls[columnIndex] = true; booleans[columnIndex] = booleanPartitionKey(partitionKey.getValue(), name); longs[columnIndex] = bigintPartitionKey(partitionKey.getValue(), name); doubles[columnIndex] = doublePartitionKey(partitionKey.getValue(), name); longs[columnIndex] = datePartitionKey(partitionKey.getValue(), name); longs[columnIndex] = timestampPartitionKey(partitionKey.getValue(), hiveStorageTimeZone, name);
if (HiveUtil.isHiveNull(bytes)) { nulls[columnIndex] = true; booleans[columnIndex] = booleanPartitionKey(columnValue, name); longs[columnIndex] = tinyintPartitionKey(columnValue, name); longs[columnIndex] = smallintPartitionKey(columnValue, name); longs[columnIndex] = integerPartitionKey(columnValue, name); longs[columnIndex] = bigintPartitionKey(columnValue, name); longs[columnIndex] = floatPartitionKey(columnValue, name); doubles[columnIndex] = doublePartitionKey(columnValue, name); slices[columnIndex] = varcharPartitionKey(columnValue, name, type); slices[columnIndex] = charPartitionKey(columnValue, name, type); longs[columnIndex] = datePartitionKey(columnValue, name); longs[columnIndex] = timestampPartitionKey(columnValue, hiveStorageTimeZone, name); longs[columnIndex] = shortDecimalPartitionKey(columnValue, (DecimalType) type, name); slices[columnIndex] = longDecimalPartitionKey(columnValue, (DecimalType) type, name);
if (isArrayType(type) || isMapType(type) || isRowType(type)) { return getJavaObjectInspector(type);
return new Timestamp(millisUtc); if (isArrayType(type)) { Type elementType = type.getTypeParameters().get(0); if (isMapType(type)) { Type keyType = type.getTypeParameters().get(0); Type valueType = type.getTypeParameters().get(1); if (isRowType(type)) { Block rowBlock = block.getObject(position, Block.class); checkCondition(fieldTypes.size() == rowBlock.getPositionCount(), StandardErrorCode.INTERNAL_ERROR, "Expected row value field count does not match type field count");
return cursor.getLong(field); else if (isStructuralType(type)) { return cursor.getObject(field);
/**
 * Drops the table identified by the handle.
 *
 * @throws TableNotFoundException if the table is not present in the metastore
 */
@Override
public void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    HiveTableHandle hiveHandle = (HiveTableHandle) tableHandle;
    String schemaName = hiveHandle.getSchemaName();
    String tableName = hiveHandle.getTableName();
    // Verify existence first so callers get a TableNotFoundException rather than
    // whatever the metastore drop would report for a missing table.
    if (!metastore.getTable(schemaName, tableName).isPresent()) {
        throw new TableNotFoundException(schemaTableName(tableHandle));
    }
    metastore.dropTable(session, schemaName, tableName);
}