@Override
public TypeSerializer<Row> createSerializer(ExecutionConfig config) {
    // Create one serializer per field, then compose them into a RowSerializer.
    final int arity = getArity();
    final TypeSerializer<?>[] fieldSerializers = new TypeSerializer[arity];
    int index = 0;
    while (index < arity) {
        fieldSerializers[index] = types[index].createSerializer(config);
        index++;
    }
    return new RowSerializer(fieldSerializers);
}
getArity(), keyPositions, comparators,
@Override
public TypeSerializer<Row> createSerializer(ExecutionConfig config) {
    // Delegate serializer creation to each field type, then wrap the
    // resulting array in a RowSerializer covering the whole row.
    final int fieldCount = getArity();
    final TypeSerializer<?>[] perFieldSerializers = new TypeSerializer[fieldCount];
    for (int pos = 0; pos < fieldCount; pos++) {
        perFieldSerializers[pos] = types[pos].createSerializer(config);
    }
    return new RowSerializer(perFieldSerializers);
}
@Override
public TypeSerializer<Row> createSerializer(ExecutionConfig config) {
    // A Row is serialized field-by-field: build the per-field serializers
    // in field order and hand them to the composite RowSerializer.
    final int numberOfFields = getArity();
    final TypeSerializer<?>[] serializers = new TypeSerializer[numberOfFields];
    for (int field = 0; field < numberOfFields; ++field) {
        final TypeSerializer<?> fieldSerializer = types[field].createSerializer(config);
        serializers[field] = fieldSerializer;
    }
    return new RowSerializer(serializers);
}
getArity(), keyPositions, comparators,
getArity(), keyPositions, comparators,
/**
 * Builds a Hive {@code StorageDescriptor} from the table properties carried in the
 * given {@code jobConf} and the column layout described by {@code rowTypeInfo}.
 *
 * <p>Reads location, input/output format, compression flag, bucket count, serde
 * library and serialization format from the job configuration, and converts each
 * Flink field type to its Hive type string via {@code HiveMetadataUtil.convert}.
 *
 * @param jobConf     configuration holding the HIVE_TABLE_* properties; the
 *                    compressed/num-buckets values are assumed to be present and
 *                    parseable — TODO confirm callers always set them
 * @param rowTypeInfo row schema supplying field names and Flink field types
 * @return a fully populated {@code StorageDescriptor} (columns + serde info)
 * @throws FlinkHiveException if a field type has no Hive mapping
 */
private static StorageDescriptor createStorageDescriptor(JobConf jobConf, RowTypeInfo rowTypeInfo) {
    StorageDescriptor storageDescriptor = new StorageDescriptor();
    storageDescriptor.setLocation(jobConf.get(HIVE_TABLE_LOCATION));
    storageDescriptor.setInputFormat(jobConf.get(HIVE_TABLE_INPUT_FORMAT));
    storageDescriptor.setOutputFormat(jobConf.get(HIVE_TABLE_OUTPUT_FORMAT));
    storageDescriptor.setCompressed(Boolean.parseBoolean(jobConf.get(HIVE_TABLE_COMPRESSED)));
    storageDescriptor.setNumBuckets(Integer.parseInt(jobConf.get(HIVE_TABLE_NUM_BUCKETS)));

    SerDeInfo serDeInfo = new SerDeInfo();
    serDeInfo.setSerializationLib(jobConf.get(HIVE_TABLE_SERDE_LIBRARY));
    Map<String, String> parameters = new HashMap<>();
    parameters.put(serdeConstants.SERIALIZATION_FORMAT,
            jobConf.get(HIVE_TABLE_STORAGE_SERIALIZATION_FORMAT));
    serDeInfo.setParameters(parameters);

    // Hoist loop-invariant lookups and presize the column list to the arity.
    final int arity = rowTypeInfo.getArity();
    final String[] fieldNames = rowTypeInfo.getFieldNames();
    List<FieldSchema> fieldSchemas = new ArrayList<>(arity);
    for (int i = 0; i < arity; i++) {
        String hiveType = HiveMetadataUtil.convert(
                TypeConverters.createInternalTypeFromTypeInfo(rowTypeInfo.getFieldTypes()[i]));
        if (null == hiveType) {
            // Log-and-throw kept from the original so existing log scraping still works.
            logger.error("Now we don't support flink type of "
                    + rowTypeInfo.getFieldTypes()[i] + " converting from hive");
            throw new FlinkHiveException("Now we don't support flink's type of "
                    + rowTypeInfo.getFieldTypes()[i] + " converting from hive");
        }
        fieldSchemas.add(new FieldSchema(fieldNames[i], hiveType, ""));
    }
    storageDescriptor.setCols(fieldSchemas);
    storageDescriptor.setSerdeInfo(serDeInfo);
    return storageDescriptor;
}
throw new RuntimeException(e); reuse = new GenericRow(rowTypeInfo.getArity());