/** Returns the column's comment, or the empty string when no comment is set. */
static String getComment(FieldSchema col) {
  String comment = col.getComment();
  if (comment == null) {
    return "";
  }
  return comment;
}
/** Null-safe accessor for a column comment: never returns null, only "" for absent. */
static String getComment(FieldSchema col) {
  return (col.getComment() == null) ? "" : col.getComment();
}
/**
 * Joins the comments of all given columns into a single string, separated by
 * {@code ColumnType.COLUMN_COMMENTS_DELIMITER}, preserving column order.
 *
 * @param fieldSchemas columns whose comments are concatenated
 * @return delimited comment string, one entry per column ("" for columns without a comment)
 */
public static String getColumnCommentsFromFieldSchema(List<FieldSchema> fieldSchemas) {
  StringBuilder sb = new StringBuilder();
  for (int i = 0; i < fieldSchemas.size(); i++) {
    if (i > 0) {
      sb.append(ColumnType.COLUMN_COMMENTS_DELIMITER);
    }
    // Guard against null: StringBuilder.append(String) would otherwise write
    // the literal text "null" into the joined string for comment-less columns.
    String comment = fieldSchemas.get(i).getComment();
    sb.append(comment != null ? comment : "");
  }
  return sb.toString();
}
/**
 * Renders each field as {@code "name type"} or {@code "name type comment"}.
 *
 * @param fl fields to render; may be null
 * @return one string per field, or null when the input list is null
 *         (null pass-through preserved for existing callers)
 */
public static List<String> getFieldSchemaString(List<FieldSchema> fl) {
  if (fl == null) {
    return null;
  }
  ArrayList<String> ret = new ArrayList<String>();
  for (FieldSchema f : fl) {
    StringBuilder sb = new StringBuilder();
    sb.append(f.getName()).append(' ').append(f.getType());
    if (f.getComment() != null) {
      sb.append(' ').append(f.getComment());
    }
    ret.add(sb.toString());
  }
  return ret;
}
/**
 * Converts each field schema to its textual form "name type[ comment]".
 * Returns null when given null (callers depend on the pass-through).
 */
public static List<String> getFieldSchemaString(List<FieldSchema> fl) {
  if (fl == null) {
    return null;
  }
  ArrayList<String> rendered = new ArrayList<String>();
  for (FieldSchema field : fl) {
    String suffix = (field.getComment() != null) ? (" " + field.getComment()) : "";
    rendered.add(field.getName() + " " + field.getType() + suffix);
  }
  return rendered;
}
/**
 * Returns the value of the requested field.
 *
 * @param field which field to read
 * @return the field's current value
 * @throws IllegalStateException if the field constant is not handled
 */
public Object getFieldValue(_Fields field) {
  // Switch intentionally has no default so a null argument still throws NPE,
  // matching the generated-code behavior; unhandled constants fall through.
  switch (field) {
  case NAME:
    return getName();
  case TYPE:
    return getType();
  case COMMENT:
    return getComment();
  }
  throw new IllegalStateException();
}
/**
 * Converts API field schemas to their metastore model (MFieldSchema) form,
 * lower-casing each column name.
 *
 * @param keys API field schemas; may be null
 * @return converted list, or null when the input is null (pass-through preserved)
 */
private List<MFieldSchema> convertToMFieldSchemas(List<FieldSchema> keys) {
  if (keys == null) {
    return null;
  }
  List<MFieldSchema> converted = new ArrayList<>(keys.size());
  for (FieldSchema key : keys) {
    // NOTE(review): toLowerCase() uses the default locale; confirm this is safe
    // for identifiers (e.g. Turkish dotless-i) or consider Locale.ROOT.
    converted.add(new MFieldSchema(key.getName().toLowerCase(), key.getType(), key.getComment()));
  }
  return converted;
}
/**
 * Builds a Column from a metastore API FieldSchema. An empty or null comment
 * is normalized to {@code Optional.empty()}.
 */
public static Column fromMetastoreApiFieldSchema(FieldSchema fieldSchema) {
  HiveType type = HiveType.valueOf(fieldSchema.getType());
  Optional<String> comment = Optional.ofNullable(emptyToNull(fieldSchema.getComment()));
  return new Column(fieldSchema.getName(), type, comment);
}
/**
 * Maps each FieldSchema to a Column (name, type, comment), preserving order.
 */
private static List<Column> getColumns(List<FieldSchema> fieldSchemas) {
  List<Column> result = Lists.newArrayListWithCapacity(fieldSchemas.size());
  for (FieldSchema fs : fieldSchemas) {
    Column column = new Column(fs.getName(), fs.getType(), fs.getComment());
    result.add(column);
  }
  return result;
}
/**
 * Serializes the given columns into the table schema Properties:
 * column names (joined with the per-table name delimiter), column types
 * (joined with ":"), and column comments (joined with '\0', empty string for
 * columns without a comment). The three joined strings are positionally
 * aligned, so the i-th entry of each refers to the same column.
 *
 * @param schema properties object to populate (also returned)
 * @param cols   columns to serialize
 * @return the same {@code schema} instance, for chaining
 */
private static Properties addCols(Properties schema, List<FieldSchema> cols) {
  StringBuilder colNameBuf = new StringBuilder();
  StringBuilder colTypeBuf = new StringBuilder();
  StringBuilder colComment = new StringBuilder();
  boolean first = true;
  // Delimiter may vary per table (e.g. when a column name contains a comma).
  String columnNameDelimiter = getColumnNameDelimiter(cols);
  for (FieldSchema col : cols) {
    if (!first) {
      colNameBuf.append(columnNameDelimiter);
      colTypeBuf.append(":");
      colComment.append('\0');
    }
    colNameBuf.append(col.getName());
    colTypeBuf.append(col.getType());
    // Null comments are stored as "" so the '\0'-separated positions stay aligned.
    colComment.append((null != col.getComment()) ? col.getComment() : StringUtils.EMPTY);
    first = false;
  }
  schema.setProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMNS,
      colNameBuf.toString());
  schema.setProperty(ColumnType.COLUMN_NAME_DELIMITER, columnNameDelimiter);
  String colTypes = colTypeBuf.toString();
  schema.setProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMN_TYPES,
      colTypes);
  schema.setProperty("columns.comments", colComment.toString());
  return schema;
}
/**
 * Wraps a FieldSchema, copying its name, type, and comment into local fields
 * (comment may be null when the column has none). The original FieldSchema
 * reference is also retained.
 */
public FieldSchemaWrapper(FieldSchema fieldSchema) {
  this.fieldSchema = fieldSchema;
  this.name = fieldSchema.getName();
  this.type = fieldSchema.getType();
  this.comment = fieldSchema.getComment();
}
/***
 * Use destination table schema to generate column mapping
 * @param hiveColumns Optional Map to populate with the generated hive columns
 *                    for reference of caller (populated only when present)
 * @param destinationTableMeta destination table metadata
 * @return Generate Hive columns with types for given Avro schema, one
 *         " `name` type COMMENT '...'" entry per column, joined with ",\n"
 */
private static String generateDestinationToHiveColumnMapping(
    Optional<Map<String, String>> hiveColumns,
    Table destinationTableMeta) {
  StringBuilder columns = new StringBuilder();
  boolean isFirst = true;
  List<FieldSchema> fieldList = destinationTableMeta.getSd().getCols();
  for (FieldSchema field : fieldList) {
    if (isFirst) {
      isFirst = false;
    } else {
      columns.append(", \n");
    }
    String name = field.getName();
    String type = escapeHiveType(field.getType());
    // NOTE(review): comment may be null for comment-less columns; confirm
    // escapeStringForHive handles null, otherwise this emits COMMENT 'null'
    // or throws.
    String comment = field.getComment();
    if (hiveColumns.isPresent()) {
      hiveColumns.get().put(name, type);
    }
    columns.append(String.format("  `%s` %s COMMENT '%s'", name, type, escapeStringForHive(comment)));
  }
  return columns.toString();
}
/**
 * Builds the table schema from the given field schemas, assigning 1-based
 * ordinal positions in list order.
 */
public TableSchema(List<FieldSchema> fieldSchemas) {
  int position = 1;
  for (FieldSchema fieldSchema : fieldSchemas) {
    TypeDescriptor typeDescriptor = new TypeDescriptor(fieldSchema.getType());
    columns.add(new ColumnDescriptor(fieldSchema.getName(), fieldSchema.getComment(),
        typeDescriptor, position));
    position++;
  }
}
/**
 * Converts a FieldSchema to its HCatFieldSchema representation by parsing the
 * type string into a TypeInfo and delegating to the TypeInfo-based overload.
 *
 * @param fs FieldSchema to convert
 * @return HCatFieldSchema representation of FieldSchema
 * @throws HCatException if the conversion fails
 */
public static HCatFieldSchema getHCatFieldSchema(FieldSchema fs) throws HCatException {
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(fs.getType());
  return getHCatFieldSchema(fs.getName(), typeInfo, fs.getComment());
}
// Parse each field's type string into a TypeInfo, keeping list order aligned
// with colNames.
List<TypeInfo> typeInfos = fields.stream().map(fs -> TypeInfoUtils.getTypeInfoFromTypeString(fs.getType()))
    .collect(Collectors.toList());
// Collect per-field comments positionally (entries may be null for
// comment-less fields).
List<String> comments = fields.stream().map(fs -> fs.getComment()).collect(Collectors.toList());
// Build an Avro schema from the names/types/comments triple; the trailing
// nulls are unused optional arguments of TypeInfoToSchema.convert.
Schema schema = new TypeInfoToSchema().convert(colNames, typeInfos, comments, null, null, null);
return getOrGenerateSchemaFile(schema);
/**
 * Converts an API Type to its metastore model (MType) form, copying each
 * field's name, type, and comment. A null field list yields an empty list.
 */
private MType getMType(Type type) {
  List<MFieldSchema> fields = new ArrayList<>();
  if (type.getFields() != null) {
    for (FieldSchema fs : type.getFields()) {
      fields.add(new MFieldSchema(fs.getName(), fs.getType(), fs.getComment()));
    }
  }
  return new MType(type.getName(), type.getType1(), type.getType2(), fields);
}
/**
 * Builds the unformatted description of one column as a map with keys
 * "name", "type", and "comment" (comment value may be null).
 */
private Map<String, Object> makeOneColUnformatted(FieldSchema col) {
  MapBuilder builder = MapBuilder.create();
  builder = builder.put("name", col.getName());
  builder = builder.put("type", col.getType());
  builder = builder.put("comment", col.getComment());
  return builder.build();
}
/** Returns a map describing one column: its name, type, and (possibly null) comment. */
private Map<String, Object> makeOneColUnformatted(FieldSchema col) {
  MapBuilder b = MapBuilder.create();
  b = b.put("name", col.getName());
  b = b.put("type", col.getType());
  b = b.put("comment", col.getComment());
  return b.build();
}
/**
 * Write formatted column information into given StringBuilder
 * @param tableInfo - StringBuilder to append column information into
 * @param cols - list of columns
 * @param isOutputPadded - make it more human readable by setting indentation
 *        with spaces. Turned off for use by HiveServer2
 * @param colStats column statistics, passed through to the per-column formatter
 */
private static void formatAllFields(StringBuilder tableInfo, List<FieldSchema> cols,
    boolean isOutputPadded, List<ColumnStatisticsObj> colStats) {
  for (FieldSchema col : cols) {
    // Use the null-safe getComment(col) in BOTH branches so a comment-less
    // column renders as "" rather than differing between the padded and
    // unpadded output paths (previously the unpadded path passed a raw,
    // possibly-null comment).
    if (isOutputPadded) {
      formatWithIndentation(col.getName(), col.getType(), getComment(col), tableInfo, colStats);
    } else {
      formatWithoutIndentation(col.getName(), col.getType(), getComment(col), tableInfo, colStats);
    }
  }
}
// Collect partition columns separately while also adding every field to the
// full column list; each field is wrapped as (name, type, comment).
// NOTE(review): fragment — the loop body continues beyond this view; every
// field in allFields is currently added to BOTH lists, verify that is intended.
List<HiveTableMeta.HiveTableColumnMeta> partitionColumns = Lists.newArrayList();
for (FieldSchema fieldSchema : allFields) {
  allColumns.add(new HiveTableMeta.HiveTableColumnMeta(fieldSchema.getName(), fieldSchema.getType(), fieldSchema.getComment()));
  partitionColumns.add(new HiveTableMeta.HiveTableColumnMeta(fieldSchema.getName(), fieldSchema.getType(), fieldSchema.getComment()));