public HdfsSink(HdfsSinkConfig config, SinkContext context)
{
    this.config = config;
    this.sinkTable = context.getSinkTable();
    this.schema = context.getSchema();
    checkState(sinkTable.length() > 0, "sinkTable is " + sinkTable);

    // locate the event-time field by (case-insensitive) name
    for (int i = 0; i < schema.getFieldNames().size(); i++) {
        if (schema.getFieldNames().get(i).equalsIgnoreCase(config.eventTimeName)) {
            this.eventTimeIndex = i;
            break;
        }
    }
    checkState(eventTimeIndex != -1,
            "eventTime field " + config.eventTimeName + " does not exist; available fields are " + schema.getFieldNames());
    checkState("text".equalsIgnoreCase(config.format) || "parquet".equalsIgnoreCase(config.format),
            "Hdfs sink format only supports text and parquet");
}
// Gson is thread-safe, so a single shared instance avoids per-record allocation
private static final Gson GSON = new Gson();

@Override
public void process(Row value)
{
    // copy every field of the row into a map and forward it as one JSON message
    Map<String, Object> map = new HashMap<>();
    for (String fieldName : schema.getFieldNames()) {
        map.put(fieldName, value.getAs(fieldName));
    }
    String message = GSON.toJson(map);
    kafkaProducer.send(message);
}
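// A minimal standalone sketch of the serialization step above, assuming only
// the Gson library: the row's fields, collected into a Map, become one JSON
// message. The field names and values here are hypothetical.
import com.google.gson.Gson;

import java.util.HashMap;
import java.util.Map;

public class JsonMessageSketch
{
    public static void main(String[] args)
    {
        Map<String, Object> map = new HashMap<>();
        map.put("user_id", 42L);
        map.put("event_time", 1514736000000L);

        String message = new Gson().toJson(map);
        System.out.println(message);  // e.g. {"user_id":42,"event_time":1514736000000}
    }
}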
public ClickHouseSink(SinkContext context, ClickHouseSinkConfig clickHouseSinkConfig)
{
    this.config = clickHouseSinkConfig;
    checkState(config.getQuery() != null, "insert into query is not set");
    // rewrite every ${field} placeholder in the configured query as a JDBC '?'
    this.prepareStatementQuery = config.getQuery().replaceAll("\\$\\{.*?}", "?");

    schema = context.getSchema();
    Map<String, String> nameTypes = new HashMap<>();
    for (int i = 0; i < schema.getFieldNames().size(); i++) {
        // the type's toString() is e.g. "class java.lang.String"; keep only the type name
        nameTypes.put(schema.getFieldNames().get(i), schema.getFieldTypes().get(i).toString().split(" ")[1]);
    }
    this.nametypes = nameTypes;
}
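// A hedged standalone sketch of the placeholder rewrite above: each ${field}
// token in the configured INSERT statement is replaced with a JDBC '?'. The
// query string here is hypothetical.
public class PlaceholderRewriteSketch
{
    public static void main(String[] args)
    {
        String query = "insert into events values (${user_id}, ${event_time})";
        String prepared = query.replaceAll("\\$\\{.*?}", "?");
        System.out.println(prepared);  // insert into events values (?, ?)
    }
}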
/**
 * HBase table field mapping, including the column family and the new column name.
 *
 * @param schema Table field definitions.
 * @param columnMappingStr Field information to be mapped.
 * @return Table field mapping result.
 */
public static Map<String, Tuple2<String, String>> mapping(Row.Schema schema, String columnMappingStr)
        throws Exception
{
    // by default every field maps to itself in the default column family
    Map<String, Tuple2<String, String>> columnMapping = new HashMap<>();
    schema.getFieldNames().forEach(fieldName ->
            columnMapping.put(fieldName, new Tuple2<>(FAMILY_DEFAULT, fieldName)));

    if (columnMappingStr != null && !"".equals(columnMappingStr)) {
        for (String columnInfoStr : columnMappingStr.split(",")) {
            String[] columnInfo = columnInfoStr.split(":");
            switch (columnInfo.length) {
                case 2:
                    mappingTwoLength(columnInfo, columnMapping);
                    break;
                case 3:
                    mappingThreeLength(columnInfo, columnMapping);
                    break;
                default:
                    throw new ColumMappingException("Column mapping str is '" + columnInfoStr
                            + "', but the standard format is A:B:C or A:B.");
            }
        }
    }
    return columnMapping;
}
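// A small sketch of how a mapping string is tokenized, under the A:B / A:B:C
// formats named in the exception message above. What the two- and three-part
// forms mean is delegated to mappingTwoLength/mappingThreeLength in the real
// code; this only shows the split-and-dispatch, with a hypothetical input.
import java.util.Arrays;

public class ColumnMappingSketch
{
    public static void main(String[] args)
    {
        String columnMappingStr = "user_id:cf1:uid,event_time:ts";
        for (String columnInfoStr : columnMappingStr.split(",")) {
            String[] columnInfo = columnInfoStr.split(":");
            switch (columnInfo.length) {
                case 2:  // two-part form A:B, handled by mappingTwoLength
                    System.out.println("2-part mapping: " + Arrays.toString(columnInfo));
                    break;
                case 3:  // three-part form A:B:C, handled by mappingThreeLength
                    System.out.println("3-part mapping: " + Arrays.toString(columnInfo));
                    break;
                default:
                    throw new IllegalArgumentException("standard format is A:B:C or A:B, got " + columnInfoStr);
            }
        }
    }
}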
@Override
public void process(Row value)
{
    Object rowkey = value.getAs(rowkeyIndex);
    if (rowkey == null) {
        return;
    }
    Put put = new Put(BytesUtil.toBytes(rowkey));
    try {
        // add every non-rowkey field that has a column mapping
        for (String fieldName : schema.getFieldNames()) {
            if (!config.rowkey.equals(fieldName)) {
                Tuple2<String, String> tuple2 = columMapping.get(fieldName);
                if (tuple2 != null) {
                    hbaseHelper.addColumn(tuple2.f0(), tuple2.f1(), value.getAs(fieldName), put);
                }
                else {
                    logger.warn("Field: " + fieldName + " is not defined in table " + tableName);
                }
            }
        }
        if (!put.isEmpty()) {
            hbaseHelper.store(put);
        }
    }
    catch (Exception e) {
        logger.error("Failed to put record to HBase.", e);
    }
}
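// A hypothetical sketch of the underlying write using the stock HBase client
// API; hbaseHelper above is a project-specific wrapper, and the column family,
// qualifier, and values here are made up.
import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class HbasePutSketch
{
    static void writeRow(Table table) throws IOException
    {
        Put put = new Put(Bytes.toBytes("rowkey-1"));
        put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("uid"), Bytes.toBytes(42L));
        if (!put.isEmpty()) {
            table.put(put);
        }
    }
}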
public JsonSchema(SourceContext context)
{
    ideal.sylph.etl.Row.Schema schema = context.getSchema();
    TypeInformation<?>[] types = schema.getFieldTypes().stream()
            .map(TypeExtractor::createTypeInfo)
            .toArray(TypeInformation<?>[]::new);
    String[] names = schema.getFieldNames().toArray(new String[0]);
    this.rowTypeInfo = new RowTypeInfo(types, names);
}
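// A hedged sketch of what the constructor above builds: a Flink RowTypeInfo
// pairing field names with their TypeInformation. The field names and types
// here are hypothetical; Types.STRING/Types.LONG stand in for the extracted types.
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.RowTypeInfo;

public class RowTypeInfoSketch
{
    public static void main(String[] args)
    {
        TypeInformation<?>[] types = {Types.STRING, Types.LONG};
        String[] names = {"user_id", "event_time"};
        RowTypeInfo rowTypeInfo = new RowTypeInfo(types, names);
        System.out.println(rowTypeInfo);  // e.g. Row(user_id: String, event_time: Long)
    }
}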