/**
 * Builds a ClickHouse sink: validates that an insert query is configured,
 * rewrites its {@code ${field}} placeholders into JDBC {@code ?} markers,
 * and records a field-name -> type-name mapping taken from the schema.
 *
 * @param context source of the sink schema
 * @param clickHouseSinkConfig must carry a non-null insert query
 */
public ClickHouseSink(SinkContext context, ClickHouseSinkConfig clickHouseSinkConfig)
{
    this.config = clickHouseSinkConfig;
    checkState(config.getQuery() != null, "insert into query not setting");
    // Turn every ${...} placeholder into a '?' parameter marker (non-greedy match).
    this.prepareStatementQuery = config.getQuery().replaceAll("\\$\\{.*?}", "?");
    schema = context.getSchema();
    // Diamond operator: the explicit type arguments were redundant.
    Map<String, String> nt = new HashMap<>();
    for (int i = 0; i < schema.getFieldNames().size(); i++) {
        // Field type toString() looks like "class java.lang.String";
        // split(" ")[1] keeps the class-name part — assumes that exact
        // format, TODO confirm against the schema implementation.
        nt.put(schema.getFieldNames().get(i), schema.getFieldTypes().get(i).toString().split(" ")[1]);
    }
    this.nametypes = nt;
}
/**
 * Builds an HDFS sink: resolves the event-time column index from the schema
 * and validates the sink table name and the output format.
 *
 * @param config must name an existing event-time field and a format of
 *               "text" or "parquet" (case-insensitive)
 * @param context source of the sink table name and schema
 */
public HdfsSink(HdfsSinkConfig config, SinkContext context)
{
    this.config = config;
    this.sinkTable = context.getSinkTable();
    this.schema = context.getSchema();
    checkState(sinkTable.length() > 0, "sinkTable is " + sinkTable);
    // Locate the event-time column; eventTimeIndex stays -1 if absent.
    for (int i = 0; i < schema.getFieldNames().size(); i++) {
        if (schema.getFieldNames().get(i).equalsIgnoreCase(config.eventTimeName)) {
            this.eventTimeIndex = i;
            break;
        }
    }
    checkState(eventTimeIndex != -1, "eventTime_field " + config.eventTimeName + " does not exist,but only " + schema.getFieldNames());
    // equalsIgnoreCase avoids the locale-sensitive toLowerCase() the original
    // used (e.g. Turkish dotless-i would break "TEXT".toLowerCase()).
    checkState("text".equalsIgnoreCase(config.format) || "parquet".equalsIgnoreCase(config.format),
            "Hdfs sink format only supports text and parquet");
}
public KafkaSink09(SinkContext context, Kafka09SinkConfig config) { schema = context.getSchema(); if (!Strings.isNullOrEmpty(config.idField)) { int fieldIndex = schema.getFieldIndex(config.idField); checkState(fieldIndex != -1, config.idField + " does not exist, only " + schema.getFields()); this.idIndex = fieldIndex; } this.config = config; this.topic = config.topics; }
/**
 * Builds an HBase sink: verifies the (optionally namespaced) target table
 * exists, builds the column mapping, and resolves the rowkey field index.
 *
 * @param context source of the schema and sink table name
 * @param config  must set {@code rowkey} to a field present in the schema
 * @throws TableNotFoundException if the target table does not exist
 * @throws Exception propagated from the HBase helper
 */
public HbaseSink(SinkContext context, HbaseConfig config) throws Exception
{
    this.config = config;
    schema = context.getSchema();
    tableName = context.getSinkTable();
    if (config.nameSpace != null) {
        // HBase addresses tables as "namespace:table".
        tableName = config.nameSpace + ":" + tableName;
    }
    hbaseHelper = new HbaseHelper(tableName, config.zookeeper, config.zkNodeParent);
    try {
        if (!hbaseHelper.tableExist(tableName)) {
            throw new TableNotFoundException("table does not exist, table name " + tableName);
        }
        columMapping = ColumUtil.mapping(schema, config.columnMapping);
        if (!Strings.isNullOrEmpty(config.rowkey)) {
            int fieldIndex = schema.getFieldIndex(config.rowkey);
            checkState(fieldIndex != -1, config.rowkey + " does not exist, only " + schema.getFields());
            this.rowkeyIndex = fieldIndex;
        }
        checkState(rowkeyIndex != -1, "`rowkey` must be set");
    }
    finally {
        // Close on every path; the original leaked the connection whenever
        // the table check or either checkState threw before reaching close.
        hbaseHelper.closeConnection();
    }
}
public Elasticsearch5Sink(SinkContext context, ElasticsearchSinkConfig config) { schema = context.getSchema(); this.config = config; if (!Strings.isNullOrEmpty(config.idField)) { int fieldIndex = schema.getFieldIndex(config.idField); checkState(fieldIndex != -1, config.idField + " does not exist, only " + schema.getFields()); this.idIndex = fieldIndex; } if (config.update) { checkState(idIndex != -1, "This is Update mode, `id_field` must be set"); } }
public Elasticsearch6Sink(SinkContext context, ElasticsearchSinkConfig config) { schema = context.getSchema(); this.config = config; if (!Strings.isNullOrEmpty(config.idField)) { int fieldIndex = schema.getFieldIndex(config.idField); checkState(fieldIndex != -1, config.idField + " does not exist, only " + schema.getFields()); this.idIndex = fieldIndex; } if (config.update) { checkState(idIndex != -1, "This is Update mode, `id_field` must be set"); } }