@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Every tracked output stream emits the same (key, value) tuple schema.
    Fields kvSchema = new Fields("key", "value");
    for (OutputStream stream : outputStreamStats) {
        declarer.declareStream(stream.id, kvSchema);
    }
}
}
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Stream carrying the request id together with the invocation arguments.
    declarer.declareStream(ARGS_STREAM, new Fields("request", "args"));
    // Stream carrying the request id together with the computed return value.
    declarer.declareStream(RETURN_STREAM, new Fields("request", "return"));
    // Stream carrying only the request id.
    declarer.declareStream(ID_STREAM, new Fields("request"));
}
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Emit on the default stream unless an explicit stream name was configured.
    if (outputStreamName == null) {
        declarer.declare(outputFields);
    } else {
        declarer.declareStream(outputStreamName, outputFields);
    }
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    LOG.info("Declaring output fields [{}]", outputs);
    // Declare one stream per configured (streamId -> fields) mapping.
    for (Map.Entry<String, ? extends Fields> entry : outputs.streamFields().entrySet()) {
        String streamId = entry.getKey();
        declarer.declareStream(streamId, entry.getValue());
    }
}
@Override public void declareOutputFields(OutputFieldsDeclarer declarer) { // in partitioned example, in case an emitter task receives a later transaction than it's emitted so far, // when it sees the earlier txid it should know to emit nothing declarer.declareStream(BATCH_STREAM_ID, new Fields("tx")); declarer.declareStream(COMMIT_STREAM_ID, new Fields("tx")); declarer.declareStream(SUCCESS_STREAM_ID, new Fields("tx")); }
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Each tracked stream is declared with the same (key, value) schema.
    Fields kvSchema = new Fields("key", "value");
    for (OutputStream stream : streamStats) {
        declarer.declareStream(stream.id, kvSchema);
    }
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Tuples on the batch stream carry a "tx" and a "metadata" field.
    Fields batchSchema = new Fields("tx", "metadata");
    declarer.declareStream(MasterBatchCoordinator.BATCH_STREAM_ID, batchSchema);
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Checkpoint tuples carry the transaction id and the checkpoint action.
    Fields checkpointSchema = new Fields(CHECKPOINT_FIELD_TXID, CHECKPOINT_FIELD_ACTION);
    declarer.declareStream(CHECKPOINT_STREAM_ID, checkpointSchema);
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Declares one output stream per entry of outputFields. The entry keyed
    // "default" goes on the default stream; every other key is used as an
    // explicit stream id. Replaces the raw-typed manual Iterator loop with an
    // enhanced for; casts are kept because the field's declared type is not
    // visible here (presumably a Map of stream id -> field names).
    for (Object entryObj : this.outputFields.entrySet()) {
        Map.Entry<?, ?> entry = (Map.Entry<?, ?>) entryObj;
        String streamId = (String) entry.getKey();
        String[] fieldNames = (String[]) entry.getValue();
        if (streamId.equals("default")) {
            declarer.declare(new Fields(fieldNames));
        } else {
            declarer.declareStream(streamId, new Fields(fieldNames));
        }
    }
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Declares one output stream per entry of outputFields. The entry keyed
    // "default" goes on the default stream; every other key is used as an
    // explicit stream id. Replaces the raw-typed manual Iterator loop with an
    // enhanced for; casts are kept because the field's declared type is not
    // visible here (presumably a Map of stream id -> field names).
    for (Object entryObj : this.outputFields.entrySet()) {
        Map.Entry<?, ?> entry = (Map.Entry<?, ?>) entryObj;
        String streamId = (String) entry.getKey();
        String[] fieldNames = (String[]) entry.getValue();
        if (streamId.equals("default")) {
            declarer.declare(new Fields(fieldNames));
        } else {
            declarer.declareStream(streamId, new Fields(fieldNames));
        }
    }
}
@Override public void declareOutputFields(OutputFieldsDeclarer declarer) { Fields outFields = TridentUtils.getSingleOutputStreamFields(_delegate); outFields = TridentUtils.fieldsConcat(new Fields("$id$"), outFields); declarer.declareStream(_stream, outFields); // try to find a way to merge this code with what's already done in TridentBoltExecutor declarer.declareStream(_coordStream, true, new Fields("id", "count")); }
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Every node's stream is declared with its output fields prefixed by "$batchId".
    Fields batchIdField = new Fields("$batchId");
    for (Node node : _nodes) {
        Fields prefixed = TridentUtils.fieldsConcat(batchIdField, node.allOutputFields);
        declarer.declareStream(node.streamId, prefixed);
    }
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Declare the wrapped spout's fields with ID_FIELD prepended as the first field.
    List<String> fieldNames = new ArrayList<>();
    fieldNames.add(ID_FIELD);
    fieldNames.addAll(_spout.getOutputFields().toList());
    declarer.declareStream(_streamName, new Fields(fieldNames));
}
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Coordination tuples carry an id and a count; the stream is declared direct.
    Fields coordSchema = new Fields("id", "count");
    // Let the wrapped component declare its own streams first.
    _delegate.declareOutputFields(declarer);
    declarer.declareStream(Constants.COORDINATED_STREAM_ID, true, coordSchema);
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // The wrapped bolt declares its own streams first.
    _bolt.declareOutputFields(declarer);
    // Then one direct coordination stream per batch group, each carrying (id, count).
    Fields coordSchema = new Fields("id", "count");
    for (String batchGroup : _coordSpecs.keySet()) {
        declarer.declareStream(COORD_STREAM(batchGroup), true, coordSchema);
    }
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Declare the optional late-tuple stream (single LATE_TUPLE_FIELD field) when
    // one is configured, then delegate to the wrapped bolt's declarations.
    // NOTE(review): Storm's IComponent.getComponentConfiguration() is allowed to
    // return null, and the original dereferenced it unconditionally — guard it.
    Map<String, Object> conf = getComponentConfiguration();
    String lateTupleStream =
        conf == null ? null : (String) conf.get(Config.TOPOLOGY_BOLTS_LATE_TUPLE_STREAM);
    if (lateTupleStream != null) {
        declarer.declareStream(lateTupleStream, new Fields(LATE_TUPLE_FIELD));
    }
    bolt.declareOutputFields(declarer);
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // The configured translator decides both the set of streams and the fields on each.
    RecordTranslator<K, V> translator = kafkaSpoutConfig.getTranslator();
    for (String streamId : translator.streams()) {
        Fields streamFields = translator.getFieldsFor(streamId);
        declarer.declareStream(streamId, streamFields);
    }
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Tuples on this stream carry a single "value" field.
    Fields valueSchema = new Fields("value");
    declarer.declareStream(os, valueSchema);
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    String streamId = eventHubConfig.getOutputStreamId();
    Fields schemeFields = scheme.getOutputFields();
    // Fall back to the default stream when no explicit stream id is configured.
    if (Strings.isNullOrEmpty(streamId)) {
        declarer.declare(schemeFields);
    } else {
        declarer.declareStream(streamId, schemeFields);
    }
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // Collect each output field's declared name, preserving order.
    String[] names = new String[outputFields.length];
    for (int idx = 0; idx < outputFields.length; idx++) {
        names[idx] = outputFields[idx].getOutputName();
    }
    Fields declaredFields = new Fields(names);
    // Use the default stream unless an explicit stream name was configured.
    if (outputStreamName == null) {
        declarer.declare(declaredFields);
    } else {
        declarer.declareStream(outputStreamName, declaredFields);
    }
}