/**
 * Returns the current value of the thrift field identified by {@code field}.
 *
 * @param field the field descriptor to read
 * @return the field's current value (may be null if unset)
 * @throws java.lang.IllegalStateException if the descriptor is not recognized
 */
public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
        case INPUTS:
            return get_inputs();
        case STREAMS:
            return get_streams();
        case PARALLELISM_HINT:
            return get_parallelism_hint();
        case JSON_CONF:
            return get_json_conf();
        default:
            // All enum constants are covered above; reaching here means the
            // descriptor came from an incompatible generated class.
            throw new java.lang.IllegalStateException();
    }
}
/**
 * Creates and registers the {@code ComponentCommon} record for a newly declared component.
 *
 * @param id          the component id the common record is stored under
 * @param component   the user component, queried for its per-component configuration
 * @param parallelism optional parallelism hint; null means "leave unset"
 * @throws IllegalArgumentException if the parallelism hint is less than 1
 */
private void initCommon(String id, IComponent component, Number parallelism) throws IllegalArgumentException {
    ComponentCommon common = new ComponentCommon();
    // Inputs start empty; wiring is added later via grouping declarations.
    common.set_inputs(new HashMap<GlobalStreamId, Grouping>());
    if (parallelism != null) {
        final int hint = parallelism.intValue();
        if (hint < 1) {
            throw new IllegalArgumentException("Parallelism must be positive.");
        }
        common.set_parallelism_hint(hint);
    }
    Map<String, Object> componentConf = component.getComponentConfiguration();
    if (componentConf != null) {
        // Component config is serialized to JSON for transport inside the topology.
        common.set_json_conf(JSONValue.toJSONString(componentConf));
    }
    commons.put(id, common);
}
/**
 * Thrift-style hash: each field contributes a presence marker (131071 when set,
 * 524287 when unset) followed by the field value's own hash when present.
 */
@Override
public int hashCode() {
    int result = 1;
    result = result * 8191 + (is_set_inputs() ? 131071 : 524287);
    if (is_set_inputs()) {
        result = result * 8191 + inputs.hashCode();
    }
    result = result * 8191 + (is_set_streams() ? 131071 : 524287);
    if (is_set_streams()) {
        result = result * 8191 + streams.hashCode();
    }
    result = result * 8191 + (is_set_parallelism_hint() ? 131071 : 524287);
    if (is_set_parallelism_hint()) {
        result = result * 8191 + parallelism_hint;
    }
    result = result * 8191 + (is_set_json_conf() ? 131071 : 524287);
    if (is_set_json_conf()) {
        result = result * 8191 + json_conf.hashCode();
    }
    return result;
}
/**
 * Returns the component's parallelism hint, defaulting to 1 when no hint was set.
 *
 * @param componentCommon the component metadata to inspect
 * @return the explicit parallelism hint, or 1 if unset
 */
public static int getParallelismHint(ComponentCommon componentCommon) {
    return componentCommon.is_set_parallelism_hint()
        ? componentCommon.get_parallelism_hint()
        : 1;
}
/**
 * Reports whether the component has no input streams declared.
 * A null inputs map is treated the same as an empty one.
 */
private static boolean isEmptyInputs(ComponentCommon common) {
    return common.get_inputs() == null || common.get_inputs().isEmpty();
}
// NOTE(review): truncated extract of a generated setFieldValue(_Fields, Object) switch.
// The case labels and closing braces for STREAMS, PARALLELISM_HINT, and JSON_CONF appear
// to have been cut out, which is why the if/else braces below do not balance.
// Confirm against the full generated ComponentCommon source before editing.
case INPUTS: if (value == null) { unset_inputs(); } else { set_inputs((java.util.Map<GlobalStreamId,Grouping>)value); unset_streams(); } else { set_streams((java.util.Map<java.lang.String,StreamInfo>)value); unset_parallelism_hint(); } else { set_parallelism_hint((java.lang.Integer)value); unset_json_conf(); } else { set_json_conf((java.lang.String)value);
// NOTE(review): truncated extract interleaving two builder loops — one walking a bolt's
// inputs/output streams and one walking a spout's output streams — with the loop bodies
// and builder construction missing (the bare ".withParallelism(...)" calls have no
// receiver in this view). Both paths read get_streams()/get_parallelism_hint() off the
// component's ComponentCommon. Verify against the full source before editing.
Bolt bolt = boltSpec.getValue(); ComponentCommon common = bolt.get_common(); Map<GlobalStreamId, Grouping> inputs = common.get_inputs(); if (inputs != null) { for (Map.Entry<GlobalStreamId, Grouping> input : inputs.entrySet()) { Map<String, StreamInfo> outputs = common.get_streams(); if (outputs != null) { for (String name : outputs.keySet()) { .withParallelism(common.get_parallelism_hint()) .withId(boltComp); boltBuilders.put(boltComp, builder); ComponentCommon common = spout.get_common(); Map<String, StreamInfo> outputs = common.get_streams(); if (outputs != null) { for (String name : outputs.keySet()) { .withParallelism(common.get_parallelism_hint()) .withId(spoutComp); spoutBuilders.put(spoutComp, builder);
/**
 * Returns the current component configuration, decoded from the JSON conf
 * stored in this component's ComponentCommon record.
 *
 * @return the current configuration.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    String jsonConf = commons.get(id).get_json_conf();
    return parseJson(jsonConf);
}
// NOTE(review): truncated extract containing two near-identical passages (spout source
// and bolt output stream) that apply the parallelism hint when set and otherwise write
// a default of 1 back into the ComponentCommon, plus a fragment that lazily creates the
// per-bolt unprocessed-inputs set. Several closing braces are missing in this view;
// confirm against the full source before editing.
if (common.is_set_parallelism_hint()) { int dop = common.get_parallelism_hint(); source.setParallelism(dop); } else { common.set_parallelism_hint(1); if (unprocessedBoltInputs == null) { unprocessedBoltInputs = new HashSet<>(); unprocessedBoltInputs.addAll(common.get_inputs().entrySet()); unprocessdInputsPerBolt.put(boltId, unprocessedBoltInputs); userBolt, inputStreams); if (common.is_set_parallelism_hint()) { int dop = common.get_parallelism_hint(); outputStream.setParallelism(dop); } else { common.set_parallelism_hint(1);
/**
 * Gets the set of stream ids declared for the specified component.
 *
 * @param componentId the component whose declared output streams are requested
 * @return the declared stream ids
 */
public Set<String> getComponentStreams(String componentId) {
    ComponentCommon common = getComponentCommon(componentId);
    return common.get_streams().keySet();
}
/**
 * Merges the given configuration into this component's stored JSON conf.
 * Kryo registrations cannot be set through this fluent API and are rejected.
 *
 * @param conf the configuration to merge; null and empty maps are no-ops
 * @return this, for fluent chaining
 * @throws IllegalArgumentException if the map contains a Kryo registration entry
 */
@SuppressWarnings("unchecked")
@Override
public T addConfigurations(Map<String, Object> conf) {
    if (conf != null) {
        if (conf.containsKey(Config.TOPOLOGY_KRYO_REGISTER)) {
            throw new IllegalArgumentException("Cannot set serializations for a component using fluent API");
        }
        if (!conf.isEmpty()) {
            // Single lookup: read the stored conf, merge, and write it back to the same record.
            ComponentCommon common = commons.get(id);
            common.set_json_conf(mergeIntoJson(parseJson(common.get_json_conf()), conf));
        }
    }
    return (T) this;
}
// Tuple-scheme deserializer for Bolt: both fields are required in the tuple
// protocol, so they are read unconditionally in declaration order
// (bolt_object, then common) and marked as set.
// NOTE(review): the trailing extra brace closes an enclosing scope (likely the
// scheme's inner class) whose opening is outside this view.
@Override public void read(org.apache.storm.thrift.protocol.TProtocol prot, Bolt struct) throws org.apache.storm.thrift.TException { org.apache.storm.thrift.protocol.TTupleProtocol iprot = (org.apache.storm.thrift.protocol.TTupleProtocol) prot; struct.bolt_object = new ComponentObject(); struct.bolt_object.read(iprot); struct.set_bolt_object_isSet(true); struct.common = new ComponentCommon(); struct.common.read(iprot); struct.set_common_isSet(true); } }
// NOTE(review): truncated extract of topology input validation — checks that each
// subscribed stream exists on the source component (the throw's string concatenation
// is cut off mid-expression here) and, for FIELDS groupings, that the grouping fields
// are a subset of the source stream's declared output fields. Confirm against the
// full source before editing.
String componentId = entry.getKey(); ComponentCommon common = getComponentCommon(entry.getValue()); Map<GlobalStreamId, Grouping> inputs = common.get_inputs(); for (Map.Entry<GlobalStreamId, Grouping> input : inputs.entrySet()) { String sourceStreamId = input.getKey().get_streamId(); if (!sourceComponent.get_streams().containsKey(sourceStreamId)) { throw new WrappedInvalidTopologyException("Component: [" + componentId + "] subscribes from non-existent stream: " if (Thrift.groupingType(grouping) == Grouping._Fields.FIELDS) { List<String> fields = new ArrayList<>(grouping.get_fields()); Map<String, StreamInfo> streams = sourceComponent.get_streams(); Set<String> sourceOutputFields = getStreamOutputFields(streams); fields.removeAll(sourceOutputFields);
/**
 * Adds a checkpoint input stream to a bolt's inputs. Bolts fed directly by spouts
 * (root bolts) subscribe to the checkpoint spout's stream; all other bolts subscribe
 * to the checkpoint stream of their upstream component.
 *
 * @param component the bolt's ComponentCommon whose inputs are augmented in place
 */
private void addCheckPointInputs(ComponentCommon component) {
    // Collect first, mutate second: we must not add inputs while iterating the key set.
    Set<GlobalStreamId> checkpointStreams = new HashSet<>();
    for (GlobalStreamId existing : component.get_inputs().keySet()) {
        String sourceId = existing.get_componentId();
        // Root bolts (fed by a spout) take their checkpoint stream from the
        // checkpoint spout; others take it from the upstream component.
        String checkpointSource = _spouts.containsKey(sourceId) ? CHECKPOINT_COMPONENT_ID : sourceId;
        checkpointStreams.add(new GlobalStreamId(checkpointSource, CHECKPOINT_STREAM_ID));
    }
    for (GlobalStreamId streamId : checkpointStreams) {
        component.put_to_inputs(streamId, Grouping.all(new NullStruct()));
    }
}
/**
 * Builds a ComponentCommon from the given wiring, optional parallelism hint,
 * and optional configuration. Input and output maps are defensively copied;
 * null maps are treated as empty.
 *
 * @param inputs          input stream -> grouping wiring, may be null
 * @param outputs         output stream declarations, may be null
 * @param parallelismHint parallelism hint, or null to leave unset
 * @param conf            component configuration to serialize as JSON, may be null
 * @return the populated ComponentCommon
 */
public static ComponentCommon prepareComponentCommon(Map<GlobalStreamId, Grouping> inputs, Map<String, StreamInfo> outputs,
                                                     Integer parallelismHint, Map<String, Object> conf) {
    // Defensive copies so the returned struct does not alias the caller's maps.
    Map<GlobalStreamId, Grouping> inputCopy = new HashMap<>();
    if (inputs != null) {
        inputCopy.putAll(inputs);
    }
    Map<String, StreamInfo> outputCopy = new HashMap<>();
    if (outputs != null) {
        outputCopy.putAll(outputs);
    }
    ComponentCommon component = new ComponentCommon(inputCopy, outputCopy);
    if (parallelismHint != null) {
        component.set_parallelism_hint(parallelismHint);
    }
    if (conf != null) {
        component.set_json_conf(JSONValue.toJSONString(conf));
    }
    return component;
}
// NOTE(review): truncated extract — the enclosing method signature and loop body are
// cut off. The visible part reads the component's parallelism hint and maps each
// declared output stream to a Fields object built from its declared output fields.
// Confirm against the full source before editing.
final Map<String, List<Integer>> componentToSortedTasks, final Map<String, Map<String, Fields>> componentToStreamToFields) { final int parallelismHint = common.get_parallelism_hint(); Integer taskId = null; for (Entry<String, StreamInfo> outStream : common.get_streams().entrySet()) { outputStreams.put(outStream.getKey(), new Fields(outStream.getValue().get_output_fields()));
// NOTE(review): truncated extract of a generated toString() — only the optional-field
// label emission for parallelism_hint and json_conf is visible; the value appends and
// the closing braces are cut off, and `first = false;` for the json_conf branch is not
// in view. Confirm against the full generated source before editing.
if (is_set_parallelism_hint()) { if (!first) sb.append(", "); sb.append("parallelism_hint:"); first = false; if (is_set_json_conf()) { if (!first) sb.append(", "); sb.append("json_conf:");
// NOTE(review): truncated extract of the standard-scheme serializer for
// ComponentCommon — only the optional parallelism_hint (i32) and json_conf (string)
// field writes are visible; the required-field writes, writeFieldEnd/writeFieldStop
// calls, and closing braces are cut off in this view. Confirm against the full
// generated source before editing.
public void write(org.apache.storm.thrift.protocol.TProtocol oprot, ComponentCommon struct) throws org.apache.storm.thrift.TException { struct.validate(); if (struct.is_set_parallelism_hint()) { oprot.writeFieldBegin(PARALLELISM_HINT_FIELD_DESC); oprot.writeI32(struct.parallelism_hint); if (struct.is_set_json_conf()) { oprot.writeFieldBegin(JSON_CONF_FIELD_DESC); oprot.writeString(struct.json_conf);
/**
 * Returns a deep copy of this ComponentCommon via its copy constructor.
 */
public ComponentCommon deepCopy() {
    ComponentCommon copy = new ComponentCommon(this);
    return copy;
}
/**
 * Generates the executor-to-component mapping for a topology, assigning one
 * single-task executor per unit of parallelism. Task ids are allocated
 * sequentially: all spouts first, then all bolts.
 *
 * @param topology the topology to expand
 * @return map from executor (task range [t, t]) to its component id
 */
public static Map<ExecutorDetails, String> genExecsAndComps(StormTopology topology) {
    Map<ExecutorDetails, String> executorToComp = new HashMap<>();
    // Each executor covers exactly one task, so start == end for every entry.
    int task = 0;
    for (Map.Entry<String, SpoutSpec> entry : topology.get_spouts().entrySet()) {
        int parallelism = entry.getValue().get_common().get_parallelism_hint();
        for (int i = 0; i < parallelism; i++, task++) {
            executorToComp.put(new ExecutorDetails(task, task), entry.getKey());
        }
    }
    for (Map.Entry<String, Bolt> entry : topology.get_bolts().entrySet()) {
        int parallelism = entry.getValue().get_common().get_parallelism_hint();
        for (int i = 0; i < parallelism; i++, task++) {
            executorToComp.put(new ExecutorDetails(task, task), entry.getKey());
        }
    }
    return executorToComp;
}