/**
 * Returns the value of the requested Thrift field.
 *
 * @param field which field to read
 * @return the field's current value ({@code get_spout_object()} or {@code get_common()})
 * @throws java.lang.IllegalStateException if the field is not one handled here
 */
public java.lang.Object getFieldValue(_Fields field) {
  switch (field) {
    case SPOUT_OBJECT:
      return get_spout_object();
    case COMMON:
      return get_common();
    default:
      // Unknown field id — mirror Thrift's generated behavior.
      throw new java.lang.IllegalStateException();
  }
}
/**
 * Materializes the runtime object for this task's component by locating its
 * serialized {@code ComponentObject} in the raw topology (spout, bolt, or
 * state spout), then unwrapping shell components and Thrift-described Java
 * objects into executable instances.
 *
 * @return the instantiated component object
 * @throws RuntimeException if {@code componentId} is not present in the topology
 */
private Object mkTaskObject() {
    StormTopology topology = systemTopologyContext.getRawTopology();
    Map<String, SpoutSpec> spouts = topology.get_spouts();
    Map<String, Bolt> bolts = topology.get_bolts();
    Map<String, StateSpoutSpec> stateSpouts = topology.get_state_spouts();

    // Find the serialized component definition for this componentId.
    ComponentObject componentObject;
    if (spouts.containsKey(componentId)) {
        componentObject = spouts.get(componentId).get_spout_object();
    } else if (bolts.containsKey(componentId)) {
        componentObject = bolts.get(componentId).get_bolt_object();
    } else if (stateSpouts.containsKey(componentId)) {
        componentObject = stateSpouts.get(componentId).get_state_spout_object();
    } else {
        throw new RuntimeException("Could not find " + componentId + " in " + topology);
    }

    Object result = Utils.getSetComponentObject(componentObject);

    // Multilang shell components get wrapped in the matching JVM-side adapter.
    if (result instanceof ShellComponent) {
        result = spouts.containsKey(componentId)
                ? new ShellSpout((ShellComponent) result)
                : new ShellBolt((ShellComponent) result);
    }

    // A JavaObject is a Thrift description of a class + ctor args; instantiate it.
    if (result instanceof JavaObject) {
        result = Thrift.instantiateJavaObject((JavaObject) result);
    }
    return result;
}
/**
 * Create a new topology to be tracked.
 *
 * @param origTopo the original topology.
 * @param cluster a cluster that should have been launched with tracking enabled.
 */
public TrackedTopology(StormTopology origTopo, ILocalCluster cluster) {
    LOG.warn("CLUSTER {} - {}", cluster, cluster.getTrackedId());
    this.cluster = cluster;
    lastSpoutCommit = new AtomicInteger(0);
    String id = cluster.getTrackedId();
    // Deep-copy so the caller's topology object is never mutated.
    topology = origTopo.deepCopy();
    // Re-serialize every bolt wrapped in a BoltTracker bound to this tracked id.
    for (Bolt bolt : topology.get_bolts().values()) {
        IRichBolt wrapped = (IRichBolt) Thrift.deserializeComponentObject(bolt.get_bolt_object());
        bolt.set_bolt_object(Thrift.serializeComponentObject(new BoltTracker(wrapped, id)));
    }
    // Likewise wrap every spout in a SpoutTracker.
    for (SpoutSpec spout : topology.get_spouts().values()) {
        IRichSpout wrapped = (IRichSpout) Thrift.deserializeComponentObject(spout.get_spout_object());
        spout.set_spout_object(Thrift.serializeComponentObject(new SpoutTracker(wrapped, id)));
    }
}
map((spec) -> Thrift.deserializeComponentObject(spec.get_spout_object())).collect(Collectors.toList());
/**
 * Generic accessor for this struct's fields.
 *
 * @param field the field to fetch
 * @return the corresponding field value
 * @throws IllegalStateException for any field not covered by the switch
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case SPOUT_OBJECT:
      return get_spout_object();
    case COMMON:
      return get_common();
    default:
      // No other fields exist on this struct.
      throw new IllegalStateException();
  }
}
/**
 * Registers each spout's deserialized implementation as an input data set on
 * the given topology entity via its {@code inputs} attribute.
 *
 * @param spouts        spout id -> spec map from the Storm topology
 * @param stormConf     topology configuration (raw map)
 * @param topologyOwner owner recorded on any created data-set entities
 * @param topology      the Atlas topology entity to annotate
 * @param entityExtInfo accumulator for referenced entities
 */
private void addTopologyInputs(Map<String, SpoutSpec> spouts, Map stormConf, String topologyOwner, AtlasEntity topology, AtlasEntityExtInfo entityExtInfo) {
    List<AtlasEntity> inputs = new ArrayList<>();

    for (Map.Entry<String, SpoutSpec> entry : spouts.entrySet()) {
        // Rehydrate the spout object from its java-serialized Thrift payload.
        Serializable instance = Utils.javaDeserialize(entry.getValue().get_spout_object().get_serialized_java(), Serializable.class);
        // addDataSet may return null when the spout maps to no known data set.
        AtlasEntity dataSetEntity = addDataSet(instance.getClass().getSimpleName(), topologyOwner, instance, stormConf, entityExtInfo);
        if (dataSetEntity != null) {
            inputs.add(dataSetEntity);
        }
    }

    topology.setAttribute("inputs", AtlasTypeUtil.getAtlasObjectIds(inputs));
}
/**
 * Collects an Atlas data-set entity for every spout in the topology and
 * attaches the collected references to the topology entity's {@code inputs}
 * attribute.
 *
 * @param spouts        spout id -> spec map from the Storm topology
 * @param stormConf     topology configuration (raw map)
 * @param topologyOwner owner attributed to created data-set entities
 * @param topology      the Atlas topology entity to annotate
 * @param entityExtInfo accumulator for referenced entities
 */
private void addTopologyInputs(Map<String, SpoutSpec> spouts, Map stormConf, String topologyOwner, AtlasEntity topology, AtlasEntityExtInfo entityExtInfo) {
    List<AtlasEntity> inputs = new ArrayList<>();
    for (Map.Entry<String, SpoutSpec> entry : spouts.entrySet()) {
        // Rebuild the spout instance from its java-serialized Thrift payload.
        Serializable instance = Utils.javaDeserialize(entry.getValue().get_spout_object().get_serialized_java(), Serializable.class);
        String dsType = instance.getClass().getSimpleName();
        // addDataSet can return null (spout not mapped to a data set); skip those.
        AtlasEntity dsEntity = addDataSet(dsType, topologyOwner, instance, stormConf, entityExtInfo);
        if (dsEntity != null) {
            inputs.add(dsEntity);
        }
    }
    topology.setAttribute("inputs", AtlasTypeUtil.getAtlasObjectIds(inputs));
}
public static Map<String, Map<String, Object>> lag (StormTopology stormTopology, Map topologyConf) { Map<String, Map<String, Object>> result = new HashMap<>(); Map<String, SpoutSpec> spouts = stormTopology.get_spouts(); String className = null; for (Map.Entry<String, SpoutSpec> spout: spouts.entrySet()) { try { SpoutSpec spoutSpec = spout.getValue(); ComponentObject componentObject = spoutSpec.get_spout_object(); // FIXME: yes it's a trick so we might be better to find alternative way... className = getClassNameFromComponentObject(componentObject); logger.debug("spout classname: {}", className); if (className.endsWith("storm.kafka.spout.KafkaSpout")) { result.put(spout.getKey(), getLagResultForNewKafkaSpout(spout.getKey(), spoutSpec, topologyConf)); } else if (className.endsWith("storm.kafka.KafkaSpout")) { result.put(spout.getKey(), getLagResultForOldKafkaSpout(spout.getKey(), spoutSpec, topologyConf)); } } catch (Exception e) { logger.warn("Exception thrown while getting lag for spout id: " + spout.getKey() + " and spout class: " + className); logger.warn("Exception message:" + e.getMessage(), e); } } return result; }
/**
 * Builds an Atlas entity describing one Storm spout instance.
 *
 * @param spoutName  logical name of the spout within the topology
 * @param stormSpout the spout's Thrift spec (carries the serialized object)
 * @return a {@code STORM_SPOUT} entity with name, driver class, and flattened conf
 */
private AtlasEntity createSpoutInstance(String spoutName, SpoutSpec stormSpout) {
    // Rebuild the actual spout object from its java-serialized payload.
    Serializable instance = Utils.javaDeserialize(stormSpout.get_spout_object().get_serialized_java(), Serializable.class);

    AtlasEntity spout = new AtlasEntity(StormDataTypes.STORM_SPOUT.getName());
    spout.setAttribute(AtlasClient.NAME, spoutName);
    spout.setAttribute("driverClass", instance.getClass().getName());
    // Flattened view of the instance's fields, captured as the spout's conf.
    spout.setAttribute("conf", StormTopologyUtil.getFieldValues(instance, true, null));
    return spout;
}