// Registers a LoadSpout under the configured id/parallelism, then applies RAS resource hints.
// NOTE(review): fragment is cut off (the `if` is never closed here). As visible, setCPULoad is
// only reached when memoryLoad > 0 — if cpuLoad is meant to apply independently, that gating
// looks like a bug; confirm against the full method body.
SpoutDeclarer sd = builder.setSpout(spoutConf.id, new LoadSpout(spoutConf), spoutConf.parallelism); if (spoutConf.memoryLoad > 0) { sd.setMemoryLoad(spoutConf.memoryLoad); sd.setCPULoad(spoutConf.cpuLoad);
/**
 * Instantiates every spout declared in the topology definition, registers it with the
 * {@link TopologyBuilder}, applies its optional RAS resource settings, and records it
 * in the execution context so downstream components can reference it by id.
 *
 * @param context execution context holding the topology definition and the spout registry
 * @param builder builder the spouts are added to
 * @throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException,
 *         InstantiationException, IllegalAccessException, NoSuchFieldException
 *         if reflective construction of a spout fails in {@code buildSpout}
 */
private static void buildSpouts(ExecutionContext context, TopologyBuilder builder)
        throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException,
               InstantiationException, IllegalAccessException, NoSuchFieldException {
    for (SpoutDef def : context.getTopologyDef().getSpouts()) {
        IRichSpout spout = buildSpout(def, context);
        SpoutDeclarer declarer = builder.setSpout(def.getId(), spout, def.getParallelism());
        applySpoutResources(def, declarer);
        context.addSpout(def.getId(), spout);
    }
}

/**
 * Copies the optional resource-aware-scheduling settings (memory, CPU, task count) from
 * the definition onto the declarer. A value of -1 (or lower) means "not configured" and
 * leaves the Storm default in place.
 */
private static void applySpoutResources(SpoutDef def, SpoutDeclarer declarer) {
    if (def.getOnHeapMemoryLoad() > -1) {
        if (def.getOffHeapMemoryLoad() > -1) {
            declarer.setMemoryLoad(def.getOnHeapMemoryLoad(), def.getOffHeapMemoryLoad());
        } else {
            declarer.setMemoryLoad(def.getOnHeapMemoryLoad());
        }
    }
    if (def.getCpuLoad() > -1) {
        declarer.setCPULoad(def.getCpuLoad());
    }
    if (def.getNumTasks() > -1) {
        declarer.setNumTasks(def.getNumTasks());
    }
}
// Wraps a rich spout in a RichSpoutBatchTriggerer so it can emit Trident-style batches, then
// applies per-component config and master-coordinator resource hints.
// NOTE(review): fragment — the `if` matching this `} else {` is outside the visible span, so the
// condition selecting one- vs two-argument setMemoryLoad cannot be confirmed here.
builder.setSpout(id, new RichSpoutBatchTriggerer((IRichSpout) c.spout, c.streamName, c.batchGroupId), c.parallelism); d.addConfigurations(c.componentConf); masterCoord.setMemoryLoad(onHeap, offHeap); } else { masterCoord.setMemoryLoad(onHeap); masterCoord.setCPULoad(cpuLoad);
// Registers one Kafka spout per sensor: the id is suffixed with the sensor key only when more
// than one sensor is configured, so the single-sensor case keeps the plain "kafkaSpout" id.
// Parallelism and task count are looked up positionally via `i`, which is advanced at the end —
// presumably `i` tracks the iteration over sensorToParserConfigs; confirm in the enclosing loop.
String spoutId = sensorToParserConfigs.size() > 1 ? "kafkaSpout-" + entry.getKey() : "kafkaSpout"; builder.setSpout(spoutId, kafkaSpout, spoutParallelism.get(i)) .setNumTasks(spoutNumTasks.get(i)); spoutIds.add(spoutId); ++i;
// NOTE(review): fragment — the start of this call chain (the setSpout(...) receiver and the
// constructor these arguments belong to) is outside the visible span. As shown, the declarer
// gets the RabbitMQ spout config map plus a max-spout-pending cap read from
// "rabbit.prefetch.messages" (default 250).
, getAdapterName(), getQueueName()) , LGProperties.getInteger("rabbit.spout.threads", 1)) .addConfigurations(spoutConfig.asMap()) .setMaxSpoutPending(LGProperties.getInteger("rabbit.prefetch.messages", 250));
/**
 * Assembles a {@link StormTopology} from declarative spout and bolt descriptions.
 * Each spout is registered with its parallelism and per-component config; each bolt is
 * registered as either an IRichBolt or an IBasicBolt (whichever it implements), given its
 * config, and wired to its inputs via {@code addInputs}.
 *
 * Fixes over the previous version: the bolt-loop local was misleadingly named {@code spoutId}
 * (now {@code boltId}), and the bolt instance is obtained once through the {@code getBolt()}
 * accessor instead of mixing direct field access ({@code spec.bolt}) with the getter.
 *
 * @param spoutMap spout id -> spout details (instance, parallelism, config)
 * @param boltMap  bolt id -> bolt details (instance, parallelism, config, inputs)
 * @return the assembled topology
 */
public static StormTopology buildTopology(Map<String, SpoutDetails> spoutMap, Map<String, BoltDetails> boltMap) {
    TopologyBuilder builder = new TopologyBuilder();
    for (Entry<String, SpoutDetails> entry : spoutMap.entrySet()) {
        String spoutId = entry.getKey();
        SpoutDetails spec = entry.getValue();
        SpoutDeclarer spoutDeclarer = builder.setSpout(spoutId, spec.getSpout(), spec.getParallelism());
        spoutDeclarer.addConfigurations(spec.getConf());
    }
    for (Entry<String, BoltDetails> entry : boltMap.entrySet()) {
        String boltId = entry.getKey();
        BoltDetails spec = entry.getValue();
        Object bolt = spec.getBolt();
        BoltDeclarer boltDeclarer;
        // Prefer the rich interface; fall back to the basic one (the map is expected to
        // contain only IRichBolt or IBasicBolt instances, as before).
        if (bolt instanceof IRichBolt) {
            boltDeclarer = builder.setBolt(boltId, (IRichBolt) bolt, spec.getParallelism());
        } else {
            boltDeclarer = builder.setBolt(boltId, (IBasicBolt) bolt, spec.getParallelism());
        }
        boltDeclarer.addConfigurations(spec.getConf());
        addInputs(boltDeclarer, spec.getInputs());
    }
    return builder.createTopology();
}
// Registers the transactional spout coordinator, then layers every accumulated component
// config onto it and tags it with the topology's transactional id.
// NOTE(review): fragment — the `for` loop body is not closed in the visible span; whether the
// addConfiguration(TOPOLOGY_TRANSACTIONAL_ID, ...) call sits inside or after the loop cannot be
// confirmed here (inside the loop it would be redundantly re-applied per config map).
SpoutDeclarer declarer = builder.setSpout(coordinator, new TransactionalSpoutCoordinator(_spout)); for(Map<String, Object> conf: _spoutConfs) { declarer.addConfigurations(conf); declarer.addConfiguration(Config.TOPOLOGY_TRANSACTIONAL_ID, _id);
private static TopologyBuilder buildTopology() throws Exception { TopologyBuilder builder = new TopologyBuilder(); String topicName = Configuration.getConfig().getString("rtc.mq.spout.topic"); String groupName = Configuration.getConfig().getString("rtc.mq.spout.group"); BrokerHosts hosts = new ZkHosts(Configuration.getConfig().getString("rtc.zk.hosts")); SpoutConfig spoutConfig = new SpoutConfig(hosts, topicName, "/consumers", groupName); spoutConfig.startOffsetTime = kafka.api.OffsetRequest.LatestTime(); spoutConfig.zkServers = Arrays.asList(Configuration.getConfig().getString("rtc.storm.zkServers").split(",")); spoutConfig.zkPort = Configuration.getConfig().getInt("rtc.storm.zkPort"); spoutConfig.scheme = new SchemeAsMultiScheme(new StringScheme()); KafkaSpout kafkaSpout = new KafkaSpout(spoutConfig); builder.setSpout("MQSpout", kafkaSpout, Configuration.getConfig().getInt("rtc.storm.spout.parallelismHint")).setNumTasks(Configuration.getConfig().getInt("rtc.storm.spout.task")); builder.setBolt("ExtractBolt", new ExtractBolt(), Configuration.getConfig().getInt("rtc.storm.extract.bolt.parallelismHint")).setNumTasks(Configuration.getConfig().getInt("rtc.storm.extract.bolt.task")).shuffleGrouping("MQSpout"); builder.setBolt("Statistic", new StatisticBolt(), Configuration.getConfig().getInt("rtc.storm.statistic.bolt.parallelismHint")).setNumTasks(Configuration.getConfig().getInt("rtc.storm.statistic.bolt.task")).fieldsGrouping("ExtractBolt", new Fields(new String[]{"hashKeys"})); // builder.setBolt("Alarm", new AlarmBolt(), Configuration.getConfig().getInt("rtc.storm.alarm.bolt.parallelismHint")).setNumTasks(Configuration.getConfig().getInt("rtc.storm.alarm.bolt.task")).fieldsGrouping("Statistic", new Fields(new String[]{"EventName"})); return builder; }
// NOTE(review): fragment — two detached method-call chains whose receivers (the declarers
// returned by setSpout calls) are outside the visible span. Both apply a RabbitMQ spout config
// map and the same "rabbit.prefetch.messages" max-spout-pending cap (default 250); the second
// chain uses a separate command-spout config.
.addConfigurations(spoutConfig.asMap()) .setMaxSpoutPending(LGProperties.getInteger("rabbit.prefetch.messages", 250)); .addConfigurations(spoutConfigCommand.asMap()) .setMaxSpoutPending(LGProperties.getInteger("rabbit.prefetch.messages", 250));
// NOTE(review): fragment — the `if` matching this `} else {` is outside the visible span.
// As shown: one branch sets both on- and off-heap memory, the other sets on-heap only and
// additionally applies the CPU load; whether setCPULoad belongs to the else branch or follows
// the whole if/else cannot be confirmed from here.
spoutDeclarer.setMemoryLoad(onHeap, offHeap); } else { spoutDeclarer.setMemoryLoad(onHeap); spoutDeclarer.setCPULoad(cpuLoad);
/**
 * Creates and registers each spout from the topology definition on the builder, then
 * stores it in the context's spout registry. Memory, CPU, and task-count settings are
 * optional; a sentinel of -1 (or lower) leaves the Storm default untouched.
 *
 * @param context source of the topology definition and target spout registry
 * @param builder topology builder receiving the spout declarations
 * @throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException,
 *         InstantiationException, IllegalAccessException, NoSuchFieldException
 *         when reflective spout construction fails
 */
private static void buildSpouts(ExecutionContext context, TopologyBuilder builder)
        throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException,
               InstantiationException, IllegalAccessException, NoSuchFieldException {
    for (SpoutDef spoutDef : context.getTopologyDef().getSpouts()) {
        IRichSpout spout = buildSpout(spoutDef, context);
        SpoutDeclarer declarer = builder.setSpout(spoutDef.getId(), spout, spoutDef.getParallelism());

        boolean hasOnHeap = spoutDef.getOnHeapMemoryLoad() > -1;
        boolean hasOffHeap = spoutDef.getOffHeapMemoryLoad() > -1;
        // Off-heap memory is only meaningful together with an on-heap value.
        if (hasOnHeap && hasOffHeap) {
            declarer.setMemoryLoad(spoutDef.getOnHeapMemoryLoad(), spoutDef.getOffHeapMemoryLoad());
        } else if (hasOnHeap) {
            declarer.setMemoryLoad(spoutDef.getOnHeapMemoryLoad());
        }
        if (spoutDef.getCpuLoad() > -1) {
            declarer.setCPULoad(spoutDef.getCpuLoad());
        }
        if (spoutDef.getNumTasks() > -1) {
            declarer.setNumTasks(spoutDef.getNumTasks());
        }

        context.addSpout(spoutDef.getId(), spout);
    }
}
// NOTE(review): fragment — as shown, setMemoryLoad is called twice in a row (two-arg then
// one-arg), with the second overwriting the first. In the sibling snippets these calls sit on
// opposite sides of an if/else on offHeap; the branch structure was almost certainly lost when
// this span was cut. Verify against the full method before treating this as a real bug.
d.addConfigurations(conf); masterCoord.setMemoryLoad(onHeap, offHeap); masterCoord.setMemoryLoad(onHeap); masterCoord.setCPULoad(cpuLoad);
private static TopologyBuilder buildTopology() throws Exception { TopologyBuilder builder = new TopologyBuilder(); String topicName = Configuration.getConfig().getString("rtc.mq.spout.topic"); String groupName = Configuration.getConfig().getString("rtc.mq.spout.group"); BrokerHosts hosts = new ZkHosts(Configuration.getConfig().getString("rtc.zk.hosts")); SpoutConfig spoutConfig = new SpoutConfig(hosts, topicName, "/consumers", groupName); spoutConfig.startOffsetTime = kafka.api.OffsetRequest.LatestTime(); spoutConfig.zkServers = Arrays.asList(Configuration.getConfig().getString("rtc.storm.zkServers").split(",")); spoutConfig.zkPort = Configuration.getConfig().getInt("rtc.storm.zkPort"); spoutConfig.scheme = new SchemeAsMultiScheme(new StringScheme()); KafkaSpout kafkaSpout = new KafkaSpout(spoutConfig); builder.setSpout("MQSpout", kafkaSpout, Configuration.getConfig().getInt("rtc.storm.spout.parallelismHint")).setNumTasks(Configuration.getConfig().getInt("rtc.storm.spout.task")); builder.setBolt("ExtractBolt", new ExtractBolt(), Configuration.getConfig().getInt("rtc.storm.extract.bolt.parallelismHint")).setNumTasks(Configuration.getConfig().getInt("rtc.storm.extract.bolt.task")).shuffleGrouping("MQSpout"); builder.setBolt("Statistic", new StatisticBolt(), Configuration.getConfig().getInt("rtc.storm.statistic.bolt.parallelismHint")).setNumTasks(Configuration.getConfig().getInt("rtc.storm.statistic.bolt.task")).fieldsGrouping("ExtractBolt", new Fields(new String[]{"hashKeys"})); // builder.setBolt("Alarm", new AlarmBolt(), Configuration.getConfig().getInt("rtc.storm.alarm.bolt.parallelismHint")).setNumTasks(Configuration.getConfig().getInt("rtc.storm.alarm.bolt.task")).fieldsGrouping("Statistic", new Fields(new String[]{"EventName"})); return builder; }
// Declares a two-component word-count topology with explicit RAS resources:
// spout (parallelism 2): 250.0 CPU, 1000.0 MB on-heap / 200.0 MB off-heap;
// bolt  (parallelism 1): 100.0 CPU, 500.0 MB on-heap / 100.0 MB off-heap.
// NOTE(review): builder1 is declared outside the visible span.
builder1.setSpout("wordSpout", new TestWordSpout(), 2).setCPULoad(250.0).setMemoryLoad(1000.0, 200.0); builder1.setBolt("wordCountBolt", new TestWordCounter(), 1).shuffleGrouping("wordSpout").setCPULoad(100.0) .setMemoryLoad(500.0, 100.0);
/**
 * CLI entry point: parses command-line options, creates the target feature schema in the
 * Accumulo-backed GeoTools data store, and submits a Kafka->GeoMesa ingest topology
 * (Spout: 10 executors/tasks reading the given topic; Bolt: 20 executors writing features).
 *
 * Fixes over the previous version: the spout/bolt locals no longer shadow their class names
 * (they were literally named {@code OSMKafkaSpout}/{@code OSMKafkaBolt}), and the DataStore
 * opened for schema creation is now disposed instead of leaked.
 *
 * @param args command-line arguments (feature name, topic, and data-store options)
 * @return 0 on successful submission
 * @throws Exception on parse, data-store, or topology-submission failure
 */
public static int run(String[] args) throws Exception {
    // NOTE(review): BasicParser is deprecated in commons-cli; DefaultParser is the
    // modern replacement — left as-is to avoid changing the file's cli version assumptions.
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);
    Map<String, String> dsConf = getAccumuloDataStoreConf(cmd);
    String featureName = cmd.getOptionValue(FEATURE_NAME);
    SimpleFeatureType featureType = DataUtilities.createType(featureName, "geom:Point:srid=4326");
    // The store is only needed to create the schema; dispose it so the underlying
    // Accumulo connection is released before the topology is submitted.
    DataStore ds = DataStoreFinder.getDataStore(dsConf);
    try {
        ds.createSchema(featureType);
    } finally {
        ds.dispose();
    }
    TopologyBuilder topologyBuilder = new TopologyBuilder();
    String topic = cmd.getOptionValue(TOPIC);
    String groupId = topic; // one consumer group per topic
    dsConf.put(OSMIngest.FEATURE_NAME, featureName);
    OSMKafkaSpout spout = new OSMKafkaSpout(dsConf, groupId, topic);
    topologyBuilder.setSpout("Spout", spout, 10).setNumTasks(10);
    OSMKafkaBolt bolt = new OSMKafkaBolt(dsConf, groupId, topic);
    topologyBuilder.setBolt("Bolt", bolt, 20).shuffleGrouping("Spout");
    Config stormConf = new Config();
    stormConf.setNumWorkers(10);
    stormConf.setDebug(true);
    StormSubmitter.submitTopology(topic, stormConf, topologyBuilder.createTopology());
    return 0;
} }
// Builds five single-spout test topologies, each with distinct parallelism and RAS
// CPU/memory settings, and a fresh Config per topology — presumably to exercise the
// resource-aware scheduler across differently-sized workloads.
// NOTE(review): builder1..builder5 are declared outside the visible span.
builder1.setSpout("wordSpout1", new TestWordSpout(), 1).setCPULoad(300.0).setMemoryLoad(2000.0, 48.0); StormTopology stormTopology1 = builder1.createTopology(); Config config1 = new Config(); builder2.setSpout("wordSpout2", new TestWordSpout(), 4).setCPULoad(100.0).setMemoryLoad(500.0, 12.0); StormTopology stormTopology2 = builder2.createTopology(); Config config2 = new Config(); builder3.setSpout("wordSpout3", new TestWordSpout(), 4).setCPULoad(20.0).setMemoryLoad(200.0, 56.0); StormTopology stormTopology3 = builder3.createTopology(); Config config3 = new Config(); builder4.setSpout("wordSpout4", new TestWordSpout(), 12).setCPULoad(30.0).setMemoryLoad(100.0, 0.0); StormTopology stormTopology4 = builder4.createTopology(); Config config4 = new Config(); builder5.setSpout("wordSpout5", new TestWordSpout(), 40).setCPULoad(25.0).setMemoryLoad(100.0, 28.0); StormTopology stormTopology5 = builder5.createTopology(); Config config5 = new Config();
// Declares the "word" test spout with parallelism 10, 20 CPU units, and 64/16 MB
// on-/off-heap memory. NOTE(review): builder is declared outside the visible span.
SpoutDeclarer spout = builder.setSpout("word", new TestWordSpout(), 10).setCPULoad(20); spout.setMemoryLoad(64, 16);
// One-spout/one-bolt word-count test topology where both components request 20.0 CPU and
// 200.0 MB on-heap memory (single-argument setMemoryLoad: no off-heap request).
// NOTE(review): builder1 is declared outside the visible span.
builder1.setSpout("wordSpout", new TestWordSpout(), 1).setCPULoad(20.0).setMemoryLoad(200.0); builder1.setBolt("wordCountBolt", new TestWordCounter(), 1).shuffleGrouping("wordSpout").setCPULoad(20.0).setMemoryLoad(200.0); StormTopology stormTopology1 = builder1.createTopology();
/**
 * Loads the named spout reflectively, registers it as the record source of a Bullet
 * topology with the given parallelism and RAS resource settings, and submits the
 * resulting topology.
 *
 * @param builder The {@link TopologyBuilder} to use to add the topology to.
 * @param spout The name of the instance of an {@link org.apache.storm.topology.IRichSpout} to load.
 * @param args The arguments to pass to the constructor of this spout (otherwise the default constructor is used).
 * @param config The Storm settings for this Bullet topology.
 * @param parallelism The parallelism of the spout component.
 * @param cpuLoad The CPU load for the Storm RAS scheduler.
 * @param onHeapMemoryLoad The on heap memory load for the Storm RAS scheduler.
 * @param offHeapMemoryLoad The off heap memory load for the Storm RAS scheduler.
 * @throws Exception if there were issues creating the topology.
 */
public static void submit(TopologyBuilder builder, String spout, List<String> args, BulletStormConfig config,
                          Number parallelism, Number cpuLoad, Number onHeapMemoryLoad, Number offHeapMemoryLoad) throws Exception {
    // The two resource setters are independent; order does not matter.
    builder.setSpout(TopologyConstants.RECORD_COMPONENT, ReflectionUtils.getSpout(spout, args), parallelism)
           .setMemoryLoad(onHeapMemoryLoad, offHeapMemoryLoad)
           .setCPULoad(cpuLoad);
    log.info("Added spout {} with Parallelism {}, CPU load {}, On-heap memory {}, Off-heap memory {}",
             spout, parallelism, cpuLoad, onHeapMemoryLoad, offHeapMemoryLoad);
    submit(config, TopologyConstants.RECORD_COMPONENT, builder);
}
/**
 * Reads the Bullet spout settings (class name, constructor args, parallelism, and RAS
 * CPU/memory loads) from the config, reflectively instantiates the spout, and registers
 * it on the builder as the record component.
 *
 * @param config  source of the BULLET_SPOUT_* settings
 * @param builder topology builder that receives the spout
 * @throws Exception if the spout class cannot be loaded or instantiated
 */
private static void addBulletSpout(BulletStormConfig config, TopologyBuilder builder) throws Exception {
    String className = config.getAs(BulletStormConfig.BULLET_SPOUT_CLASS_NAME, String.class);
    List<String> spoutArgs = config.getAs(BulletStormConfig.BULLET_SPOUT_ARGS, List.class);
    Number parallelism = config.getAs(BulletStormConfig.BULLET_SPOUT_PARALLELISM, Number.class);
    Number cpuLoad = config.getAs(BulletStormConfig.BULLET_SPOUT_CPU_LOAD, Number.class);
    Number onHeapLoad = config.getAs(BulletStormConfig.BULLET_SPOUT_MEMORY_ON_HEAP_LOAD, Number.class);
    Number offHeapLoad = config.getAs(BulletStormConfig.BULLET_SPOUT_MEMORY_OFF_HEAP_LOAD, Number.class);

    builder.setSpout(TopologyConstants.RECORD_COMPONENT, ReflectionUtils.getSpout(className, spoutArgs), parallelism)
           .setCPULoad(cpuLoad)
           .setMemoryLoad(onHeapLoad, offHeapLoad);
    log.info("Added spout with Parallelism {}, CPU load {}, On-heap memory {}, Off-heap memory {}",
             parallelism, cpuLoad, onHeapLoad, offHeapLoad);
}