/**
 * Creates an executor wrapping the given batch bolt.
 *
 * <p>The bolt is eagerly Java-serialized here so a fresh copy can be
 * deserialized later (e.g. once per batch) from {@code _boltSer}.
 *
 * @param bolt the batch bolt to wrap; must be Java-serializable
 */
public BatchBoltExecutor(IBatchBolt bolt) { _boltSer = Utils.javaSerialize(bolt); }
/**
 * Registers a worker lifecycle hook with this topology.
 *
 * <p>The hook is Java-serialized immediately and stored as raw bytes.
 *
 * @param workerHook the lifecycle hook to add; must not be null and must be
 *     Java-serializable
 * @throws IllegalArgumentException if {@code workerHook} is null
 */
public void addWorkerHook(IWorkerHook workerHook) {
    if (workerHook == null) {
        throw new IllegalArgumentException("WorkerHook must not be null.");
    }
    byte[] serializedHook = Utils.javaSerialize(workerHook);
    _workerHooks.add(ByteBuffer.wrap(serializedHook));
}
/**
 * Wraps an arbitrary custom-grouping object into a Thrift {@link Grouping}
 * by Java-serializing it.
 *
 * @param obj the custom grouping instance; must be Java-serializable
 * @return a Grouping carrying the serialized bytes
 */
public static Grouping prepareCustomStreamGrouping(Object obj) { return Grouping.custom_serialized(Utils.javaSerialize(obj)); }
/**
 * Wraps an arbitrary component implementation into a Thrift
 * {@link ComponentObject} by Java-serializing it.
 *
 * @param obj the component instance; must be Java-serializable
 * @return a ComponentObject carrying the serialized bytes
 */
public static ComponentObject serializeComponentObject(Object obj) { return ComponentObject.serialized_java(Utils.javaSerialize(obj)); }
/**
 * Publishes this Nimbus instance's info (from configuration) to the
 * ZooKeeper leader-info node, creating the node if it does not yet exist.
 *
 * @param acls ACLs applied when the leader-info node has to be created
 */
private void setUpNimbusInfo(List<ACL> acls) {
    // Leader-info node lives directly under the configured Storm ZK root.
    String leaderInfoPath = conf.get(Config.STORM_ZOOKEEPER_ROOT) + ClusterUtils.LEADERINFO_SUBTREE;
    NimbusInfo nimbusInfo = NimbusInfo.fromConf(conf);
    if (ClientZookeeper.existsNode(zk, leaderInfoPath, false)) {
        // Node already present (e.g. from an earlier run) -- overwrite its data.
        ClientZookeeper.setData(zk, leaderInfoPath, Utils.javaSerialize(nimbusInfo));
    } else {
        ClientZookeeper.createNode(zk, leaderInfoPath, Utils.javaSerialize(nimbusInfo), CreateMode.PERSISTENT, acls);
    }
}
/**
 * ## Repartitioning Operation
 *
 * Repartitions the stream using the supplied custom grouping.
 *
 * @param partitioner custom grouping that decides target task assignment;
 *     must be Java-serializable
 * @return a new stream with the custom partitioning applied
 */
public Stream partition(CustomStreamGrouping partitioner) { return partition(Grouping.custom_serialized(Utils.javaSerialize(partitioner))); }
/**
 * Declares a custom grouping on the given stream of the given component.
 *
 * @param componentId id of the component to consume from
 * @param streamId id of the stream to consume
 * @param grouping the custom grouping implementation; must be Java-serializable
 * @return this declarer, for call chaining
 */
@Override
public BoltDeclarer customGrouping(String componentId, String streamId, CustomStreamGrouping grouping) {
    byte[] serializedGrouping = Utils.javaSerialize(grouping);
    return grouping(componentId, streamId, Grouping.custom_serialized(serializedGrouping));
}
/**
 * Builds a Thrift SpoutSpec from a spout instance and its declared outputs.
 *
 * @param spout the spout to Java-serialize into the spec
 * @param outputs the declared output streams of the spout
 * @return the assembled SpoutSpec
 */
public static SpoutSpec prepareSerializedSpoutDetails(IRichSpout spout, Map<String, StreamInfo> outputs) {
    ComponentObject spoutObject = ComponentObject.serialized_java(Utils.javaSerialize(spout));
    ComponentCommon common = prepareComponentCommon(new HashMap<>(), outputs, null, null);
    return new SpoutSpec(spoutObject, common);
}
/**
 * Builds a Thrift Bolt spec from a bolt instance, its inputs, outputs,
 * parallelism hint, and component configuration.
 *
 * @param inputs the streams this bolt consumes, keyed by source stream id
 * @param bolt the bolt to Java-serialize into the spec
 * @param outputs the declared output streams of the bolt
 * @param parallelismHint requested number of executors, or null
 * @param conf component-specific configuration, or null
 * @return the assembled Bolt spec
 */
public static Bolt prepareSerializedBoltDetails(Map<GlobalStreamId, Grouping> inputs, IBolt bolt, Map<String, StreamInfo> outputs, Integer parallelismHint, Map<String, Object> conf) {
    ComponentObject boltObject = ComponentObject.serialized_java(Utils.javaSerialize(bolt));
    return new Bolt(boltObject, prepareComponentCommon(inputs, outputs, parallelismHint, conf));
}
/**
 * Verifies the target selector survives Java serialization; the
 * javaSerialize call throws if any reachable object is not serializable.
 */
@Test public void classIsSerializable() throws Exception { Utils.javaSerialize(targetSelector); }
try { maybeAddCheckpointInputs(common); boltSpecs.put(boltId, new Bolt(ComponentObject.serialized_java(Utils.javaSerialize(bolt)), common)); } catch (RuntimeException wrapperCause) { if (wrapperCause.getCause() != null && NotSerializableException.class.equals(wrapperCause.getCause().getClass())) { ComponentCommon common = getComponentCommon(spoutId, spout); try { spoutSpecs.put(spoutId, new SpoutSpec(ComponentObject.serialized_java(Utils.javaSerialize(spout)), common)); } catch (RuntimeException wrapperCause) { if (wrapperCause.getCause() != null && NotSerializableException.class.equals(wrapperCause.getCause().getClass())) {
/**
 * Creates a partition node that routes tuples with an identity grouping,
 * mirroring the basis node's stream id, name, and output fields.
 *
 * @param basis the node whose stream metadata the partition should mirror
 * @return the identity-partitioned node
 */
private static PartitionNode makeIdentityPartition(Node basis) {
    Grouping identityGrouping = Grouping.custom_serialized(Utils.javaSerialize(new IdentityGrouping()));
    return new PartitionNode(basis.streamId, basis.name, basis.allOutputFields, identityGrouping);
}
/** Confirms the random two-task assignment creator is Java-serializable. */
@Test
public void classIsSerializable() throws Exception {
    PartialKeyGrouping.AssignmentCreator creator = new PartialKeyGrouping.RandomTwoTaskAssignmentCreator();
    Utils.javaSerialize(creator);
}
/**
 * Confirms a PartialKeyGrouping built over a field set is Java-serializable;
 * the javaSerialize call throws if it is not.
 */
@Test public void testGroupingIsSerializable() throws Exception { PartialKeyGrouping grouping = new PartialKeyGrouping(new Fields("some_field")); Utils.javaSerialize(grouping); }
/**
 * Builds a minimal StormTopology fixture with one spout and one bolt.
 *
 * <p>NOTE(review): method name has a typo ("genereate"); left as-is because
 * callers are not visible from this chunk.
 *
 * @param withWorkerHook when true, a serialized BaseWorkerHook is attached
 * @return the assembled topology
 */
private StormTopology genereateStormTopology(boolean withWorkerHook) {
    ImmutableMap<String, SpoutSpec> spouts = ImmutableMap.of("spout-1", new SpoutSpec());
    ImmutableMap<String, Bolt> bolts = ImmutableMap.of("bolt-1", new Bolt());
    ImmutableMap<String, StateSpoutSpec> state_spouts = ImmutableMap.of();
    StormTopology stormTopology = new StormTopology(spouts, bolts, state_spouts);
    if (withWorkerHook) {
        BaseWorkerHook workerHook = new BaseWorkerHook();
        // Worker hooks travel inside the topology as Java-serialized byte buffers.
        stormTopology.add_to_worker_hooks(ByteBuffer.wrap(Utils.javaSerialize(workerHook)));
    }
    return stormTopology;
}
}
/**
 * EXISTS round-trip: stores a pulse, then checks that the unauthorized
 * EXISTS request is rejected while the authorized one reports the path
 * as present.
 */
@Test
public void testServerExistsTrue() {
    String path = "/exists_path";
    String dataString = "pulse data";
    // Store a pulse at the path so the EXISTS check below has something to find.
    HBPulse hbPulse = new HBPulse();
    hbPulse.set_id(path);
    hbPulse.set_details(Utils.javaSerialize(dataString));
    messageWithRandId(HBServerMessageType.SEND_PULSE, HBMessageData.pulse(hbPulse));
    handler.handleMessage(hbMessage, true);
    messageWithRandId(HBServerMessageType.EXISTS, HBMessageData.path(path));
    // Second handleMessage argument presumably toggles authorization -- the
    // false call must yield NOT_AUTHORIZED, the true call a real response.
    HBMessage badResponse = handler.handleMessage(hbMessage, false);
    HBMessage goodResponse = handler.handleMessage(hbMessage, true);
    Assert.assertEquals(mid, badResponse.get_message_id());
    Assert.assertEquals(HBServerMessageType.NOT_AUTHORIZED, badResponse.get_type());
    Assert.assertEquals(mid, goodResponse.get_message_id());
    Assert.assertEquals(HBServerMessageType.EXISTS_RESPONSE, goodResponse.get_type());
    Assert.assertTrue(goodResponse.get_data().get_boolval());
}
/**
 * Add a new worker lifecycle hook.
 *
 * @param workerHook the lifecycle hook to add; must not be null and must be
 *     Java-serializable
 * @throws IllegalArgumentException if {@code workerHook} is null
 */
public void addWorkerHook(IWorkerHook workerHook) {
    // Space after "if" added for consistency with the identical method elsewhere
    // in the codebase (which writes "if (null == workerHook)").
    if (null == workerHook) {
        throw new IllegalArgumentException("WorkerHook must not be null.");
    }
    _workerHooks.add(ByteBuffer.wrap(Utils.javaSerialize(workerHook)));
}
/**
 * ## Repartitioning Operation
 *
 * Repartitions the stream with a user-supplied custom grouping strategy.
 *
 * @param partitioner grouping that decides task assignment; must be Java-serializable
 * @return the repartitioned stream
 */
public Stream partition(CustomStreamGrouping partitioner) {
    byte[] serializedPartitioner = Utils.javaSerialize(partitioner);
    return partition(Grouping.custom_serialized(serializedPartitioner));
}
/**
 * Declares a custom grouping on a specific stream of a component.
 *
 * @param componentId id of the component to consume from
 * @param streamId id of the stream to consume
 * @param grouping the custom grouping to serialize and apply
 * @return this declarer, for call chaining
 */
@Override
public BoltDeclarer customGrouping(String componentId, String streamId, CustomStreamGrouping grouping) {
    Grouping thriftGrouping = Grouping.custom_serialized(Utils.javaSerialize(grouping));
    return grouping(componentId, streamId, thriftGrouping);
}
/**
 * Builds a partition node using an identity grouping while keeping the
 * basis node's stream id, name, and output fields unchanged.
 *
 * @param basis node supplying the stream metadata to mirror
 * @return the new identity-partitioned node
 */
private static PartitionNode makeIdentityPartition(Node basis) {
    byte[] groupingBytes = Utils.javaSerialize(new IdentityGrouping());
    return new PartitionNode(basis.streamId, basis.name, basis.allOutputFields, Grouping.custom_serialized(groupingBytes));
}