Attribute.AttributeMap launchAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
launchAttributes.put(EmbeddedAppLauncher.RUN_ASYNC, true);
if (options.isEmbeddedExecutionDebugMode()) {
  // disable heartbeat monitoring so debugger pauses don't time out operators
  // (assumed completion of the truncated original)
  launchAttributes.put(EmbeddedAppLauncher.HEARTBEAT_MONITORING, false);
}
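A minimal sketch of how these attributes are then handed to the embedded launcher; the three-argument launchApp overload and the shutdown call are assumptions based on the Apex Launcher API, and app/conf are placeholders not present in the original:

// Sketch, not from the original source: launch in embedded mode with the
// attributes assembled above. `app` and `conf` are assumed to be in scope.
EmbeddedAppLauncher<?> launcher = Launcher.getLauncher(LaunchMode.EMBEDDED);
AppHandle handle = launcher.launchApp(app, conf, launchAttributes); // assumed overload
// later, once the run should end:
handle.shutdown(Launcher.ShutdownMode.AWAIT_TERMINATION);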
AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
attributeMap.put(DAG.APPLICATION_ID, APP_ID);
if (pm.attributes.get(PortContext.TUPLE_CLASS) == null) {
  throw new ValidationException("Attribute " + PortContext.TUPLE_CLASS.getName()
      + " missing on port: " + n.name + "." + pm.getPortName());
}
store.getMetaInstance().flush();
CouchbasePOJOSetOperator outputOperator = new CouchbasePOJOSetOperator();
AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
attributeMap.put(DAG.APPLICATION_ID, APP_ID);
OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
props.put(VALUE_DESERIALIZER_CLASS_CONFIG, VALUE_DESERIALIZER);
Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
attributeMap.put(Context.DAGContext.APPLICATION_NAME, "MyKafkaApp");
attributeMap.put(DAG.APPLICATION_PATH, APPLICATION_PATH);
store.setKeyspace(KEYSPACE);
AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
attributeMap.put(DAG.APPLICATION_ID, APP_ID);
OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
inputOperator.outputPort.setSink(sink);
Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
portAttributes.put(Context.PortContext.TUPLE_CLASS, TestInputPojo.class);
TestPortContext tpc = new TestPortContext(portAttributes);
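A short sketch of how the operator context and port context built above are typically wired in before a window runs; the lifecycle mirrors the Splunk test at the end of this section, and Port.setup(PortContext) is assumed as the way TUPLE_CLASS gets applied:

// Sketch: apply operator-level and port-level attributes, then run one window.
inputOperator.setup(context);         // APPLICATION_ID via mocked OperatorContext
inputOperator.outputPort.setup(tpc);  // TUPLE_CLASS via TestPortContext (assumed call)
inputOperator.beginWindow(0);
inputOperator.emitTuples();
inputOperator.endWindow();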
Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
attributeMap.put(DAG.APPLICATION_ID, APP_ID);
OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
TestInputOperator inputOperator = new TestInputOperator();
Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
attributeMap.put(Context.OperatorContext.SPIN_MILLIS, 500);
OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
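The mocked context is usually driven through an explicit window by hand; a minimal sketch of that lifecycle, with myOperator and sink as placeholder names introduced here:

// Sketch of the standard single-window test drive around mockOperatorContext.
myOperator.setup(context);    // operator reads SPIN_MILLIS from the mocked context
myOperator.beginWindow(0);
myOperator.emitTuples();
myOperator.endWindow();
myOperator.teardown();
Assert.assertFalse("no tuples emitted", sink.collectedTuples.isEmpty());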
t.start();
Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
attributeMap.put(Context.DAGContext.APPLICATION_PATH, testMeta.baseDir);
new Thread(p).start();
Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
attributeMap.put(Context.OperatorContext.SPIN_MILLIS, 500);
attributeMap.put(Context.DAGContext.APPLICATION_PATH, testMeta.baseDir);
t2.setRow("row2"); t2.setColValue("tc"); AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap(); attributeMap.put(OperatorContext.PROCESSING_MODE, ProcessingMode.AT_MOST_ONCE); thop.setup(mockOperatorContext(0, attributeMap)); thop.beginWindow(0);
// setup must precede beginWindow in the operator lifecycle, so the mocked
// context is built and applied first
AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
attributeMap.put(OperatorContext.PROCESSING_MODE, ProcessingMode.AT_MOST_ONCE);
thop.setup(mockOperatorContext(0, attributeMap));
thop.beginWindow(0);
thop.input.process(t1);
operator.setExchangeType("fanout"); Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap(); CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
t2.setRow("row2"); t2.setColValue("tc"); AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap(); attributeMap.put(OperatorContext.PROCESSING_MODE, ProcessingMode.AT_LEAST_ONCE); thop.setup(mockOperatorContext(0, attributeMap)); thop.beginWindow(0);
expressions.add("getStringVal()"); outputOperator.setExpression(expressions); AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap(); attributeMap.put(OperatorContext.PROCESSING_MODE, ProcessingMode.AT_LEAST_ONCE); attributeMap.put(OperatorContext.ACTIVATION_WINDOW_ID, -1L); attributeMap.put(DAG.APPLICATION_ID, APP_ID); OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributes =
    new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
attributes.put(DAG.APPLICATION_ID, appId);
public AppHandle launchApp(StreamingApplication app, Properties configProperties) throws IOException {
  List<File> jarsToShip = getYarnDeployDependencies();
  StringBuilder classpath = new StringBuilder();
  for (File path : jarsToShip) {
    if (path.isDirectory()) {
      // jar up directory entries so they can be shipped to the cluster
      File tmpJar = File.createTempFile("beam-runners-apex-", ".jar");
      createJar(path, tmpJar);
      tmpJar.deleteOnExit();
      path = tmpJar;
    }
    if (classpath.length() != 0) {
      classpath.append(':');
    }
    classpath.append(path.getAbsolutePath());
  }

  // build the DAG locally, then ship it with the collected dependencies
  EmbeddedAppLauncher<?> embeddedLauncher = Launcher.getLauncher(LaunchMode.EMBEDDED);
  DAG dag = embeddedLauncher.getDAG();
  app.populateDAG(dag, new Configuration(false));

  Attribute.AttributeMap launchAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
  launchAttributes.put(YarnAppLauncher.LIB_JARS, classpath.toString().replace(':', ','));

  LaunchParams lp = new LaunchParams(dag, launchAttributes, configProperties);
  lp.cmd = "hadoop " + ApexYarnLauncher.class.getName();
  HashMap<String, String> env = new HashMap<>();
  env.put("HADOOP_USER_CLASSPATH_FIRST", "1");
  env.put("HADOOP_CLASSPATH", classpath.toString());
  lp.env = env;
  return launchApp(lp);
}
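A hypothetical caller sketch for the method above; the no-arg ApexYarnLauncher constructor, the isFinished() poll, and myApp are assumptions, and exception handling is omitted:

// Sketch, not from the original source; run inside a method declared to
// throw IOException and InterruptedException.
ApexYarnLauncher launcher = new ApexYarnLauncher();
Properties configProperties = new Properties();
// runner-specific settings would go here; left empty in this sketch
AppHandle handle = launcher.launchApp(myApp, configProperties);
while (!handle.isFinished()) {       // assumed AppHandle API
  Thread.sleep(500);                 // poll until the YARN application terminates
}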
private KafkaSinglePortStringInputOperator createOperator(boolean isIdempotency) {
  Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
  attributeMap.put(Context.OperatorContext.SPIN_MILLIS, 500);
  attributeMap.put(Context.DAGContext.APPLICATION_PATH, testMeta.baseDir);
  testMeta.context = mockOperatorContext(1, attributeMap);

  testMeta.operator = new KafkaSinglePortStringInputOperator();
  KafkaConsumer consumer = new SimpleKafkaConsumer();
  consumer.setTopic(TEST_TOPIC);
  consumer.setInitialOffset("earliest");

  if (isIdempotency) {
    // idempotent replay: persist per-window state under the recovery directory
    FSWindowDataManager storageManager = new FSWindowDataManager();
    storageManager.setStatePath(testMeta.recoveryDir);
    testMeta.operator.setWindowDataManager(storageManager);
  }

  testMeta.operator.setConsumer(consumer);
  testMeta.operator.setZookeeper("localhost:" + KafkaOperatorTestBase.TEST_ZOOKEEPER_PORT[0]);
  testMeta.operator.setMaxTuplesPerWindow(500);

  List<Partitioner.Partition<AbstractKafkaInputOperator<KafkaConsumer>>> partitions =
      new LinkedList<Partitioner.Partition<AbstractKafkaInputOperator<KafkaConsumer>>>();
  Collection<Partitioner.Partition<AbstractKafkaInputOperator<KafkaConsumer>>> newPartitions =
      testMeta.operator.definePartitions(partitions, new StatelessPartitionerTest.PartitioningContextImpl(null, 0));
  Assert.assertEquals(1, newPartitions.size());

  KafkaSinglePortStringInputOperator operator =
      (KafkaSinglePortStringInputOperator)newPartitions.iterator().next().getPartitionedInstance();
  testMeta.sink = new CollectorTestSink<Object>();
  // attach the same sink to both the template operator and the partitioned instance
  testMeta.operator.outputPort.setSink(testMeta.sink);
  operator.outputPort.setSink(testMeta.sink);
  return operator;
}
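A sketch of how this helper might be driven in a test; the activate/deactivate calls assume the Kafka input operator is an ActivationListener, and the three-window loop is illustrative:

// Hypothetical test body built on createOperator(); assumes messages have
// already been published to TEST_TOPIC.
KafkaSinglePortStringInputOperator operator = createOperator(true); // idempotent variant
operator.setup(testMeta.context);
operator.activate(testMeta.context);   // starts the consumer (assumed lifecycle)
for (long windowId = 1; windowId <= 3; windowId++) {
  operator.beginWindow(windowId);
  operator.emitTuples();
  operator.endWindow();
}
operator.deactivate();
operator.teardown();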
@Test
public void TestSplunkInputOperator() {
  SplunkStore store = new SplunkStore();
  store.setHost(HOST);
  store.setPassword(PASSWORD);
  store.setPort(PORT);
  store.setUserName(USER_NAME);

  AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
  attributeMap.put(DAG.APPLICATION_ID, APP_ID);
  OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);

  TestInputOperator inputOperator = new TestInputOperator();
  inputOperator.setStore(store);
  inputOperator.setEarliestTime("-1000h");
  inputOperator.setLatestTime("now");

  CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
  inputOperator.outputPort.setSink(sink);

  inputOperator.setup(context);
  inputOperator.beginWindow(0);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  Assert.assertEquals("rows from splunk", 100, sink.collectedTuples.size());
}