/**
 * Thin delegation to {@link DataStream#writeAsText(java.lang.String, WriteMode)}
 * on the wrapped stream.
 *
 * @param path The path pointing to the location the text file is written to
 * @param mode Controls the behavior for existing files. Options are
 *     NO_OVERWRITE and OVERWRITE.
 */
@PublicEvolving
public void write_as_text(String path, WriteMode mode) {
    // Forward directly to the underlying DataStream; no additional logic here.
    stream.writeAsText(path, mode);
}
/**
 * Thin delegation to {@link DataStream#writeAsText(java.lang.String)} on the
 * wrapped stream, using the default write mode.
 *
 * @param path The path pointing to the location the text file is written to.
 */
@PublicEvolving
public void write_as_text(String path) {
    // Forward directly to the underlying DataStream; no additional logic here.
    stream.writeAsText(path);
}
aggregated.writeAsText(params.get("output")); } else { System.out.println("Printing result to stdout. Use --output to specify output path.");
} else { alerts .writeAsText(outputFile, FileSystem.WriteMode.OVERWRITE) .setParallelism(1);
counts.writeAsText(params.get("output")); } else { System.out.println("Printing result to stdout. Use --output to specify output path.");
.writeAsText(resultPath, FileSystem.WriteMode.OVERWRITE);
counts.writeAsText(params.get("output")); } else { System.out.println("Printing result to stdout. Use --output to specify output path.");
tweets.writeAsText(params.get("output")); } else { System.out.println("Printing result to stdout. Use --output to specify output path.");
counts.writeAsText(params.get("output")); rejectedWords.writeAsText(params.get("rejected-words-output")); } else { System.out.println("Printing result to stdout. Use --output to specify output path.");
public static void main(String[] args) throws Exception { // Checking input parameters final ParameterTool params = ParameterTool.fromArgs(args); StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); DataStream<Integer> trainingData = env.addSource(new FiniteTrainingDataSource()); DataStream<Integer> newData = env.addSource(new FiniteNewDataSource()); // build new model on every second of new data DataStream<Double[]> model = trainingData .assignTimestampsAndWatermarks(new LinearTimestamp()) .timeWindowAll(Time.of(5000, TimeUnit.MILLISECONDS)) .apply(new PartialModelBuilder()); // use partial model for newData DataStream<Integer> prediction = newData.connect(model).map(new Predictor()); // emit result if (params.has("output")) { prediction.writeAsText(params.get("output")); } else { System.out.println("Printing result to stdout. Use --output to specify output path."); prediction.print(); } // execute program env.execute("Streaming Incremental Learning"); }
numbers.writeAsText(params.get("output")); } else { System.out.println("Printing result to stdout. Use --output to specify output path.");
@Test
public void testProgram() throws Exception {
    // Write the word counts to a temp directory and verify the file contents.
    String resultPath = getTempDirPath("result");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<String> lines = env.fromElements(WordCountData.TEXT);

    DataStream<Tuple2<String, Integer>> wordCounts =
            lines.flatMap(new Tokenizer())
                    .keyBy(0)
                    .sum(1);

    wordCounts.writeAsText(resultPath);

    env.execute("WriteAsTextTest");

    compareResultsByLinesInMemory(WordCountData.STREAMING_COUNTS_AS_TUPLES, resultPath);
}
topSpeeds.writeAsText(params.get("output")); } else { System.out.println("Printing result to stdout. Use --output to specify output path.");
public static void main(String[] args) throws Exception { ParameterTool params = ParameterTool.fromArgs(args); String outputPath = params.getRequired("outputPath"); int recordsPerSecond = params.getInt("recordsPerSecond", 10); int duration = params.getInt("durationInSecond", 60); int offset = params.getInt("offsetInSecond", 0); StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment(); sEnv.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime); sEnv.enableCheckpointing(4000); sEnv.getConfig().setAutoWatermarkInterval(1000); // execute a simple pass through program. PeriodicSourceGenerator generator = new PeriodicSourceGenerator( recordsPerSecond, duration, offset); DataStream<Tuple> rows = sEnv.addSource(generator); DataStream<Tuple> result = rows .keyBy(1) .timeWindow(Time.seconds(5)) .sum(0); result.writeAsText(outputPath + "/result.txt", FileSystem.WriteMode.OVERWRITE) .setParallelism(1); sEnv.execute(); }
@Override
protected void testProgram() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Build the edge-stream graph from the test input.
    DataStream<Edge<Long, NullValue>> edgeStream = getGraphStream(env);
    GraphStream<Long, NullValue, NullValue> graph = new SimpleEdgeStream<>(edgeStream, env);

    // Run connected components with a merge-window parameter of 5.
    DataStream<DisjointSet<Long>> components =
            graph.aggregate(new ConnectedComponents<Long, NullValue>(5));

    components.writeAsText(resultPath);

    env.execute("Streaming Connected ComponentsCheck");
} }
@Override
protected void testProgram() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Build the edge-stream graph from the test input.
    DataStream<Edge<Long, NullValue>> edgeStream = getGraphStream(env);
    GraphStream<Long, NullValue, NullValue> graph = new SimpleEdgeStream<>(edgeStream, env);

    // Run the bipartiteness check with a window of 500.
    DataStream<Candidates> candidates =
            graph.aggregate(new BipartitenessCheck<Long, NullValue>((long) 500));

    candidates.writeAsText(resultPath);

    env.execute("Bipartiteness check");
} }
@Override
protected void testProgram() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Build the edge-stream graph from the test input.
    DataStream<Edge<Long, NullValue>> edgeStream = getGraphStream(env);
    GraphStream<Long, NullValue, NullValue> graph = new SimpleEdgeStream<>(edgeStream, env);

    // Run the bipartiteness check with a window of 500 on a non-bipartite input.
    DataStream<Candidates> candidates =
            graph.aggregate(new BipartitenessCheck<Long, NullValue>((long) 500));

    candidates.writeAsText(resultPath);

    env.execute("Non Bipartiteness check");
} }
@Test
public void testUnboundedPrimitiveTypeSourceAndReturnTuple() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Source of 5 random words; the close delay keeps the source open long enough.
    DataStream<String> words = env.addSource(new RandomWordSource(5).closeDelay(1500));

    // Pass the words through a trivial Siddhi CQL query and get tuples back.
    DataStream<Tuple1<String>> selected =
            SiddhiCEP.define("wordStream", words, "words")
                    .cql("from wordStream select words insert into outputStream")
                    .returns("outputStream");

    String resultPath = tempFolder.newFile().toURI().toString();
    selected.writeAsText(resultPath, FileSystem.WriteMode.OVERWRITE);
    env.execute();

    // One output line per source record.
    assertEquals(5, getLineCount(resultPath));
}
@Test
public void testUnboundedTupleSourceAndReturnTuple() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Source of 5 random tuples, keyed by the name field (position 1).
    DataStream<Tuple4<Integer, String, Double, Long>> tuples =
            env.addSource(new RandomTupleSource(5).closeDelay(1500)).keyBy(1);

    // Reorder the fields through a Siddhi CQL query and get tuples back.
    DataStream<Tuple4<Long, Integer, String, Double>> reordered =
            SiddhiCEP.define("inputStream", tuples, "id", "name", "price", "timestamp")
                    .cql("from inputStream select timestamp, id, name, price insert into outputStream")
                    .returns("outputStream");

    String resultPath = tempFolder.newFile().toURI().toString();
    reordered.writeAsText(resultPath, FileSystem.WriteMode.OVERWRITE);
    env.execute();

    // One output line per source record.
    assertEquals(5, getLineCount(resultPath));
}
@Test
public void testCustomizeSiddhiFunctionExtension() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<Event> events = env.addSource(new RandomEventSource(5));

    // Register a user-defined Siddhi function before building the query.
    SiddhiCEP cep = SiddhiCEP.getSiddhiEnvironment(env);
    cep.registerExtension("custom:plus", CustomPlusFunctionExtension.class);

    // Use the custom extension inside the CQL query and return rows as maps.
    DataStream<Map<String, Object>> doubled =
            cep.from("inputStream", events, "id", "name", "price", "timestamp")
                    .cql("from inputStream select timestamp, id, name, custom:plus(price,price) as doubled_price insert into outputStream")
                    .returnAsMap("outputStream");

    String resultPath = tempFolder.newFile().toURI().toString();
    doubled.writeAsText(resultPath, FileSystem.WriteMode.OVERWRITE);
    env.execute();

    // One output line per source record.
    assertEquals(5, getLineCount(resultPath));
}