/**
 * Submits the supplied DAG for execution using default job settings.
 * Jet starts running the job asynchronously; use the returned {@link Job}
 * handle to track or join it.
 *
 * @param dag the DAG describing the computation to run
 * @return the newly submitted job
 */
@Nonnull
default Job newJob(@Nonnull DAG dag) {
    JobConfig defaultConfig = new JobConfig();
    return newJob(dag, defaultConfig);
}
/**
 * Submits the supplied pipeline for execution. The pipeline is first
 * compiled into a DAG and then submitted with default job settings;
 * execution starts asynchronously.
 *
 * @param pipeline the pipeline describing the computation to run
 * @return the newly submitted job
 */
@Nonnull
default Job newJob(@Nonnull Pipeline pipeline) {
    DAG compiledDag = pipeline.toDag();
    return newJob(compiledDag);
}
private long measure() throws InterruptedException, ExecutionException { System.out.print("\nCounting words... "); final Map<String, Long> counts = new ConcurrentHashMap<>(); final Job job = jet.newJob(buildDag(counts)); long start = System.nanoTime(); job.join(); final long took = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start); System.out.print("done in " + took + " milliseconds."); // printResults(counts); return took; }
/**
 * Builds the pipeline, runs it to completion, and prints the total
 * elapsed time. The timer starts before the pipeline is built, so the
 * reported duration includes both construction and execution.
 */
public void go() {
    System.out.print("\nStarting up... ");
    long startedAt = System.nanoTime();
    Pipeline pipeline = buildPipeline();
    jet.newJob(pipeline).join();
    long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startedAt);
    System.out.println("done in " + elapsedMs + " milliseconds.");
}
/**
 * Submits the index-building pipeline, waits for it to finish, and
 * reports how long execution took (submission is not included in the
 * measured interval).
 */
private void buildInvertedIndex() {
    Job indexingJob = jet.newJob(createPipeline());
    long t0 = System.nanoTime();
    indexingJob.join();
    long elapsedMillis = NANOSECONDS.toMillis(System.nanoTime() - t0);
    System.out.println("Indexing took " + elapsedMillis + " milliseconds.");
}
/**
 * Submits the index-building DAG, waits for it to finish, and reports
 * how long execution took (submission is not included in the measured
 * interval).
 */
private void buildInvertedIndex() {
    Job indexingJob = jet.newJob(createDag());
    long t0 = System.nanoTime();
    indexingJob.join();
    long elapsedMillis = NANOSECONDS.toMillis(System.nanoTime() - t0);
    System.out.println("Indexing took " + elapsedMillis + " milliseconds.");
}
/**
 * Builds the pipeline and runs it on the given Jet instance, blocking
 * until the job completes.
 *
 * @param jet the Jet instance to submit the job to
 */
private static void runPipeline(JetInstance jet) {
    System.out.println("\nRunning the pipeline ");
    Pipeline pipeline = buildPipeline();
    jet.newJob(pipeline).join();
}
}
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    JetInstance jet = Jet.newJetInstance();
    // Start a second member so the job runs on a two-node cluster;
    // its handle is not needed, Jet.shutdownAll() stops it too.
    Jet.newJetInstance();
    try {
        // Submit without joining: the job keeps running while this
        // thread sleeps for the demo duration.
        jet.newJob(buildPipeline());
        Thread.sleep(JOB_DURATION_MS);
    } finally {
        Jet.shutdownAll();
    }
}
public static void main(String[] args) {
    JetInstance jet = Jet.newJetInstance();
    Pipeline modelPipeline = buildPipeline();
    System.out.println("Generating model...");
    try {
        // Run to completion, then print the resulting model.
        jet.newJob(modelPipeline).join();
        printTransitionsAndMarkovChain(jet);
    } finally {
        Jet.shutdownAll();
    }
}
/**
 * Prepares the demo data, runs the pipeline to completion, and always
 * shuts down all Jet instances afterwards.
 */
private void go() {
    try {
        setup();
        jet.newJob(buildPipeline()).join();
    } finally {
        Jet.shutdownAll();
    }
}
/**
 * Prepares the environment, runs the pipeline against the configured
 * connection, prints the resulting map entries, and always cleans up.
 */
private void go() throws Exception {
    try {
        setup();
        Pipeline pipeline = buildPipeline(connectionUrl());
        jet.newJob(pipeline).join();
        // Dump every value the job wrote into the result map.
        jet.getMap(MAP_NAME).values().forEach(System.out::println);
    } finally {
        cleanup();
    }
}
/**
 * Creates the sample Avro file, starts a Jet member, and runs the
 * pipeline configured with the Hadoop job settings; all Jet instances
 * are shut down afterwards.
 */
private void go() throws Exception {
    try {
        createAvroFile();
        JetInstance jet = Jet.newJetInstance();
        JobConf hadoopConfig = createJobConfig();
        jet.newJob(buildPipeline(hadoopConfig)).join();
    } finally {
        Jet.shutdownAll();
    }
}
/**
 * Prepares the environment, runs the pipeline against the configured
 * connection, prints the resulting table, and always cleans up.
 */
private void go() throws Exception {
    try {
        setup();
        Pipeline pipeline = buildPipeline(connectionUrl());
        jet.newJob(pipeline).join();
        printTable();
    } finally {
        cleanup();
    }
}
public static void main(String[] args) {
    JetInstance jet = Jet.newJetInstance();
    Pipeline telemetryPipeline = buildPipeline();
    // Print a line whenever a new aircraft appears in either result map.
    addListener(jet.getMap(TAKE_OFF_MAP), a -> System.out.println("New aircraft taking off: " + a));
    addListener(jet.getMap(LANDING_MAP), a -> System.out.println("New aircraft landing " + a));
    try {
        JobConfig config = new JobConfig()
                .setName("FlightTelemetry")
                .setProcessingGuarantee(ProcessingGuarantee.EXACTLY_ONCE);
        jet.newJob(telemetryPipeline, config).join();
    } finally {
        Jet.shutdownAll();
    }
}
/**
 * HTTP endpoint that submits a trivial source-to-logger pipeline and
 * blocks until the job completes. The job config ships the classes the
 * cluster members need to run it.
 */
@RequestMapping("/submitJob")
public void submitJob() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(CustomSourceP.customSource())
            .drainTo(Sinks.logger());
    JobConfig config = new JobConfig()
            .addClass(SpringBootSample.class)
            .addClass(CustomSourceP.class);
    instance.newJob(pipeline, config).join();
}
/**
 * Submits a two-vertex DAG job that copies one IMap into another, with
 * the edge's queue size capped at the given value.
 *
 * @param instance  the Jet instance to run the copy job on
 * @param queueSize queue size for the edge between reader and writer
 * @param sourceMap name of the map to read from
 * @param targetMap name of the map to write into
 * @return a future that completes when the copy job finishes
 */
public static CompletableFuture<Void> copyMapUsingJob(JetInstance instance, int queueSize,
                                                      String sourceMap, String targetMap) {
    DAG dag = new DAG();
    Vertex reader = dag.newVertex("readMap(" + sourceMap + ')', readMapP(sourceMap));
    Vertex writer = dag.newVertex("writeMap(" + targetMap + ')', writeMapP(targetMap));
    EdgeConfig edgeConfig = new EdgeConfig().setQueueSize(queueSize);
    dag.edge(between(reader, writer).setConfig(edgeConfig));
    JobConfig jobConfig = new JobConfig().setName("copy-" + sourceMap + "-to-" + targetMap);
    return instance.newJob(dag, jobConfig).getFuture();
}
}
/**
 * Submits a streaming job that drains the source map's event journal
 * into a list sink. The job never completes on its own since the
 * journal source is unbounded.
 *
 * @return the submitted job
 */
private Job newJob() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.mapJournal(SOURCE_NAME, START_FROM_OLDEST))
            .withoutTimestamps()
            .drainTo(Sinks.list(SINK_NAME));
    JobConfig config = new JobConfig().setName("job-infinite-pipeline");
    return jet.newJob(pipeline, config);
}
/**
 * Runs a pipeline built from a custom source and a custom sink
 * processor and waits for it to finish; the timeout guards against
 * the job hanging.
 */
@Test(timeout = 20000)
public void test() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.batchFromProcessor("source", preferLocalParallelismOne(CustomSourceP::new)))
            .drainTo(Sinks.fromProcessor("sink", preferLocalParallelismOne(CustomSinkP::new)));
    jetInstance.newJob(pipeline).join();
}