@Override
public void configure() throws Exception {
    final String kafkaUri = kafkaConnectionInformation.kafkaUri();

    // Producer side: serialize an INDArray to base64 text and publish it to Kafka.
    from("direct:start").process(new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            final INDArray arr = (INDArray) exchange.getIn().getBody();
            // Serialize the array with ND4J's binary writer, then base64-encode
            // the bytes so the payload travels through Kafka as plain text.
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(bos);
            Nd4j.write(arr, dos);
            byte[] bytes = bos.toByteArray();
            String base64 = Base64.encodeBase64String(bytes);
            exchange.getIn().setBody(base64, String.class);
            // Random record key; also used as the partition key so the two stay in sync.
            String id = UUID.randomUUID().toString();
            exchange.getIn().setHeader(KafkaConstants.KEY, id);
            exchange.getIn().setHeader(KafkaConstants.PARTITION_KEY, id);
        }
    }).to(kafkaUri);

    // Consumer side: decode the base64 payload back into an INDArray.
    from(kafkaUri).process(new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            byte[] body2 = (byte[]) exchange.getIn().getBody();
            // FIX: decode with an explicit charset instead of the platform default.
            // Base64 text is pure ASCII, so UTF-8 is always correct and the route
            // behaves identically on every JVM regardless of file.encoding.
            String body = new String(body2, java.nio.charset.StandardCharsets.UTF_8);
            INDArray arr = Nd4jBase64.fromBase64(body);
            exchange.getIn().setBody(arr);
        }
    }).to("direct:receive");
}
}
public void configure() throws Exception {
    // Failed exchanges are parked on a dead-letter queue for later inspection.
    errorHandler(deadLetterChannel("seda:errors"));

    // We poll the atom feed from the source for further processing in the seda queue.
    // consumer.delay=100 waits 100 milliseconds between polls.
    // Using splitEntries=true will during polling only fetch one RSS Entry at any given time.
    from("rss:" + VERTX_BLOG_ATOM + "?splitEntries=true&consumer.delay=100").to("seda:feeds");

    from("seda:feeds")
        // Filter: keep only entries the "filterService" bean classifies as a release
        .filter().method("filterService", "isRelease")
        // Transform (extract): pull out the first entry's title
        .transform(simple("${body.entries[0].title}"))
        // Output
        .to("seda:announce");
}
};
@Override
public void configure() throws Exception {
    // Move every file dropped into the inbox directory over to the outbox directory.
    final String inbox = "file://target/inbox";
    final String outbox = "file://target/outbox";
    from(inbox).to(outbox);
}
@Override
public void configure() {
    // Pick the query separator depending on whether the server URI already
    // carries query parameters of its own.
    final char separator = serverUri.contains("?") ? '&' : '?';

    // Netty consumer endpoint with the configured sync flag, the XML frame /
    // string decoders, and the string encoder looked up from the registry.
    final String endpoint = "netty:" + serverUri + separator
            + "sync=" + syncFlag
            + "&decoders=#" + xmlFrameDecoderName + ",#" + xmlStringDecoderName
            + "&encoders=#" + stringEncoderName;

    from(endpoint).to(toUri);
}
});
@Override
public void configure() throws Exception {
    // Pick up files from target/inbox and deliver them into target/foo.
    final String source = "file:target/inbox";
    final String destination = "file:target/foo";
    from(source).to(destination);
}
@Override public void configure() throws Exception { from("timer:foo?period=2000") // need to use the IP address of the wildfly-swarm application .to("http4://{{swarm.ip}}:8080") .log("${body}"); } }
@Override public void configure() throws Exception { from(input) // lookup bean with name helloBean .bean("helloBean") .to(output); }
public void configure() throws Exception {
    // Plain routes: deliver straight to a named ksession / dynamic kie endpoint.
    from("direct:test-with-session").to("kie-local://ksession1");
    from("direct:test-no-session").to("kie-local://dynamic");

    // Header variants: same targets, but the results are additionally captured
    // on a mock endpoint so tests can assert on them.
    from("direct:test-with-session-withHeader")
            .to("kie-local://ksession1")
            .to("mock:resultWithHeader");
    from("direct:test-no-session-withHeader")
            .to("kie-local://dynamic")
            .to("mock:resultWithHeader");
}
};
public void configure() {
    // Invoke the DEMO_ADD stored procedure: the two IN parameters are taken
    // from the message body map (keys "a" and "b"); OUT parameter c holds the result.
    final String procedureCall =
            "sql-stored-connector:DEMO_ADD( INTEGER ${body[a]}, INTEGER ${body[b]}, OUT INTEGER c)";
    from("direct:start").to(procedureCall);
}
};
public void configure() { // set up the transform bean MyTransform transform = new MyTransform(); transform.setPrefix("JavaDSL"); from("timer://javaTimer?fixedRate=true&period=2000") .bean(transform, "transform") .to("log:ExampleRouter"); }
@Override
public void configure() throws Exception {
    // Query the database's current time (Oracle SYSDATE) and log the SYSDATE
    // column of each resulting row individually.
    final String query = "SELECT SYSDATE FROM DUAL";

    from("ref:sampleSql")
            .setBody(constant(query))
            .to("jdbc:hades_PC1_PC2")
            .split(body())
            .log(LoggingLevel.INFO, SampleSqlRouteBuilder.class.getName(), "date: ${body[SYSDATE]}");
}
}
// Error route for this message-bus destination: run the failure through the
// ErrorProcessor and reply on the default message-bus response destination.
protected void errorHandler() {
    from("direct:error_" + messageBusDestination)
        .process(new ErrorProcessor())
        // Echo the original correlationId exchange property back as the
        // "responseId" header so the caller can match the response.
        // NOTE(review): property() is deprecated in newer Camel releases in
        // favor of exchangeProperty() — confirm the Camel version before upgrading.
        .setHeader("responseId", property("correlationId"))
        .to("liferay:" + DestinationNames.MESSAGE_BUS_DEFAULT_RESPONSE);
}
@Override
public void configure() throws Exception {
    // Jackson data format bound to the worker configuration class; shared by
    // both the load route and the save route below.
    final JacksonDataFormat workerConfigFormat = new JacksonDataFormat(MHWWorkerConfig.class);

    // Load route: poll worker.cfg every 10 seconds (noop=true leaves the file
    // in place), deserialize it and hand it to this bean's setConfig method.
    from("file://" + lmhRoot + "?fileName=worker.cfg&delay=10000&noop=true")
            .unmarshal(workerConfigFormat)
            .bean(method(this, "setConfig"));

    // Save route: serialize the current configuration back to worker.cfg on demand.
    from("direct:saveConfig")
            .marshal(workerConfigFormat)
            .to("file://" + lmhRoot + "?fileName=worker.cfg");
}