/**
 * Returns the configuration object associated with this verticle's deployment.
 * <p>
 * The configuration is supplied via the deployment options when the verticle
 * is deployed; this simply delegates to the verticle's context.
 *
 * @return the deployment configuration
 */
public JsonObject config() {
  return context.config();
}
// NOTE(review): this fragment appears truncated — the method body (and its closing
// braces) continues beyond the visible source; only the config lookup is shown here.
// Reads the deployment configuration from the calling context at start-up.
@Override public void start() throws Exception { JsonObject config = Vertx.currentContext().config();
@Override public void start() throws Exception { // If a config file is set, read the host and port. String host = Vertx.currentContext().config().getString("host"); if (host == null) { host = "127.0.0.1"; } // Create the redis client final RedisClient client = RedisClient.create(vertx, new RedisOptions().setHost(host)); client.set("key", "value", r -> { if (r.succeeded()) { System.out.println("key stored"); client.get("key", s -> { System.out.println("Retrieved value: " + s.result()); }); } else { System.out.println("Connection or Operation Failed " + r.cause()); } }); } }
@Override public void start() throws Exception { processArgs = context.processArgs(); conf = context.config(); // if (Thread.currentThread().getContextClassLoader() != getClass().getClassLoader()) { // throw new IllegalStateException("Wrong tccl!"); // } vertx.eventBus().send("testcounts", new JsonObject().put("deploymentID", context.deploymentID()).put("count", instanceCount.incrementAndGet())); }
/**
 * Starts the verticle, simulating the configured failure mode: throws on
 * THROW_EXCEPTION / THROW_ERROR, otherwise records a successful start and
 * captures the current context, deployment ID and config for the test.
 */
@Override
public void start() throws Exception {
  switch (startAction) {
    case THROW_EXCEPTION:
      throw new Exception("FooBar!");
    case THROW_ERROR:
      throw new Error("FooBar!");
    default:
      startCalled = true;
      startContext = Vertx.currentContext();
      break;
  }
  // Only reachable on the non-throwing path.
  deploymentID = Vertx.currentContext().deploymentID();
  config = context.config();
}
/**
 * Accessor for the verticle's deployment configuration.
 * <p>
 * The value originates from the deployment options used when this verticle
 * was deployed and is obtained from the verticle context.
 *
 * @return the configuration for this deployment
 */
public JsonObject config() {
  return context.config();
}
/**
 * Initializes the transport: the endpoint is handed over through the
 * deployment config under {@code AbstractTransport.ENDPOINT_KEY}, and its
 * address is cached as a URI endpoint object.
 */
@Override
public void init(Vertx vertx, Context context) {
  super.init(vertx, context);
  Object configuredEndpoint = context.config().getValue(AbstractTransport.ENDPOINT_KEY);
  this.endpoint = (Endpoint) configuredEndpoint;
  this.endpointObject = (URIEndpointObject) this.endpoint.getAddress();
}
/**
 * Initializes this verticle, pulling the transport endpoint out of the
 * deployment config and deriving its URI-based address object.
 */
@Override
public void init(Vertx vertx, Context context) {
  super.init(vertx, context);
  // Endpoint is passed in via the deployment config rather than the constructor.
  this.endpoint = (Endpoint) context.config().getValue(AbstractTransport.ENDPOINT_KEY);
  this.endpointObject = (URIEndpointObject) this.endpoint.getAddress();
}
@Override public void start() throws Exception { processArgs = context.processArgs(); conf = context.config(); // if (Thread.currentThread().getContextClassLoader() != getClass().getClassLoader()) { // throw new IllegalStateException("Wrong tccl!"); // } vertx.eventBus().send("testcounts", new JsonObject().put("deploymentID", context.deploymentID()).put("count", instanceCount.incrementAndGet())); }
/**
 * Start hook used by tests to exercise failure modes: depending on
 * {@code startAction} it throws an Exception or an Error; otherwise it marks
 * the start as successful and records context, deployment ID and config.
 */
@Override
public void start() throws Exception {
  switch (startAction) {
    case THROW_EXCEPTION:
      throw new Exception("FooBar!");
    case THROW_ERROR:
      throw new Error("FooBar!");
    default:
      // Normal path: remember that start ran and where.
      startCalled = true;
      startContext = Vertx.currentContext();
      break;
  }
  deploymentID = Vertx.currentContext().deploymentID();
  config = context.config();
}
/**
 * If the context is associated with a Verticle deployment, this returns the
 * configuration that was specified when the verticle was deployed.
 *
 * @return the configuration of the deployment, or null if not a Verticle deployment
 */
public JsonObject config() {
  // Delegate straight through; no transformation is applied.
  return delegate.config();
}
@Inject public JsonConfigObjectMapperConfigurator(Vertx vertx) { config = vertx.getOrCreateContext().config(); config = config.getJsonObject("jersey", config).getJsonObject("jackson", config); }
/**
 * Initialization hook: reads the configured transport endpoint from the
 * context config and caches both the endpoint and its URI address object.
 */
@Override
public void init(Vertx vertx, Context context) {
  super.init(vertx, context);
  Object endpointFromConfig = context.config().getValue(AbstractTransport.ENDPOINT_KEY);
  this.endpoint = (Endpoint) endpointFromConfig;
  this.endpointObject = (URIEndpointObject) this.endpoint.getAddress();
}
private void configureVertx(Vertx vertx) { JsonObject config = vertx.getOrCreateContext().config(); // prevent exception -> http://stackoverflow.com/questions/19840056/failed-to-detect-a-valid-hadoop-home-directory System.setProperty("hadoop.home.dir", "/"); config.put(ConfigConstants.STORAGE_HDFS_PATH, hdfsLocalRoot); config.put(ConfigConstants.STORAGE_HDFS_DEFAULT_FS, hdfsAdress); }
/**
 * Wires up this instance from the deployment context: the endpoint arrives
 * through the config map and its address is narrowed to a URI endpoint.
 */
@Override
public void init(Vertx vertx, Context context) {
  super.init(vertx, context);
  this.endpoint = (Endpoint) context.config().getValue(AbstractTransport.ENDPOINT_KEY);
  // The address carried by the endpoint is known to be URI-based here.
  this.endpointObject = (URIEndpointObject) this.endpoint.getAddress();
}
/**
 * Stubs the mocked Vert.x context so that {@code getOrCreateContext()} yields
 * a context whose config wraps the test's cassandra settings.
 */
@Before
public void setUp() throws Exception {
  JsonObject wrappedConfig = new JsonObject().put("cassandra", config);
  when(vertx.getOrCreateContext()).thenReturn(context);
  when(context.config()).thenReturn(wrappedConfig);
}
/**
 * Publishes whether the deployment config carries the exact expected values
 * on the "moduleStarted" address.
 * <p>
 * The {@code getString(...).equals(...)} order is deliberate: a missing key
 * raises an NPE here, matching the original fail-fast behavior.
 */
@Override
public void start() throws Exception {
  JsonObject conf = Vertx.currentContext().config();
  boolean configMatches =
      conf.getString("foo").equals("wibble")
          && conf.getString("quux").equals("blah")
          && conf.getString("socks").equals("eeek");
  vertx.eventBus().publish("moduleStarted", configMatches);
}
/**
 * Deploys a verticle via the shell command with inline JSON deployment
 * options, then verifies the deployment succeeded, a single instance started,
 * and the config object was delivered intact.
 */
@Test
public void testDeployWithOptionsAsJsonConfig(TestContext context) {
  String cmd = "verticle-deploy io.vertx.ext.shell.command.base.DeployVerticleTest$SomeVerticle '{\"config\":{\"ok\":true}}'";
  String result = testDeployCmd(context, cmd);
  context.assertNotNull(ctx.get());
  // Fix: TestContext.assertEquals takes (expected, actual); the original passed
  // them reversed here — inconsistent with the assertEquals(1, ...) below and
  // producing a misleading message on failure.
  context.assertEquals("Deployed " + ctx.get().deploymentID(), result);
  context.assertEquals(1, ctx.get().getInstanceCount());
  context.assertNotNull(ctx.get().config());
  context.assertTrue(ctx.get().config().containsKey("ok"));
  context.assertEquals(true, ctx.get().config().getBoolean("ok"));
}
public static void init(final Future<Void> startFuture, AbstractVerticle registrationObject, VxmsRoutes... routes) { // TODO to be used for build in REST and others final Vertx vertx = registrationObject.getVertx(); final JsonObject config = vertx.getOrCreateContext().config(); vertx.eventBus() .consumer(ConfigurationUtil.getServiceName(config, registrationObject.getClass()) + "-info", VxmsEndpoint::info); initEndpoint(startFuture, registrationObject, new VxmsShared(vertx, new LocalData(vertx)), routes); }
/**
 * Starts the verticle: converts the JSON deployment config into the
 * {@link Properties} shape Kafka expects, creates a shared producer, and
 * completes {@code startFuture} once a demo record has been written.
 */
@Override
public void start(Future<Void> startFuture) throws Exception {
  Properties kafkaConfig = new Properties();
  kafkaConfig.putAll(context.config().getMap());
  KafkaProducer<String, String> producer =
      KafkaProducer.createShared(vertx, "the-name", kafkaConfig);
  producer.write(
      KafkaProducerRecord.create("the_topic", "the_value"),
      ar -> startFuture.handle(ar.map((Void) null)));
}
}