// Start the HueMulator's embedded Spark server, then block the current thread
// until Spark's internal init latch is released, so routes are guaranteed to be
// live before execution continues.
theHueMulator.setupServer(); awaitInitialization();
/**
 * Cold-start initialization for the Lambda container: boots the embedded Spark
 * server, waits for it to be fully up, then wires Spark's filter into the
 * servlet context so incoming requests are dispatched to the Spark routes.
 * Cold-start duration is recorded via the Timer around the whole sequence.
 *
 * @throws ContainerInitializationException if the container cannot be initialized
 */
@Override
public void initialize() throws ContainerInitializationException {
    Timer.start("SPARK_COLD_START");
    log.debug("First request, getting new server instance");
    // trying to call init in case the embedded server had not been initialized.
    Spark.init();
    // adding this call to make sure that the framework is fully initialized. This should address a race
    // condition and solve GitHub issue #71.
    Spark.awaitInitialization();
    embeddedServer = lambdaServerFactory.getServerInstance();
    // manually add the spark filter to the chain. This should be the last one and match all uris
    FilterRegistration.Dynamic sparkRegistration =
            getServletContext().addFilter("SparkFilter", embeddedServer.getSparkFilter());
    sparkRegistration.addMappingForUrlPatterns(EnumSet.of(DispatcherType.REQUEST), true, "/*");
    Timer.stop("SPARK_COLD_START");
}
}
/**
 * Test-rule setup: starts an embedded Spark server on the configured port,
 * instantiates and initializes the application under test, and blocks until
 * Spark is fully initialized so the first test request cannot race startup.
 *
 * @throws Throwable if the application cannot be instantiated or initialized
 */
@Override
protected void before() throws Throwable {
    Spark.port(this.port);
    // Class.newInstance() is deprecated (JDK 9+) because it propagates checked
    // constructor exceptions unchecked; getDeclaredConstructor().newInstance()
    // is the documented replacement and wraps them in InvocationTargetException.
    this.sparkApplication = this.sparkApplicationClass.getDeclaredConstructor().newInstance();
    this.sparkApplication.init();
    // Block until Spark's init latch releases — routes are usable afterwards.
    Spark.awaitInitialization();
}
/**
 * Starts the metrics reporter, registers one query endpoint (POST /api/{name})
 * and one metadata endpoint (GET /api/{name}/meta) per configured collection,
 * then blocks until the embedded Spark server has finished initializing.
 */
public void run() {
    reporter.start();
    // Enhanced for-loop replaces the explicit Iterator: nothing is removed
    // during iteration, so the Iterator added only noise. Each 'entry' is a
    // fresh per-iteration local and therefore effectively final for the lambdas.
    for (Map.Entry<String, Collection> entry : collections.entrySet()) {
        post("/api/" + entry.getKey(),
                (request, response) -> dispatchQuery(request, response, entry.getValue()));
        get("/api/" + entry.getKey() + "/meta",
                (request, response) -> dispatchMeta(request, response, entry.getKey(), entry.getValue()));
    }
    // Block until Spark's internal init latch is released.
    awaitInitialization();
}
/**
 * Compares the source and destination directories, wires the comparison
 * results into the Spark routes, waits for the server to come up, and then
 * prints the address it is listening on.
 */
@Override
public void run() {
    final DirectoryComparator dirComparator = new DirectoryComparator(opts.src, opts.dst);
    dirComparator.compare();
    configureSpark(dirComparator, opts.defaultPort);
    // Block until the embedded Spark server is fully initialized.
    Spark.awaitInitialization();
    final String banner = String.format("Starting server: %s:%d", "http://127.0.0.1", opts.defaultPort);
    System.out.println(banner);
}
/**
 * Runs the directory diff and serves the result: compares src against dst,
 * configures the Spark routes with the comparator, blocks until the server is
 * ready, and announces the listen address on stdout.
 */
@Override
public void run() {
    final DirectoryComparator cmp = new DirectoryComparator(opts.src, opts.dst);
    cmp.compare();
    configureSpark(cmp, opts.defaultPort);
    // Wait for Spark's init latch before claiming the server is up.
    Spark.awaitInitialization();
    System.out.println(
            String.format("Starting server: %s:%d", "http://127.0.0.1", opts.defaultPort));
}
// Block until the embedded Spark server is fully initialized, then print the
// heading that precedes the connection listing below.
awaitInitialization(); System.out.println("Active Connections");
/**
 * Boots the health-check HTTP endpoint: binds Spark to the configured port,
 * registers the routes via configure(), and blocks until the server is ready
 * before logging success.
 */
@Override
public void start() {
    Spark.port(applicationConfig.port());
    configure();
    // awaitInitialization() blocks until Spark's startup latch is released.
    Spark.awaitInitialization();
    LOGGER.info("Spark server started on port {} for healthCheck.", applicationConfig.port());
}
/**
 * Deploys the staged API locally: resolves stage variables and path handlers,
 * starts an embedded Spark server on {@code serverPort}, and keeps it running
 * until the user presses ENTER on stdin.
 *
 * @throws Exception if setup, server startup, or stdin reading fails
 */
@Override
protected void executeInternal() throws Exception {
    setupStageVariables();
    loadPathHandlers();
    port(serverPort);
    threadPool(8);
    setupServer();
    // Block until Spark's internal init latch is released.
    awaitInitialization();
    getLog().info(format("Server is alive on http://%s:%d/%s/", "127.0.0.1", this.serverPort, this.stageName));
    // Keep serving until a newline arrives on stdin (ENTER pressed).
    // '\n' replaces the magic number 10 — identical value, explicit intent.
    while ('\n' != System.in.read()) {
        Thread.sleep(500);
    }
    stop();
}
/**
 * Mojo entry point: prints the banner, starts an embedded Spark web server on
 * {@code serverPort}, and blocks until the user presses ENTER, then shuts the
 * server down. Any failure is wrapped at this boundary in a
 * MojoExecutionException with the original cause preserved.
 *
 * @throws MojoExecutionException if deployment or the serve loop fails
 */
@Override
public void execute() throws MojoExecutionException {
    try {
        printLogo();
        getLog().info(LOG_SEPERATOR);
        getLog().info("Starting web server at port " + serverPort);
        port(serverPort);
        threadPool(8);
        setUpServer();
        // Block until Spark's internal init latch is released.
        awaitInitialization();
        // Keep serving until a newline arrives on stdin (ENTER pressed).
        // '\n' replaces the magic number 10 — identical value, explicit intent.
        while ('\n' != System.in.read()) {
            Thread.sleep(500);
        }
        stop();
    } catch (Exception e) {
        throw new MojoExecutionException("Exception at deployment", e);
    }
}
/**
 * Initializes the embedded Spark mock server: clears prior state, boots Spark,
 * waits for it to be fully up, and records the running state under the monitor.
 *
 * NOTE(review): the {@code port} parameter is only stored in serverState here —
 * Spark's actual listen port appears to be configured elsewhere (presumably
 * inside clearState() or earlier setup); confirm against the callers.
 *
 * @param port the port recorded as the server's port in the shared state
 * @throws MockServerException wrapping any failure during startup
 */
void initServer(final int port) throws MockServerException {
    logger.debug("initServer called");
    try {
        clearState();
        Spark.init();
        // Blocks the current thread (using a CountDownLatch under the hood) until the server is fully initialised.
        Spark.awaitInitialization();
        // Publish the running state atomically with respect to other monitor users.
        synchronized (monitor) {
            serverState.setRunning(true);
            serverState.setPort(port);
        }
    } catch (Throwable ex) {
        throw new MockServerException(ex);
    }
}
/**
 * Configures and starts the embedded Spark server for the captain service:
 * binds host/port/thread pool from config, serves static assets, registers
 * the handler groups (write handlers only when not read-only), and blocks
 * until the server is fully initialized.
 */
public void start() {
    Spark.ipAddress(config.bindHost());
    Spark.port(config.bindPort());
    Spark.threadPool(config.threadNum());
    Spark.staticFileLocation("/static");
    // Registration order is significant for route matching — keep writable
    // handlers first, mirroring the original wiring.
    final boolean writable = !config.readonly();
    if (writable) {
        this.initWritableHandlers();
    }
    this.initReadonlyHandlers();
    this.initViewHandlers();
    this.initExceptionHandlers();
    // Wait for Spark's startup latch before announcing readiness.
    Spark.awaitInitialization();
    LOG.warn("captain server started");
}