@Override public synchronized void start() { // instantiate the producer producer = new KafkaProducer<String,byte[]>(kafkaProps); counter.start(); super.start(); }
@Override
public void stop() {
  // Announce shutdown, delegate to the base lifecycle, then report final metrics.
  logger.info("Null sink {} stopping...", this.getName());
  super.stop();
  logger.info("Null sink {} stopped. Event metrics: {}", this.getName(), counterGroup);
}
@Override public synchronized void start() { this.lastRolledMillis = System.currentTimeMillis(); counter.start(); // signal that this sink is ready to process LOG.info("Started DatasetSink " + getName()); super.start(); }
@Override
public void stop() {
  // Tear down the IRC connection before the base class flips state.
  logger.info("IRC sink {} stopping", getName());
  destroyConnection();
  super.stop();
  logger.debug("IRC sink {} stopped. Metrics:{}", getName(), counterGroup);
}
@Override
public void start() {
  // Pool that bounds how long individual HDFS calls may run.
  String timeoutName = "hdfs-" + getName() + "-call-runner-%d";
  callTimeoutPool = Executors.newFixedThreadPool(threadsPoolSize,
      new ThreadFactoryBuilder().setNameFormat(timeoutName).build());

  // Scheduled pool used for time-based file rolling.
  String rollerName = "hdfs-" + getName() + "-roll-timer-%d";
  timedRollerPool = Executors.newScheduledThreadPool(rollTimerPoolSize,
      new ThreadFactoryBuilder().setNameFormat(rollerName).build());

  // Cache of open writers bounded by maxOpenFiles.
  // NOTE(review): presumably WriterLinkedHashMap evicts beyond the bound — confirm.
  sfWriters = new WriterLinkedHashMap(maxOpenFiles);
  sinkCounter.start();
  super.start();
}
@Override
public void stop() {
  logger.info("Rpc sink {} stopping...", getName());
  destroyConnection();
  cxnResetExecutor.shutdown();
  try {
    // BUG FIX: the original called shutdownNow() when awaitTermination()
    // returned true — i.e. when the executor had ALREADY terminated cleanly —
    // and did nothing on timeout. The forced shutdown belongs on the timeout
    // path (awaitTermination() returning false).
    if (!cxnResetExecutor.awaitTermination(5, TimeUnit.SECONDS)) {
      cxnResetExecutor.shutdownNow();
    }
  } catch (InterruptedException ex) {
    logger.error("Interrupted while waiting for connection reset executor to shut down");
    // Preserve the interrupt status for callers further up the stack.
    Thread.currentThread().interrupt();
  }
  sinkCounter.stop();
  super.stop();
  logger.info("Rpc sink {} stopped. Metrics: {}", getName(), sinkCounter);
}
@Override public void start() { String timeoutName = "hive-" + getName() + "-call-runner-%d"; // call timeout pool needs only 1 thd as sink is effectively single threaded callTimeoutPool = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setNameFormat(timeoutName).build()); this.allWriters = Maps.newHashMap(); sinkCounter.start(); super.start(); setupHeartBeatTimer(); LOG.info(getName() + ": Hive Sink {} started", getName() ); }
@Override
public void stop() {
  // FIX: the original format string had a '{}' placeholder but no argument,
  // so the sink name was never logged.
  logger.info("ElasticSearch sink {} stopping", getName());
  // Release the client connection if one was opened.
  if (client != null) {
    client.close();
  }
  sinkCounter.incrementConnectionClosedCount();
  sinkCounter.stop();
  super.stop();
}
@Override
public void start() {
  logger.info("Starting {}...", this);
  // Tag the counter group with this sink's name before going live.
  counterGroup.setName(getName());
  super.start();
  logger.info("Null sink {} started.", getName());
}
@Override
public synchronized void stop() {
  // Flush and release the producer, then report final metrics.
  producer.close();
  counter.stop();
  logger.info("Kafka Sink {} stopped. Metrics: {}", getName(), counter);
  super.stop();
}
@Override
public void start() {
  logger.info("IRC sink starting");
  try {
    createConnection();
  } catch (Exception e) {
    logger.error("Unable to create irc client using hostname:"
        + hostname + " port:" + port + ". Exception follows.", e);
    /* Try to prevent leaking resources. */
    destroyConnection();
    /* FIXME: Mark ourselves as failed. */
    return;
  }
  super.start();
  logger.debug("IRC sink {} started", getName());
}
@Override public void stop() { // do not constrain close() calls with a timeout for (Entry<HiveEndPoint, HiveWriter> entry : allWriters.entrySet()) { try { HiveWriter w = entry.getValue(); w.close(); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } } // shut down all thread pools callTimeoutPool.shutdown(); try { while (callTimeoutPool.isTerminated() == false) { callTimeoutPool.awaitTermination( Math.max(DEFAULT_CALLTIMEOUT, callTimeout), TimeUnit.MILLISECONDS); } } catch (InterruptedException ex) { LOG.warn(getName() + ":Shutdown interrupted on " + callTimeoutPool, ex); } callTimeoutPool = null; allWriters.clear(); allWriters = null; sinkCounter.stop(); super.stop(); LOG.info("Hive Sink {} stopped", getName() ); }
// Interior of a start() method whose header is outside this view:
// log startup, start the sink counter, then delegate to the superclass.
logger.info("Starting {}...", this); sinkCounter.start(); super.start();
@Override
public synchronized void stop() {
  LOGGER.info("Morphline Sink {} stopping...", getName());
  try {
    // Stop the morphline handler if one was ever created.
    if (handler != null) {
      handler.stop();
    }
    sinkCounter.stop();
    // FIX: the original format string contained three '{}' placeholders
    // ("... stopped. Metrics: {}, {}") but only two arguments, leaving one
    // placeholder unfilled in the log output.
    LOGGER.info("Morphline Sink {} stopped. Metrics: {}", getName(), sinkCounter);
  } finally {
    // Always complete the base lifecycle transition, even if handler.stop() throws.
    super.stop();
  }
}
@Override
public void start() {
  // Refuse to start twice on the same instance.
  Preconditions.checkArgument(client == null,
      "Please call stop before calling start on an old instance.");
  sinkCounter.start();
  sinkCounter.incrementConnectionCreatedCount();
  client = initHBaseClient();
  super.start();
}
// Fragment of a stop() method (header outside this view): delegate to the
// superclass lifecycle.
super.stop();
@Override
public synchronized void start() {
  LOGGER.info("Starting Morphline Sink {} ...", this);
  sinkCounter.start();
  // Lazily instantiate the configured handler on first start.
  if (handler == null) {
    MorphlineHandler tmpHandler;
    try {
      // FIX: Class.newInstance() is deprecated since Java 9 — it bypasses
      // compile-time exception checking by propagating checked constructor
      // exceptions unwrapped. Use the no-arg Constructor instead; any failure
      // is still wrapped in ConfigurationException below.
      tmpHandler = (MorphlineHandler) Class.forName(handlerClass)
          .getDeclaredConstructor().newInstance();
    } catch (Exception e) {
      throw new ConfigurationException(e);
    }
    tmpHandler.configure(context);
    handler = tmpHandler;
  }
  super.start();
  LOGGER.info("Morphline Sink {} started.", getName());
}
/**
 * Stops the grid.
 */
@Override
public synchronized void stop() {
  // Close the Ignite instance if it was ever started.
  // IDIOM FIX: braces added to the single-statement 'if'.
  if (ignite != null) {
    ignite.close();
  }
  sinkCounter.incrementConnectionClosedCount();
  sinkCounter.stop();
  super.stop();
}
// Fragment of a start() method (header outside this view): delegate to the
// superclass lifecycle.
super.start();
// Fragment of a stop() method (header outside this view): drop the writer
// cache reference, stop the counter, then delegate to the superclass.
writerCache = null; sinkCounter.stop(); super.stop();