// Liveness-only monitor: performs no measurement, just logs that the
// monitoring cycle ran and asks to be kept scheduled.
@Override
public boolean doMonitor(ServiceEmitter emitter)
{
  log.info("I am watching...");
  return true;
}
}
// Lifecycle hook: reflectively invokes the registered start method on
// the managed object `o`. Any invocation failure propagates to the caller.
@Override
public void start() throws Exception
{
  log.info("Starting object[%s]", o);
  startMethod.invoke(o);
}
// Starts the wrapped object by calling its pre-resolved start method via
// reflection; exceptions from the invocation are not swallowed.
@Override
public void start() throws Exception
{
  log.info("Starting object[%s]", o);
  startMethod.invoke(o);
}
// Supplies a fresh direct buffer of 0xFFFF (65535) bytes in little-endian
// order; the counter records how many buffers have been handed out so far.
@Override
public ByteBuffer get()
{
  final long allocated = counter.incrementAndGet();
  log.info("Allocating new littleEndByteBuf[%,d]", allocated);
  final ByteBuffer buf = ByteBuffer.allocateDirect(0xFFFF);
  return buf.order(ByteOrder.LITTLE_ENDIAN);
}
}
// Supplies a fresh direct buffer of 0xFFFF (65535) bytes in big-endian
// order, logging a running allocation count for visibility.
@Override
public ByteBuffer get()
{
  final long allocated = counter.incrementAndGet();
  log.info("Allocating new bigEndByteBuf[%,d]", allocated);
  final ByteBuffer buf = ByteBuffer.allocateDirect(0xFFFF);
  return buf.order(ByteOrder.BIG_ENDIAN);
}
}
// Scans the managed object's public methods and invokes every one that is
// annotated with @LifecycleStart. Invocation failures propagate.
@Override
public void start() throws Exception
{
  final Method[] candidates = o.getClass().getMethods();
  for (int i = 0; i < candidates.length; i++) {
    final Method candidate = candidates[i];
    if (candidate.getAnnotation(LifecycleStart.class) != null) {
      log.info("Invoking start method[%s] on object[%s].", candidate, o);
      candidate.invoke(o);
    }
  }
}
// Registers this pathSpec's configured comma-separated paths as input
// paths on the Hadoop job and returns the same job for chaining.
@Override
public Job addInputPaths(HadoopDruidIndexerConfig config, Job job) throws IOException
{
  log.info("Adding paths[%s]", paths);
  FileInputFormat.addInputPaths(job, paths);
  return job;
}
}
// Lifecycle hook: reflectively calls the registered stop method.
// A failing stop is logged but deliberately not rethrown, so that the
// rest of shutdown can proceed.
@Override
public void stop()
{
  log.info("Stopping object[%s]", o);
  try {
    stopMethod.invoke(o);
  }
  catch (Exception e) {
    log.error(e, "Unable to invoke stopMethod() on %s", o.getClass());
  }
}
}
// Best-effort creation of a ZK parent path via Curator. Losing a race to
// another creator is expected and benign, so failures are logged at info.
// The path is recorded in parentsIBuilt only when we actually created it.
private void createPath(String parentPath)
{
  try {
    curator.create().creatingParentsIfNeeded().forPath(parentPath);
    parentsIBuilt.add(parentPath);
  }
  catch (Exception e) {
    log.info(e, "Problem creating parentPath[%s], someone else created it first?", parentPath);
  }
}
}
// Converts the captured segment using the supplied toolbox and reports
// success; any exception from the conversion propagates to the runner.
@Override
public TaskStatus run(TaskToolbox toolbox) throws Exception
{
  log.info("Subs are good! Italian BMT and Meatball are probably my favorite.");
  convertSegment(toolbox, segment);
  return success();
}
}
// JVM shutdown hook body: delegate to the normal stop() path.
@Override
public void run()
{
  log.info("Running shutdown hook");
  stop();
}
}
// Re-reads every watched config entry from the DB table and swaps in any
// payload that changed. A failure on one key is logged and must not abort
// polling of the remaining keys.
private void poll()
{
  for (Map.Entry<String, ConfigHolder> watched : watchedConfigs.entrySet()) {
    final String key = watched.getKey();
    try {
      if (watched.getValue().swapIfNew(dbConnector.lookup(configTable, "name", "payload", key))) {
        log.info("New value for key[%s] seen.", key);
      }
    }
    catch (Exception e) {
      log.warn(e, "Exception when checking property[%s]", key);
    }
  }
}
// Copies the given task log file into the configured log directory.
//
// Fix: the original ignored the boolean result of mkdir() and used mkdir
// (single level), so a missing parent or a failed create was silently
// dropped and the subsequent copy failed with a confusing error. We now
// use mkdirs() and fail fast with a descriptive IOException (re-checking
// exists() to tolerate a concurrent creator).
@Override
public void pushTaskLog(final String taskid, File file) throws IOException
{
  final File dir = config.getDirectory();
  if (!dir.exists() && !dir.mkdirs() && !dir.exists()) {
    throw new IOException(String.format("Unable to create task log directory[%s]", dir));
  }
  final File outputFile = fileForTask(taskid);
  Files.copy(file, outputFile);
  log.info("Wrote task log to: %s", outputFile);
}
// Closes the RabbitMQ channel and its underlying connection.
//
// Fix: in the original, an exception from channel.close() skipped
// connection.close() entirely, leaking the connection. The finally block
// guarantees the connection is closed even when the channel close fails;
// the channel's exception still propagates to the caller.
@Override
public void close() throws IOException
{
  log.info("Closing connection to RabbitMQ");
  try {
    channel.close();
  }
  finally {
    connection.close();
  }
}
};
// Idempotent lifecycle stop: under the lock, a second call is a no-op.
// Otherwise withdraw our announcement and mark ourselves stopped.
@LifecycleStop
public void stop()
{
  synchronized (lock) {
    if (started) {
      log.info("Stopping %s with config[%s]", getClass(), config);
      announcer.unannounce(makeAnnouncementPath());
      started = false;
    }
  }
}
// Withdraws the "served segment" announcement for the given segment by
// removing its node at the computed path.
public void unannounceSegment(DataSegment segment) throws IOException
{
  final String servedPath = makeServedSegmentPath(segment);
  log.info("Unannouncing segment[%s] at path[%s]", segment.getIdentifier(), servedPath);
  announcer.unannounce(servedPath);
}
// Maps a segment to a conversion sub-task, or to null when the segment is
// already at the current binary version and needs no work.
@Override
public Task apply(DataSegment segment)
{
  final Integer segmentVersion = segment.getBinaryVersion();
  if (CURR_VERSION_INTEGER.equals(segmentVersion)) {
    log.info("Skipping[%s], already version[%s]", segment.getIdentifier(), segmentVersion);
    return null;
  }
  return new SubTask(getGroupId(), segment);
}
}