/**
 * Validates that the supplied value is not null.
 *
 * @param value the value to validate
 * @throws IllegalArgumentException if {@code value} is null
 */
public static void nonNullCheckForValue(Object value) {
    if (value == null) {
        throw new IllegalArgumentException("value argument can not be null");
    }
}
/**
 * Compute the java.library.path that should be used for the worker. This helps it to
 * load JNI libraries that are packaged in the uber jar.
 *
 * @param stormRoot the root directory of the worker process
 * @param conf the config for the supervisor.
 * @return the java.library.path/LD_LIBRARY_PATH to use so native libraries load correctly.
 */
protected String javaLibraryPath(String stormRoot, Map<String, Object> conf) {
    String resourceRoot = stormRoot + File.separator + ServerConfigUtils.RESOURCES_SUBDIR;
    // Normalize whitespace in the OS name (e.g. "Mac OS X" -> "Mac_OS_X") so it can be
    // used as a directory name component.
    String osName = System.getProperty("os.name").replaceAll("\\s+", "_");
    String archName = System.getProperty("os.arch");
    // Platform-specific resources take precedence over the generic resource dir.
    String archSpecificRoot = resourceRoot + File.separator + osName + "-" + archName;
    return CPJ.join(archSpecificRoot, resourceRoot, conf.get(DaemonConfig.JAVA_LIBRARY_PATH));
}
/**
 * Create the {@link ILocalAssignmentsBackend} configured via
 * {@code Config.NIMBUS_LOCAL_ASSIGNMENTS_BACKEND_CLASS}, falling back to the default
 * backend when no class is configured.
 *
 * @param conf the nimbus configuration
 * @return a prepared assignments backend instance
 * @throws IllegalStateException if the configured class does not implement ILocalAssignmentsBackend
 */
public static ILocalAssignmentsBackend getBackend(Map<String, Object> conf) {
    if (conf.get(Config.NIMBUS_LOCAL_ASSIGNMENTS_BACKEND_CLASS) != null) {
        Object targetObj = ReflectionUtils.newInstance((String) conf.get(Config.NIMBUS_LOCAL_ASSIGNMENTS_BACKEND_CLASS));
        // BUGFIX: Guava Preconditions uses %s placeholders, not SLF4J-style {} — with
        // "{}" the offending config key was never interpolated into the error message.
        Preconditions.checkState(targetObj instanceof ILocalAssignmentsBackend,
                                 "%s must implement ILocalAssignmentsBackend",
                                 Config.NIMBUS_LOCAL_ASSIGNMENTS_BACKEND_CLASS);
        ((ILocalAssignmentsBackend) targetObj).prepare(conf);
        return (ILocalAssignmentsBackend) targetObj;
    }
    return getDefault();
}
/**
 * Reads up to {@code len} bytes from the backing buffer into {@code bytes}, starting at
 * offset {@code off}.
 *
 * @param bytes destination array, must not be null
 * @param off starting offset within {@code bytes}
 * @param len maximum number of bytes to copy
 * @return the number of bytes copied, 0 if {@code len} is 0, or -1 at end of stream
 * @throws IOException declared by the InputStream contract
 */
public int read (byte[] bytes, int off, int len) throws IOException {
    Preconditions.checkNotNull(bytes, "Given byte array can not be null");
    Preconditions.checkPositionIndexes(off, off + len, bytes.length);
    if (!buf.hasRemaining()) {
        // Backing buffer exhausted: signal end-of-stream.
        return -1;
    }
    if (len == 0) {
        return 0;
    }
    // Copy no more than what remains in the buffer.
    int bytesToCopy = Math.min(len, buf.remaining());
    buf.get(bytes, off, bytesToCopy);
    return bytesToCopy;
}
}
/**
 * Folds a batch of worker heartbeats reported by a supervisor into the local cache.
 * While heartbeat recovery is still in progress, also reports the supervisor's node id
 * to the recovery strategy (skipping blank ids).
 *
 * @param workerHeartbeats heartbeat batch received from one supervisor
 */
private void updateCachedHeartbeatsFromSupervisor(SupervisorWorkerHeartbeats workerHeartbeats) {
    workerHeartbeats.get_worker_heartbeats()
                    .forEach(this::updateCachedHeartbeatsFromWorker);
    boolean stillRecovering = !heartbeatsReadyFlag.get();
    String supervisorId = workerHeartbeats.get_supervisor_id();
    if (stillRecovering && !Strings.isNullOrEmpty(supervisorId)) {
        heartbeatsRecoveryStrategy.reportNodeId(supervisorId);
    }
}
/**
 * Serializes a value for storage: the user-level serializer produces the raw bytes,
 * which are then wrapped in an Optional and encoded by the internal serializer.
 *
 * @param value the value to encode
 * @return the encoded byte representation
 */
public byte[] encodeValue(V value) {
    byte[] rawValue = valueSerializer.serialize(value);
    return internalValueSerializer.serialize(Optional.of(rawValue));
}
/**
 * Reads a string encrypted by another instance with a shared key.
 *
 * <p>Round-trips a message between two {@code BlowfishTupleSerializer} instances built
 * from the same configuration (and therefore sharing the same key): one encrypts, the
 * other decrypts, and the decoded text must equal the original.
 *
 * @param topoConf topology configuration carrying the shared secret key
 */
private void testEncryptsAndDecryptsMessage(Map<String, Object> topoConf) {
    String testText = "Tetraodontidae is a family of primarily marine and estuarine fish of the order"
                      + " Tetraodontiformes. The family includes many familiar species, which are"
                      + " variously called pufferfish, puffers, balloonfish, blowfish, bubblefish,"
                      + " globefish, swellfish, toadfish, toadies, honey toads, sugar toads, and sea"
                      + " squab.[1] They are morphologically similar to the closely related"
                      + " porcupinefish, which have large external spines (unlike the thinner, hidden"
                      + " spines of Tetraodontidae, which are only visible when the fish has puffed up)."
                      + " The scientific name refers to the four large teeth, fused into an upper and"
                      + " lower plate, which are used for crushing the shells of crustaceans and"
                      + " mollusks, their natural prey.";
    Kryo kryo = new Kryo();
    // Two distinct serializer instances built from the same config share the same key.
    BlowfishTupleSerializer writer = new BlowfishTupleSerializer(kryo, topoConf);
    BlowfishTupleSerializer reader = new BlowfishTupleSerializer(kryo, topoConf);
    int bufferSize = 1024;
    Output encryptedOut = new Output(bufferSize, bufferSize);
    Input decryptIn = new Input(bufferSize);
    ListDelegate original = new ListDelegate();
    original.addAll(Arrays.asList(testText.split(" ")));
    writer.write(kryo, encryptedOut, original);
    // Feed the encrypted bytes straight into the reader's input buffer.
    decryptIn.setBuffer(encryptedOut.getBuffer());
    ListDelegate decoded = reader.read(kryo, decryptIn, ListDelegate.class);
    Assert.assertEquals(testText, Joiner.on(" ").join(decoded.toArray()));
}
}
/**
 * Compute the classpath for the worker process.
 *
 * <p>Order matters: user-configured beginning entries come first, then the framework
 * classpath, the topology jar, its dependencies, and finally the user-configured
 * trailing entries.
 *
 * @param stormJar the topology jar
 * @param dependencyLocations any dependencies from the topology
 * @param topoVersion the version of the storm framework to use
 * @return the full classpath
 */
protected String getWorkerClassPath(String stormJar, List<String> dependencyLocations, SimpleVersion topoVersion) {
    List<String> classPathEntries =
        new ArrayList<>(asStringList(_topoConf.get(Config.TOPOLOGY_CLASSPATH_BEGINNING)));
    classPathEntries.addAll(frameworkClasspath(topoVersion));
    classPathEntries.add(stormJar);
    classPathEntries.addAll(dependencyLocations);
    classPathEntries.addAll(asStringList(_topoConf.get(Config.TOPOLOGY_CLASSPATH)));
    return CPJ.join(classPathEntries);
}
/**
 * Get instance of {@link IWorkerHeartbeatsRecoveryStrategy} with conf.
 *
 * <p>If {@code DaemonConfig.NIMBUS_WORKER_HEARTBEATS_RECOVERY_STRATEGY_CLASS} is set,
 * the configured class is instantiated reflectively; otherwise the default
 * {@code TimeOutWorkerHeartbeatsRecoveryStrategy} is used. The strategy is prepared
 * with {@code conf} before being returned.
 *
 * @param conf strategy config
 * @return an instance of {@link IWorkerHeartbeatsRecoveryStrategy}
 * @throws IllegalStateException if the configured class does not implement the interface
 */
public static IWorkerHeartbeatsRecoveryStrategy getStrategy(Map<String, Object> conf) {
    IWorkerHeartbeatsRecoveryStrategy strategy;
    if (conf.get(DaemonConfig.NIMBUS_WORKER_HEARTBEATS_RECOVERY_STRATEGY_CLASS) != null) {
        Object targetObj = ReflectionUtils.newInstance((String) conf.get(DaemonConfig.NIMBUS_WORKER_HEARTBEATS_RECOVERY_STRATEGY_CLASS));
        // BUGFIX: Guava Preconditions uses %s placeholders, not SLF4J-style {} — with
        // "{}" the offending config key was never interpolated into the error message.
        Preconditions.checkState(targetObj instanceof IWorkerHeartbeatsRecoveryStrategy,
                                 "%s must implement IWorkerHeartbeatsRecoveryStrategy",
                                 DaemonConfig.NIMBUS_WORKER_HEARTBEATS_RECOVERY_STRATEGY_CLASS);
        strategy = (IWorkerHeartbeatsRecoveryStrategy) targetObj;
    } else {
        strategy = new TimeOutWorkerHeartbeatsRecoveryStrategy();
    }
    strategy.prepare(conf);
    return strategy;
}
/**
 * Validates that the supplied key is not null.
 *
 * @param key the key to validate
 * @throws IllegalArgumentException if {@code key} is null
 */
public static void nonNullCheckForKey(Object key) {
    if (key == null) {
        throw new IllegalArgumentException("key argument can not be null");
    }
}
/**
 * Compute the java.library.path that should be used for the worker.
 * This helps it to load JNI libraries that are packaged in the uber jar.
 *
 * @param stormRoot the root directory of the worker process
 * @param conf the config for the supervisor.
 * @return the java.library.path/LD_LIBRARY_PATH to use so native libraries load correctly.
 */
protected String javaLibraryPath(String stormRoot, Map<String, Object> conf) {
    String resourceRoot = stormRoot + Utils.FILE_PATH_SEPARATOR + ConfigUtils.RESOURCES_SUBDIR;
    // Normalize whitespace in the OS name (e.g. "Mac OS X" -> "Mac_OS_X") so it can be
    // used as a directory name component.
    String osName = System.getProperty("os.name").replaceAll("\\s+", "_");
    String archName = System.getProperty("os.arch");
    // Platform-specific resources take precedence over the generic resource dir.
    String archSpecificRoot = resourceRoot + Utils.FILE_PATH_SEPARATOR + osName + "-" + archName;
    return CPJ.join(archSpecificRoot, resourceRoot, conf.get(Config.JAVA_LIBRARY_PATH));
}
// Invoked by Netty once the connect attempt driven by this listener finishes,
// successfully or not. NOTE(review): runs on a Netty event-loop thread — presumably
// concurrent with other connect attempts, hence the CAS below; confirm against caller.
@Override
public void operationComplete(ChannelFuture future) throws Exception {
    // This call returns immediately
    Channel newChannel = future.channel();
    if (future.isSuccess() && connectionEstablished(newChannel)) {
        // Publish the connected channel. The CAS from null guards against installing a
        // channel when one is already set; checkState makes a lost race fatal.
        boolean setChannel = channelRef.compareAndSet(null, newChannel);
        checkState(setChannel);
        LOG.debug("successfully connected to {}, {} [attempt {}]", address.toString(), newChannel.toString(), connectionAttempt);
        if (messagesLost.get() > 0) {
            // Messages dropped while disconnected are gone; this only reports the count.
            LOG.warn("Re-connection to {} was successful but {} messages has been lost so far", address.toString(), messagesLost.get());
        }
    } else {
        // Connect failed (or the channel did not pass connectionEstablished):
        // schedule another attempt, then close the half-open channel if any.
        Throwable cause = future.cause();
        reschedule(cause);
        if (newChannel != null) {
            newChannel.close();
        }
    }
}
});
/**
 * Worker process entry point.
 *
 * <p>Expected arguments, in order: topology id, assignment id, supervisor port,
 * worker port, worker id. Reads the storm config, validates distributed mode,
 * starts the worker, and registers a shutdown hook that force-kills after 1s.
 *
 * @param args the five positional arguments described above
 * @throws Exception if configuration loading or worker startup fails
 */
public static void main(String[] args) throws Exception {
    Preconditions.checkArgument(args.length == 5,
        "Illegal number of arguments. Expected: 5, Actual: " + args.length);
    String topologyId = args[0];
    String assignmentId = args[1];
    String supervisorPortStr = args[2];
    String workerPortStr = args[3];
    String workerId = args[4];
    Map<String, Object> conf = ConfigUtils.readStormConfig();
    Utils.setupDefaultUncaughtExceptionHandler();
    StormCommon.validateDistributedMode(conf);
    Worker worker = new Worker(conf, null, topologyId, assignmentId,
                               Integer.parseInt(supervisorPortStr),
                               Integer.parseInt(workerPortStr), workerId);
    worker.start();
    Utils.addShutdownHookWithForceKillIn1Sec(worker::shutdown);
}
/**
 * Compute the classpath for the worker process.
 *
 * <p>Order matters: the framework classpath comes first, then the topology jar, its
 * dependencies, and finally the user-configured trailing entries.
 *
 * @param stormJar the topology jar
 * @param dependencyLocations any dependencies from the topology
 * @return the full classpath
 */
protected String getWorkerClassPath(String stormJar, List<String> dependencyLocations) {
    List<String> classPathEntries = new ArrayList<>(frameworkClasspath());
    classPathEntries.add(stormJar);
    classPathEntries.addAll(dependencyLocations);
    classPathEntries.addAll(asStringList(_topoConf.get(Config.TOPOLOGY_CLASSPATH)));
    return CPJ.join(classPathEntries);
}
/**
 * Validates that the supplied key is not null.
 *
 * @param key the key to validate
 * @throws IllegalArgumentException if {@code key} is null
 */
public static void nonNullCheckForKey(Object key) {
    if (key == null) {
        throw new IllegalArgumentException("key argument can not be null");
    }
}
/**
 * Validates that the supplied value is not null.
 *
 * @param value the value to validate
 * @throws IllegalArgumentException if {@code value} is null
 */
public static void nonNullCheckForValue(Object value) {
    if (value == null) {
        throw new IllegalArgumentException("value argument can not be null");
    }
}