return new ObjectPair<Long, Integer>(-1L, -1); int executorMemoryInMB = Utils.memoryStringToMb( sparkConf.get("spark.executor.memory", "512m")); double memoryFraction = 1.0 - sparkConf.getDouble("spark.storage.memoryFraction", 0.6);
return new ObjectPair<Long, Integer>(-1L, -1); int executorMemoryInMB = Utils.memoryStringToMb( sparkConf.get("spark.executor.memory", "512m")); double memoryFraction = 1.0 - sparkConf.getDouble("spark.storage.memoryFraction", 0.6);
Utils.memoryStringToMb(SparkEnv.get().conf().get(SPARK_EXECUTOR_MEMORY_PROP, DEFAULT_SPARK_EXECUTOR_MEMORY_MB)) * 1024
long executorMemoryInBytes = Utils.memoryStringToMb(SparkEnv.get().conf().get(SPARK_EXECUTOR_MEMORY_PROP, DEFAULT_SPARK_EXECUTOR_MEMORY_MB)) * 1024
/**
 * Starts a Spark worker on this node with the configured parameters.
 * <p>
 * Port, web UI port, core count, memory, and work directory are read from the
 * Spark configuration (defaults include the node's port offset). If a worker is
 * already active on this node, the call is a no-op apart from a debug log entry.
 */
public synchronized void startWorker() {
    if (this.workerActive) {
        // Idempotent: a second start request on the same node is ignored.
        logDebug("Worker is already active in this node, therefore ignoring worker startup");
        return;
    }
    String host = this.myHost;
    int port = this.sparkConf.getInt(AnalyticsConstants.SPARK_WORKER_PORT, 10000 + this.portOffset);
    int uiPort = this.sparkConf.getInt(AnalyticsConstants.SPARK_WORKER_WEBUI_PORT, 10500 + this.portOffset);
    int cores = this.sparkConf.getInt(AnalyticsConstants.SPARK_WORKER_CORES, 1);
    String memory = getStringFromSparkConf(AnalyticsConstants.SPARK_WORKER_MEMORY, "1g");
    String workDir = getStringFromSparkConf(AnalyticsConstants.SPARK_WORKER_DIR, "work");
    String[] masterUrls = this.getSparkMastersFromCluster();
    // Worker memory is configured as a string (e.g. "1g") and converted to MB here.
    Worker.startRpcEnvAndEndpoint(host, port, uiPort, cores,
            Utils.memoryStringToMb(memory), masterUrls, workDir,
            Option.empty(), this.sparkConf);
    log.info("[Spark init - worker] Started SPARK WORKER in " + host + ":" + port
            + " with webUI port " + uiPort + " with Masters " + Arrays.toString(masterUrls));
    this.workerActive = true;
}
return new ObjectPair<Long, Integer>(-1L, -1); int executorMemoryInMB = Utils.memoryStringToMb( sparkConf.get("spark.executor.memory", "512m")); double memoryFraction = 1.0 - sparkConf.getDouble("spark.storage.memoryFraction", 0.6);