/**
 * Fetches the user email metadata for the given site/user from the analyzer REST endpoint.
 *
 * @param config application config providing host/port/context path
 * @param siteId site identifier used in the request path
 * @param userId user identifier (URL-encoded before being appended to the path)
 * @return the list of user email entities; empty (never null) on any failure
 */
public static List<UserEmailEntity> getUserMail(Config config, String siteId, String userId) {
    List<UserEmailEntity> result = new ArrayList<>();
    // URLEncoder.encode(String) is deprecated and uses the platform default charset;
    // encode explicitly as UTF-8 so the generated URL is stable across JVMs.
    String encodedUserId;
    try {
        encodedUserId = URLEncoder.encode(userId, "UTF-8");
    } catch (java.io.UnsupportedEncodingException e) {
        // UTF-8 support is mandated by the JVM spec; this branch is unreachable.
        throw new IllegalStateException("UTF-8 charset not supported", e);
    }
    String url = "http://"
        + config.getString(Constants.HOST_PATH)
        + ":"
        + config.getInt(Constants.PORT_PATH)
        + config.getString(Constants.CONTEXT_PATH)
        + Constants.ANALYZER_PATH
        + Constants.USER_META_ROOT_PATH
        + "/" + siteId + "/" + encodedUserId;
    InputStream is = null;
    try {
        is = InputStreamUtils.getInputStream(url, null, org.apache.eagle.jpm.util.Constants.CompressionType.NONE);
        LOG.info("get user meta from {}", url);
        RESTResponse<List<UserEmailEntity>> response =
            (RESTResponse<List<UserEmailEntity>>) OBJ_MAPPER.readValue(is,
                new TypeReference<RESTResponse<List<UserEmailEntity>>>() {
                });
        // Guard against a response with null data so callers never receive null.
        if (response.getData() != null) {
            result = response.getData();
        }
    } catch (Exception e) {
        // Best-effort lookup: log and return the empty list rather than failing the caller.
        LOG.warn("failed to get user meta from {}", url, e);
    } finally {
        org.apache.eagle.jpm.util.Utils.closeInputStream(is);
    }
    return result;
}
private void init(Config config) { this.config = config; //parse eagle zk this.zkStateConfig.zkQuorum = config.getString("zookeeper.zkQuorum"); this.zkStateConfig.zkSessionTimeoutMs = config.getInt("zookeeper.zkSessionTimeoutMs"); this.zkStateConfig.zkRetryTimes = config.getInt("zookeeper.zkRetryTimes"); this.zkStateConfig.zkRetryInterval = config.getInt("zookeeper.zkRetryInterval"); this.zkStateConfig.zkLockPath = Utils.makeLockPath(ZK_ROOT_PREFIX + "/" + config.getString("siteId")); this.zkStateConfig.zkRoot = ZK_ROOT_PREFIX + "/" + config.getString("siteId") + JOB_SYMBOL; // parse eagle service endpoint this.eagleServiceConfig.eagleServiceHost = config.getString("service.host"); String port = config.getString("service.port"); this.eagleServiceConfig.eagleServicePort = Integer.parseInt(port); this.eagleServiceConfig.username = config.getString("service.username"); this.eagleServiceConfig.password = config.getString("service.password"); this.eagleServiceConfig.readTimeoutSeconds = config.getInt("service.readTimeOutSeconds"); //parse data source config this.endpointConfig.rmUrls = config.getString("endpointConfig.rmUrls").split(","); this.endpointConfig.site = config.getString("siteId"); this.endpointConfig.fetchRunningJobInterval = config.getInt("endpointConfig.fetchRunningJobInterval"); this.endpointConfig.parseJobThreadPoolSize = config.getInt("endpointConfig.parseJobThreadPoolSize"); this.endpointConfig.requestsNum = getConfigValue(config, "endpointConfig.requestsNum", 1); this.endpointConfig.limitPerRequest = getConfigValue(config, "endpointConfig.limitPerRequest", ""); this.endpointConfig.timeRangePerRequestInMin = getConfigValue(config, "endpointConfig.timeRangePerRequestInMin", 60); LOG.info("Successfully initialized MRRunningJobConfig"); LOG.info("site: " + this.endpointConfig.site); LOG.info("eagle.service.host: " + this.eagleServiceConfig.eagleServiceHost); LOG.info("eagle.service.port: " + this.eagleServiceConfig.eagleServicePort); }
@Override public void ack(Object messageId) { //remove from processStartTime, if size == 0, then update lastUpdateTime Long startTime = (Long)messageId; if (LOG.isDebugEnabled()) { LOG.debug("succeed startTime {}", startTime); } this.processStartTime.remove(startTime); if (this.processStartTime.size() == 0) { while (true) { try { LOG.info("all have finished, update lastUpdateTime to {}", this.lastUpdateTime + MAX_SAFE_TIME); AggregationTimeManager.instance().updateLastFinishTime(this.lastUpdateTime + MAX_SAFE_TIME); break; } catch (Exception e) { Utils.sleep(3); } } } }
return false; } finally { Utils.closeInputStream(is); attemptEntity.setAppInfo(app); attemptEntity.setStartTime(Utils.dateTimeToLong(sparkApplication.getAttempts().get(j - 1).getStartTime())); attemptEntity.setTimestamp(attemptEntity.getStartTime()); attemptEntity.setExecMemoryBytes(Utils.parseMemory(jobConfig.get(Constants.SPARK_EXECUTOR_MEMORY_KEY))); : Utils.parseMemory(jobConfig.get(Constants.SPARK_DRIVER_MEMORY_KEY))); attemptEntity.setExecutorCores(Integer.parseInt(jobConfig.get(Constants.SPARK_EXECUTOR_CORES_KEY)));
/**
 * Determines the job type from a Hadoop {@link Configuration} by flattening it
 * into a plain map and delegating to the map-based overload.
 *
 * @param config Hadoop job configuration to inspect
 * @return the detected job type
 */
public static Constants.JobType fetchJobType(Configuration config) {
    Map<String, String> flattened = new HashMap<>();
    for (Map.Entry<String, String> property : config) {
        flattened.put(property.getKey(), property.getValue());
    }
    return fetchJobType(flattened);
}
/**
 * Resolves the memory overhead (in bytes) for the given spark overhead field.
 * Falls back to a configured default when the job config does not specify one.
 *
 * @param config         job configuration holding the raw field value
 * @param executorMemory executor memory in bytes, used for the factor-based default
 * @param fieldName      name of the overhead config field to read
 * @return the resolved overhead in bytes
 */
private long getMemoryOverhead(JobConfig config, long executorMemory, String fieldName) {
    String rawValue = config.getConfig().get(fieldName);
    long overhead = 0L;
    if (rawValue != null) {
        // Bare numbers mean megabytes, so try with an "m" suffix first; if the value
        // already carries a unit that parse fails and we parse it verbatim instead.
        try {
            overhead = Utils.parseMemory(rawValue + "m");
        } catch (Exception e) {
            overhead = Utils.parseMemory(rawValue);
        }
    }
    if (overhead == 0L) {
        // Default: the larger of the configured minimum and a percentage of executor memory.
        long minimumOverhead = Utils.parseMemory(conf.getString("spark.defaultVal.spark.yarn.overhead.min"));
        long factorOverhead = executorMemory * conf.getInt("spark.defaultVal." + fieldName + ".factor") / 100;
        overhead = Math.max(minimumOverhead, factorOverhead);
    }
    return overhead;
}
taskEntity.getTags().put(SparkJobTagName.SPARK_TASK_INDEX.toString(), task.getIndex() + ""); taskEntity.setTaskId(task.getTaskId()); taskEntity.setLaunchTime(Utils.dateTimeToLong(task.getLaunchTime())); taskEntity.setHost(task.getHost()); taskEntity.setTaskLocality(task.getTaskLocality());
this.setJobType(Utils.fetchJobType(this.configuration).toString());
long executorMemory = Utils.parseMemory((String) this.getConfigVal(this.app.getConfig(), "spark.executor.memory", String.class.getName())); long driverMemory = Utils.parseMemory(this.isClientMode(app.getConfig()) ? (String) this.getConfigVal(this.app.getConfig(), "spark.yarn.am.memory", String.class.getName()) : (String) this.getConfigVal(app.getConfig(), "spark.driver.memory", String.class.getName()));
/**
 * Fetches job metadata for the given site/job definition from the analyzer REST endpoint.
 *
 * @param config   application config providing host/port/context path
 * @param siteId   site identifier used in the request path
 * @param jobDefId job definition identifier (URL-encoded before being appended to the path)
 * @return the list of job meta entities; empty (never null) on any failure
 */
public static List<JobMetaEntity> getJobMeta(Config config, String siteId, String jobDefId) {
    List<JobMetaEntity> result = new ArrayList<>();
    // URLEncoder.encode(String) is deprecated and uses the platform default charset;
    // encode explicitly as UTF-8 so the generated URL is stable across JVMs.
    String encodedJobDefId;
    try {
        encodedJobDefId = URLEncoder.encode(jobDefId, "UTF-8");
    } catch (java.io.UnsupportedEncodingException e) {
        // UTF-8 support is mandated by the JVM spec; this branch is unreachable.
        throw new IllegalStateException("UTF-8 charset not supported", e);
    }
    String url = "http://"
        + config.getString(Constants.HOST_PATH)
        + ":"
        + config.getInt(Constants.PORT_PATH)
        + config.getString(Constants.CONTEXT_PATH)
        + Constants.ANALYZER_PATH
        + Constants.JOB_META_ROOT_PATH
        + "/" + siteId + "/" + encodedJobDefId;
    InputStream is = null;
    try {
        is = InputStreamUtils.getInputStream(url, null, org.apache.eagle.jpm.util.Constants.CompressionType.NONE);
        LOG.info("get job meta from {}", url);
        RESTResponse<List<JobMetaEntity>> response =
            (RESTResponse<List<JobMetaEntity>>) OBJ_MAPPER.readValue(is,
                new TypeReference<RESTResponse<List<JobMetaEntity>>>() {
                });
        // Guard against a response with null data so callers never receive null.
        if (response.getData() != null) {
            result = response.getData();
        }
    } catch (Exception e) {
        // Best-effort lookup: log and return the empty list rather than failing the caller.
        LOG.warn("failed to get job meta from {}", url, e);
    } finally {
        org.apache.eagle.jpm.util.Utils.closeInputStream(is);
    }
    return result;
}
@Override public void nextTuple() { LOG.info("start to fetch job list"); try { List<AppInfo> apps = rmResourceFetcher.getResource(Constants.ResourceType.RUNNING_MR_JOB); if (apps == null) { apps = new ArrayList<>(); } handleApps(apps, true); long fetchTime = Calendar.getInstance().getTimeInMillis(); if (fetchTime - this.lastFinishAppTime > 60000L) { apps = rmResourceFetcher.getResource(Constants.ResourceType.COMPLETE_MR_JOB, Long.toString(this.lastFinishAppTime)); if (apps == null) { apps = new ArrayList<>(); } handleApps(apps, false); this.lastFinishAppTime = fetchTime; this.runningJobManager.updateLastFinishTime(partitionId, fetchTime); } } catch (Exception e) { LOG.warn("exception found {}", e); } finally { //need to be configured Utils.sleep(60); } }
this.zkStateConfig.zkRetryTimes = config.getInt("zookeeper.zkRetryTimes"); this.zkStateConfig.zkSessionTimeoutMs = config.getInt("zookeeper.zkSessionTimeoutMs"); this.zkStateConfig.zkLockPath = Utils.makeLockPath(DEFAULT_SPARK_JOB_RUNNING_ZOOKEEPER_ROOT + "/" + config.getString("siteId")); this.zkStateConfig.zkRoot = DEFAULT_SPARK_JOB_RUNNING_ZOOKEEPER_ROOT + "/" + config.getString("siteId") + JOB_SYMBOL; if (config.hasPath("zookeeper.zkRoot")) {
return false; } finally { Utils.closeInputStream(is);
@Override public void nextTuple() { Utils.sleep(5); if (LOG.isDebugEnabled()) { LOG.debug("start to run");
zkStateConfig.zkLockPath = Utils.makeLockPath(config.getString("dataSourceConfig.zkRoot") + "/" + config.getString("siteId")); zkStateConfig.zkRoot = config.getString("dataSourceConfig.zkRoot") + "/" + config.getString("siteId") + JOB_SYMBOL; zkStateConfig.zkSessionTimeoutMs = config.getInt("dataSourceConfig.zkSessionTimeoutMs");
return false; } finally { Utils.closeInputStream(is);
LOG.error("An fetal exception is caught: {}", e.getMessage(), e); } finally { Utils.sleep(endpointConfig.fetchRunningJobInterval);
continue; } finally { Utils.closeInputStream(is);
return false; } finally { Utils.closeInputStream(is);