private void handleExecutorAdd(JSONObject event) {
    String executorID = (String) event.get("Executor ID");
    long executorAddTime = JSONUtils.getLong(event, "Timestamp", lastEventTime);
    this.lastEventTime = executorAddTime;
    SparkExecutor executor = this.initiateExecutor(executorID, executorAddTime);

    // "Executor Info" is present in the event but not consumed further in this excerpt.
    JSONObject executorInfo = JSONUtils.getJSONObject(event, "Executor Info");
}
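/*
 * The handlers in this reader repeatedly call JSONUtils.getLong(json, key, lastEventTime)
 * so that a missing timestamp falls back to the time of the previous event. JSONUtils is
 * not shown in this excerpt; a minimal sketch of such a null-safe accessor could look like
 * the hypothetical helper below (an assumption -- the real JSONUtils in this codebase may differ).
 */
private static long getLongOrDefault(JSONObject json, String key, long defaultValue) {
    // Fall back to the supplied default when the object or the field is absent.
    Object value = (json == null) ? null : json.get(key);
    return (value instanceof Number) ? ((Number) value).longValue() : defaultValue;
}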
private void handleStageSubmit(JSONObject event) {
    JSONObject stageInfo = JSONUtils.getJSONObject(event, "Stage Info");
    int stageId = JSONUtils.getInt(stageInfo, "Stage ID");
    int stageAttemptId = JSONUtils.getInt(stageInfo, "Stage Attempt ID");
    String key = this.generateStageKey(Integer.toString(stageId), Integer.toString(stageAttemptId));
    stageTaskStatusMap.put(key, new HashMap<Integer, Boolean>());

    if (!stages.containsKey(key)) {
        // This may be a further attempt of a stage seen before; reuse the job ID recorded on attempt 0.
        String baseAttempt = this.generateStageKey(Integer.toString(stageId), "0");
        if (stages.containsKey(baseAttempt)) {
            SparkStage stage = stages.get(baseAttempt);
            String jobId = stage.getTags().get(SparkJobTagName.SPARK_JOB_ID.toString());

            // "Stage Name" and "Number of Tasks" live under "Stage Info" in the event log.
            String stageName = JSONUtils.getString(stageInfo, "Stage Name");
            int numTasks = JSONUtils.getInt(stageInfo, "Number of Tasks");
            this.initiateStage(Integer.parseInt(jobId), stageId, stageAttemptId, stageName, numTasks);
        }
    }
}
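/*
 * generateStageKey(..) is referenced above but not shown in this excerpt. Stages are keyed
 * per (stage ID, attempt ID) pair, so a plausible sketch is a simple join of the two values
 * (an assumption -- the actual helper may use a different delimiter or format).
 */
private String generateStageKeySketch(String stageId, String stageAttemptId) {
    // One key per stage attempt, e.g. "3-0" for stage 3, attempt 0.
    return stageId + "-" + stageAttemptId;
}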
private void handleJobEnd(JSONObject event) {
    int jobId = JSONUtils.getInt(event, "Job ID");
    SparkJob job = jobs.get(jobId);

    long completionTime = JSONUtils.getLong(event, "Completion Time", lastEventTime);
    job.setCompletionTime(completionTime);
    this.lastEventTime = completionTime;

    JSONObject jobResult = JSONUtils.getJSONObject(event, "Job Result");
    String result = JSONUtils.getString(jobResult, "Result");
    if (result.equalsIgnoreCase("JobSucceeded")) {
        job.setStatus(SparkEntityConstant.SparkJobStatus.SUCCEEDED.toString());
    } else {
        job.setStatus(SparkEntityConstant.SparkJobStatus.FAILED.toString());
    }
}
private void handleStageComplete(JSONObject event) {
    JSONObject stageInfo = JSONUtils.getJSONObject(event, "Stage Info");
    int stageId = JSONUtils.getInt(stageInfo, "Stage ID");
    int stageAttemptId = JSONUtils.getInt(stageInfo, "Stage Attempt ID");
    String key = this.generateStageKey(Integer.toString(stageId), Integer.toString(stageAttemptId));
    SparkStage stage = stages.get(key);

    // If "Submission Time" is not available, use the "Launch Time" of "Task ID" = 0.
    long submissionTime = JSONUtils.getLong(stageInfo, "Submission Time", firstTaskLaunchTime);
    stage.setSubmitTime(submissionTime);

    long completeTime = JSONUtils.getLong(stageInfo, "Completion Time", lastEventTime);
    stage.setCompleteTime(completeTime);
    this.lastEventTime = completeTime;

    // A stage that carries a "Failure Reason" is marked failed; otherwise it completed.
    if (stageInfo.containsKey("Failure Reason")) {
        stage.setStatus(SparkEntityConstant.SparkStageStatus.FAILED.toString());
    } else {
        stage.setStatus(SparkEntityConstant.SparkStageStatus.COMPLETE.toString());
    }
}
private void handleBlockManagerAdd(JSONObject event) {
    long maxMemory = JSONUtils.getLong(event, "Maximum Memory");
    long timestamp = JSONUtils.getLong(event, "Timestamp", lastEventTime);
    this.lastEventTime = timestamp;

    JSONObject blockInfo = JSONUtils.getJSONObject(event, "Block Manager ID");
    String executorID = JSONUtils.getString(blockInfo, "Executor ID");
    String hostAndPort = JSONUtils.getString(blockInfo, "Host") + ":" + JSONUtils.getLong(blockInfo, "Port");

    SparkExecutor executor = this.initiateExecutor(executorID, timestamp);
    executor.setMaxMemory(maxMemory);
    executor.setHostPort(hostAndPort);
}
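/*
 * initiateExecutor(..) is used by both handleExecutorAdd and handleBlockManagerAdd, which can
 * arrive in either order, so it presumably has get-or-create semantics. A minimal sketch,
 * assuming executors are kept in a Map<String, SparkExecutor> field and using hypothetical
 * setter names (the real method in this class may differ):
 */
private SparkExecutor initiateExecutorSketch(String executorID, long startTime) {
    SparkExecutor executor = executors.get(executorID);
    if (executor == null) {
        // First sighting of this executor: create the entity and record when it appeared.
        executor = new SparkExecutor();
        executor.setExecutorId(executorID); // hypothetical setters
        executor.setStartTime(startTime);
        executors.put(executorID, executor);
    }
    return executor;
}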
private SparkTask initializeTask(JSONObject event) {
    SparkTask task = new SparkTask();
    task.setTags(new HashMap<>(this.app.getTags()));
    task.setTimestamp(app.getTimestamp());

    task.getTags().put(SparkJobTagName.SPARK_SATGE_ID.toString(), Long.toString(JSONUtils.getLong(event, "Stage ID")));
    task.getTags().put(SparkJobTagName.SPARK_STAGE_ATTEMPT_ID.toString(), Long.toString(JSONUtils.getLong(event, "Stage Attempt ID")));

    JSONObject taskInfo = JSONUtils.getJSONObject(event, "Task Info");
    long taskId = JSONUtils.getLong(taskInfo, "Task ID");
    task.setTaskId(taskId);

    task.getTags().put(SparkJobTagName.SPARK_TASK_INDEX.toString(), Long.toString(JSONUtils.getLong(taskInfo, "Index")));
    task.getTags().put(SparkJobTagName.SPARK_TASK_ATTEMPT_ID.toString(), Integer.toString(JSONUtils.getInt(taskInfo, "Attempt")));

    long launchTime = JSONUtils.getLong(taskInfo, "Launch Time", lastEventTime);
    this.lastEventTime = launchTime;
    // The launch time of task 0 doubles as the fallback stage submission time (see handleStageComplete).
    if (taskId == 0) {
        this.setFirstTaskLaunchTime(launchTime);
    }
    task.setLaunchTime(launchTime);

    task.setExecutorId(JSONUtils.getString(taskInfo, "Executor ID"));
    task.setHost(JSONUtils.getString(taskInfo, "Host"));
    task.setTaskLocality(JSONUtils.getString(taskInfo, "Locality"));
    task.setSpeculative(JSONUtils.getBoolean(taskInfo, "Speculative"));

    tasks.put(task.getTaskId(), task);
    return task;
}
private void handleTaskEnd(JSONObject event) {
    JSONObject taskInfo = JSONUtils.getJSONObject(event, "Task Info");
    long taskId = JSONUtils.getLong(taskInfo, "Task ID");
    SparkTask task = tasks.get(taskId);

    JSONObject taskMetrics = JSONUtils.getJSONObject(event, "Task Metrics");
    if (null != taskMetrics) {
        task.setExecutorDeserializeTime(JSONUtils.getLong(taskMetrics, "Executor Deserialize Time", lastEventTime));
        task.setDiskBytesSpilled(JSONUtils.getLong(taskMetrics, "Disk Bytes Spilled"));

        // Each metrics block is optional and independent of the others, so check each one
        // separately rather than nesting the null checks.
        JSONObject inputMetrics = JSONUtils.getJSONObject(taskMetrics, "Input Metrics");
        if (null != inputMetrics) {
            task.setInputBytes(JSONUtils.getLong(inputMetrics, "Bytes Read"));
        }

        JSONObject outputMetrics = JSONUtils.getJSONObject(taskMetrics, "Output Metrics");
        if (null != outputMetrics) {
            task.setOutputBytes(JSONUtils.getLong(outputMetrics, "Bytes Written"));
        }

        JSONObject shuffleWriteMetrics = JSONUtils.getJSONObject(taskMetrics, "Shuffle Write Metrics");
        if (null != shuffleWriteMetrics) {
            task.setShuffleWriteBytes(JSONUtils.getLong(shuffleWriteMetrics, "Shuffle Bytes Written"));
        }

        JSONObject shuffleReadMetrics = JSONUtils.getJSONObject(taskMetrics, "Shuffle Read Metrics");
        if (null != shuffleReadMetrics) {
            task.setShuffleReadLocalBytes(JSONUtils.getLong(shuffleReadMetrics, "Local Bytes Read"));
        }
    }
}
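/*
 * For reference, the handler above consumes a SparkListenerTaskEnd event whose metrics section
 * is shaped roughly like the abridged example below. Field names are taken from the accessors
 * above; values are illustrative only, and the full event contains many more fields that can
 * vary by Spark version:
 *
 *   "Task Metrics": {
 *     "Executor Deserialize Time": 12,
 *     "Disk Bytes Spilled": 0,
 *     "Input Metrics":         { "Bytes Read": 1048576 },
 *     "Output Metrics":        { "Bytes Written": 524288 },
 *     "Shuffle Write Metrics": { "Shuffle Bytes Written": 2048 },
 *     "Shuffle Read Metrics":  { "Local Bytes Read": 4096 }
 *   }
 */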