private void killJob() {
  LOG.debug("Killing Spark job with job handle id " + sparkJobHandleId);
  boolean needToKillJob = false;
  // Double-checked flag: only the first caller flips jobKilled and actually
  // issues the cancel, so the job is cancelled at most once.
  if (jobRef != null && !jobKilled) {
    synchronized (this) {
      if (!jobKilled) {
        jobKilled = true;
        needToKillJob = true;
      }
    }
  }
  if (needToKillJob) {
    try {
      jobRef.cancelJob();
    } catch (Exception e) {
      LOG.warn("Failed to kill Spark job", e);
    }
  }
}
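// A natural caller of the guarded kill above is the task's shutdown path. The
// override below is a minimal sketch, not part of the excerpt above; it assumes
// SparkTask extends Hive's Task base class, whose shutdown() hook it overrides.
@Override
public void shutdown() {
  super.shutdown();
  killJob();
}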
sparkJobID = jobRef.getJobId();
this.jobID = jobRef.getSparkJobStatus().getAppID();
rc = jobRef.monitorJob();
SparkJobStatus sparkJobStatus = jobRef.getSparkJobStatus();
getSparkJobInfo(sparkJobStatus, rc);
if (rc == 0) {
  sparkStatistics = sparkJobStatus.getSparkStatistics();
  if (LOG.isInfoEnabled() && sparkStatistics != null) {
    LOG.info(String.format("=====Spark Job[%s] statistics=====", jobRef.getJobId()));
    logSparkStatistic(sparkStatistics);
  }
} else if (rc == 2) {
  // Cancel the job if the monitor reported a job submission timeout.
  jobRef.cancelJob();
}
@Test
public void testHandleInterruptedException() throws Exception {
  HiveConf hiveConf = new HiveConf();

  SparkTask sparkTask = new SparkTask();
  sparkTask.setWork(mock(SparkWork.class));

  DriverContext mockDriverContext = mock(DriverContext.class);
  QueryState mockQueryState = mock(QueryState.class);
  when(mockQueryState.getConf()).thenReturn(hiveConf);

  sparkTask.initialize(mockQueryState, null, mockDriverContext, null);

  SparkJobStatus mockSparkJobStatus = mock(SparkJobStatus.class);
  when(mockSparkJobStatus.getMonitorError()).thenReturn(new InterruptedException());

  SparkSession mockSparkSession = mock(SparkSession.class);
  SparkJobRef mockSparkJobRef = mock(SparkJobRef.class);
  when(mockSparkJobRef.monitorJob()).thenReturn(2);
  when(mockSparkJobRef.getSparkJobStatus()).thenReturn(mockSparkJobStatus);
  when(mockSparkSession.submit(any(), any())).thenReturn(mockSparkJobRef);

  SessionState.start(hiveConf);
  SessionState.get().setSparkSession(mockSparkSession);

  sparkTask.execute(mockDriverContext);
  verify(mockSparkJobRef, atLeastOnce()).cancelJob();

  when(mockSparkJobStatus.getMonitorError()).thenReturn(
      new HiveException(new InterruptedException()));

  sparkTask.execute(mockDriverContext);
  verify(mockSparkJobRef, atLeastOnce()).cancelJob();
}
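// What the test above exercises: monitorJob() is stubbed to return 2 (a failed
// monitoring result) while getMonitorError() reports an InterruptedException,
// first bare and then wrapped in a HiveException; in both runs execute() is
// expected to cancel the job through the SparkJobRef, which the two
// verify(..., atLeastOnce()).cancelJob() calls assert.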
sparkJobHandleId = jobRef.getJobId();
addToHistory(Keys.SPARK_JOB_HANDLE_ID, jobRef.getJobId());
jobID = jobRef.getSparkJobStatus().getAppID();
rc = jobRef.monitorJob();
sparkJobID = jobRef.getSparkJobStatus().getJobId();
SparkJobStatus sparkJobStatus = jobRef.getSparkJobStatus();
getSparkJobInfo(sparkJobStatus);
setSparkException(sparkJobStatus, rc);
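// Note the ordering in this variant: the job handle id is available as soon as
// the job is submitted and is recorded before monitoring starts, whereas the
// Spark-assigned job id is only read back from the job status after
// monitorJob() returns.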
rc = jobRef.monitorJob();
SparkJobStatus sparkJobStatus = jobRef.getSparkJobStatus();
if (rc == 0) {
  sparkCounters = sparkJobStatus.getCounter();
  sparkStatistics = sparkJobStatus.getSparkStatistics();
  LOG.info(String.format("=====Spark Job[%s] statistics=====", jobRef.getJobId()));
  logSparkStatistic(sparkStatistics);
} else if (rc == 2) {
  // Cancel the job if the monitor reported a job submission timeout.
  jobRef.cancelJob();
}
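// Like the first monitoring excerpt above, this older variant calls
// jobRef.cancelJob() directly at the call site; the killJob() helper in the
// first snippet centralizes cancellation behind the jobKilled flag so the
// cancel is attempted at most once and any failure is only logged.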