/**
 * Verifies that SparkTask cancels the running Spark job when the job monitor
 * reports an interruption — both as a bare {@link InterruptedException} and as
 * an InterruptedException wrapped in a {@code HiveException}.
 */
@Test public void testHandleInterruptedException() throws Exception {
  HiveConf conf = new HiveConf();

  SparkTask task = new SparkTask();
  task.setWork(mock(SparkWork.class));

  DriverContext driverCtx = mock(DriverContext.class);
  QueryState queryState = mock(QueryState.class);
  when(queryState.getConf()).thenReturn(conf);
  task.initialize(queryState, null, driverCtx, null);

  // First run: the monitor surfaces a bare InterruptedException.
  SparkJobStatus jobStatus = mock(SparkJobStatus.class);
  when(jobStatus.getMonitorError()).thenReturn(new InterruptedException());

  SparkSession sparkSession = mock(SparkSession.class);
  SparkJobRef jobRef = mock(SparkJobRef.class);
  when(jobRef.monitorJob()).thenReturn(2);
  when(jobRef.getSparkJobStatus()).thenReturn(jobStatus);
  when(sparkSession.submit(any(), any())).thenReturn(jobRef);

  SessionState.start(conf);
  SessionState.get().setSparkSession(sparkSession);

  task.execute(driverCtx);
  verify(jobRef, atLeastOnce()).cancelJob();

  // Second run: the InterruptedException arrives wrapped in a HiveException.
  // NOTE(review): atLeastOnce() was already satisfied by the first execute(),
  // so this second verify cannot fail on its own; consider times(2) or
  // Mockito.clearInvocations(jobRef) between the runs to make it meaningful
  // (left unchanged here because those static imports are not visible in this
  // chunk).
  when(jobStatus.getMonitorError()).thenReturn(
      new HiveException(new InterruptedException()));
  task.execute(driverCtx);
  verify(jobRef, atLeastOnce()).cancelJob();
}
// Fragment — the enclosing method starts outside this view; 'rc' and 'jobRef'
// are declared elsewhere. Blocks on jobRef.monitorJob() and stores its integer
// result in 'rc'. NOTE(review): the meaning of the return code (presumably
// 0 == success, non-zero == failure/cancel) cannot be confirmed from here.
rc = jobRef.monitorJob();
// Fragment — enclosing method not visible. Records the Spark job id and the
// application id (via the job status) on outer-scope fields, monitors the job
// to completion into 'rc', then collects job info for that status/return code.
// NOTE(review): getSparkJobInfo() and the fields 'sparkJobID'/'jobID' are
// defined outside this chunk; their contracts cannot be confirmed from here.
sparkJobID = jobRef.getJobId(); this.jobID = jobRef.getSparkJobStatus().getAppID(); rc = jobRef.monitorJob(); SparkJobStatus sparkJobStatus = jobRef.getSparkJobStatus(); getSparkJobInfo(sparkJobStatus, rc);
// Fragment — enclosing method not visible and the 'if' body is truncated at
// the end of this chunk (unclosed brace preserved as-is). Monitors the job,
// fetches its status, and branches on rc == 0 — presumably the success path;
// the success-handling code lies beyond this view.
rc = jobRef.monitorJob(); SparkJobStatus sparkJobStatus = jobRef.getSparkJobStatus(); if (rc == 0) {