/**
 * Converts an exception raised by one of the pooled move tasks into a
 * {@link HiveException}, logs it, and shuts the pool down so no further
 * moves are attempted.
 *
 * @param pool executor running the move tasks; shut down unconditionally
 * @param e    the failure raised by a move task
 * @return the (possibly wrapped) HiveException describing the failure
 */
private static HiveException handlePoolException(ExecutorService pool, Exception e) {
  HiveException he = null;
  if (e instanceof HiveException) {
    he = (HiveException) e;
    if (he.getCanonicalErrorMsg() != ErrorMsg.GENERIC_ERROR) {
      if (he.getCanonicalErrorMsg() == ErrorMsg.UNRESOLVED_RT_EXCEPTION) {
        LOG.error("Failed to move: {}", he.getMessage());
      } else {
        LOG.error("Failed to move: {}", he.getRemoteErrorMsg());
      }
    }
  } else {
    LOG.error("Failed to move: {}", e.getMessage());
    // Wrap the exception itself rather than e.getCause(): the cause may be
    // null, which would silently drop the original message and stack trace.
    he = new HiveException(e);
  }
  pool.shutdownNow();
  return he;
}
/**
 * Records {@code e} as the failure of the current query (error message,
 * SQLState, downstream error), prints it to the console, and reports it by
 * throwing the corresponding CommandProcessorResponse.
 *
 * @param e       the internal Hive failure
 * @param ret     numeric return code for the response
 * @param rootMsg optional extra context appended to the error message; may be null
 * @throws CommandProcessorResponse always — the error response for this failure
 */
private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg)
    throws CommandProcessorResponse {
  errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
  if (rootMsg != null) {
    errorMessage += "\n" + rootMsg;
  }
  ErrorMsg canonical = e.getCanonicalErrorMsg();
  SQLState = canonical != null
      ? canonical.getSQLState()
      : ErrorMsg.findSQLState(e.getMessage());
  downstreamError = e;
  console.printError(
      errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  throw createProcessorResponse(ret);
}
private boolean requiresLock() {
/**
 * Records {@code e} as the failure of the current query (error message,
 * SQLState, downstream error), prints it to the console, and returns the
 * corresponding error CommandProcessorResponse.
 *
 * @param e       the internal Hive failure
 * @param ret     numeric return code for the response
 * @param rootMsg optional extra context appended to the error message; may be null
 * @return the error response built via {@link #createProcessorResponse}
 */
private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) {
  errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
  if (rootMsg != null) {
    errorMessage += "\n" + rootMsg;
  }
  ErrorMsg canonical = e.getCanonicalErrorMsg();
  SQLState = (canonical == null)
      ? ErrorMsg.findSQLState(e.getMessage())
      : canonical.getSQLState();
  downstreamError = e;
  console.printError(
      errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  return createProcessorResponse(ret);
}
private boolean requiresLock() {
/**
 * Builds the error CommandProcessorResponse for this query. When the
 * downstream failure is a HiveException with a canonical ErrorMsg, its
 * numeric error code is propagated on the response; otherwise a generic
 * error response is returned.
 *
 * @param ret numeric return code to carry on the response
 * @return the response describing the current failure state
 */
private CommandProcessorResponse createProcessorResponse(int ret) {
  SessionState.getPerfLogger().cleanupPerfLogMetrics();
  queryDisplay.setErrorMessage(errorMessage);
  // instanceof is false for null, so the previous explicit null check was redundant.
  if (downstreamError instanceof HiveException) {
    ErrorMsg em = ((HiveException) downstreamError).getCanonicalErrorMsg();
    if (em != null) {
      return new CommandProcessorResponse(ret, errorMessage, SQLState, schema, downstreamError,
          em.getErrorCode(), null);
    }
  }
  return new CommandProcessorResponse(ret, errorMessage, SQLState, downstreamError);
}
/**
 * Asserts that {@code ss.getHiveException(e)} maps the throwable to the
 * expected canonical ErrorMsg and, when {@code expectedMatchedStr} is given,
 * that the resulting message contains it.
 */
private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg,
    String expectedMatchedStr) {
  HiveException mapped = ss.getHiveException(e);
  assertEquals(expectedErrMsg, mapped.getCanonicalErrorMsg());
  if (expectedMatchedStr == null) {
    return;
  }
  assertTrue(mapped.getMessage().contains(expectedMatchedStr));
}
/**
 * When both a monitor error and a Spark job exception are reported, the task
 * must surface a HiveException with the SPARK_JOB_RUNTIME_ERROR canonical code.
 */
@Test
public void testSparkExceptionAndMonitorError() {
  SparkTask sparkTask = new SparkTask();
  SparkJobStatus mockSparkJobStatus = mock(SparkJobStatus.class);
  when(mockSparkJobStatus.getMonitorError()).thenReturn(new RuntimeException());
  when(mockSparkJobStatus.getSparkJobException()).thenReturn(
      new ExecutionException(new SparkException("")));
  sparkTask.setSparkException(mockSparkJobStatus, 3);
  Assert.assertTrue(sparkTask.getException() instanceof HiveException);
  // JUnit convention: expected value first, actual second, so a failure
  // message reads "expected:<...> but was:<...>" correctly.
  Assert.assertEquals(ErrorMsg.SPARK_JOB_RUNTIME_ERROR,
      ((HiveException) sparkTask.getException()).getCanonicalErrorMsg());
}
/**
 * A monitor timeout must surface as a HiveException carrying the
 * SPARK_JOB_MONITOR_TIMEOUT canonical code with the timeout value ("60s")
 * interpolated into the message.
 */
@Test
public void testSetSparkExceptionWithTimeoutError() {
  SparkTask sparkTask = new SparkTask();
  SparkJobStatus mockSparkJobStatus = mock(SparkJobStatus.class);
  when(mockSparkJobStatus.getMonitorError()).thenReturn(new HiveException(ErrorMsg
      .SPARK_JOB_MONITOR_TIMEOUT, Long.toString(60)));
  sparkTask.setSparkException(mockSparkJobStatus, 3);
  Assert.assertTrue(sparkTask.getException() instanceof HiveException);
  // JUnit convention: expected value first, actual second, so a failure
  // message reads "expected:<...> but was:<...>" correctly.
  Assert.assertEquals(ErrorMsg.SPARK_JOB_MONITOR_TIMEOUT,
      ((HiveException) sparkTask.getException()).getCanonicalErrorMsg());
  Assert.assertTrue(sparkTask.getException().getMessage().contains("60s"));
}
/**
 * Both MERGE and UPDATE statements that SET a column absent from the target
 * table must fail with INVALID_TARGET_COLUMN_IN_SET_CLAUSE.
 */
@Test
public void testSetClauseFakeColumn() throws Exception {
  CommandProcessorResponse cpr = runStatementOnDriverNegative("MERGE INTO "+ Table.ACIDTBL +
    " target USING " + Table.NONACIDORCTBL +
    "\n source ON target.a = source.a " +
    "\nWHEN MATCHED THEN UPDATE set t = 1");
  HiveException mergeFailure = (HiveException) cpr.getException();
  Assert.assertEquals(ErrorMsg.INVALID_TARGET_COLUMN_IN_SET_CLAUSE,
    mergeFailure.getCanonicalErrorMsg());
  cpr = runStatementOnDriverNegative("update " + Table.ACIDTBL + " set t = 1");
  HiveException updateFailure = (HiveException) cpr.getException();
  Assert.assertEquals(ErrorMsg.INVALID_TARGET_COLUMN_IN_SET_CLAUSE,
    updateFailure.getCanonicalErrorMsg());
}
@Test
/**
 * A Spark job failure that is neither a task error nor an OOM must surface as
 * SPARK_JOB_RUNTIME_ERROR, preserving the original Spark message text.
 */
@Test
public void testSetSparkExceptionWithJobError() {
  SparkTask sparkTask = new SparkTask();
  SparkJobStatus mockSparkJobStatus = mock(SparkJobStatus.class);
  ExecutionException ee = new ExecutionException("Exception thrown by job",
      new SparkException("Job aborted due to stage failure: Not a task or OOM error"));
  when(mockSparkJobStatus.getSparkJobException()).thenReturn(ee);
  sparkTask.setSparkException(mockSparkJobStatus, 3);
  Assert.assertTrue(sparkTask.getException() instanceof HiveException);
  // JUnit convention: expected value first, actual second, so a failure
  // message reads "expected:<...> but was:<...>" correctly.
  Assert.assertEquals(ErrorMsg.SPARK_JOB_RUNTIME_ERROR,
      ((HiveException) sparkTask.getException()).getCanonicalErrorMsg());
  Assert.assertTrue(sparkTask.getException().getMessage().contains("Not a task or OOM error"));
}
/**
 * A MERGE with two WHEN MATCHED clauses but no extra predicate on the first
 * must be rejected with MERGE_PREDIACTE_REQUIRED.
 * (MERGE_PREDIACTE_REQUIRED is the actual — misspelled — enum constant in ErrorMsg.)
 */
@Test
public void testMergeNegative() throws Exception {
  CommandProcessorResponse cpr = runStatementOnDriverNegative("MERGE INTO " + Table.ACIDTBL +
    " target USING " + Table.NONACIDORCTBL +
    " source\nON target.a = source.a " +
    "\nWHEN MATCHED THEN UPDATE set b = 1 " +
    "\nWHEN MATCHED THEN DELETE " +
    "\nWHEN NOT MATCHED AND a < 1 THEN INSERT VALUES(1,2)");
  HiveException failure = (HiveException) cpr.getException();
  Assert.assertEquals(ErrorMsg.MERGE_PREDIACTE_REQUIRED, failure.getCanonicalErrorMsg());
}
@Test
/**
 * A MERGE with two WHEN MATCHED ... UPDATE branches must be rejected with
 * MERGE_TOO_MANY_UPDATE.
 */
@Test
public void testMergeNegative2() throws Exception {
  CommandProcessorResponse cpr = runStatementOnDriverNegative("MERGE INTO "+ Table.ACIDTBL +
    " target USING " + Table.NONACIDORCTBL +
    "\n source ON target.pk = source.pk " +
    "\nWHEN MATCHED THEN UPDATE set b = 1 " +
    "\nWHEN MATCHED THEN UPDATE set b=a");
  HiveException failure = (HiveException) cpr.getException();
  Assert.assertEquals(ErrorMsg.MERGE_TOO_MANY_UPDATE, failure.getCanonicalErrorMsg());
}
/**
 * A Spark failure whose message indicates a YARN memory kill must surface as
 * SPARK_RUNTIME_OOM.
 */
@Test
public void testSetSparkExceptionWithOOMError() {
  SparkTask sparkTask = new SparkTask();
  SparkJobStatus mockSparkJobStatus = mock(SparkJobStatus.class);
  ExecutionException jobError = new ExecutionException(
      new SparkException("Container killed by YARN for exceeding memory limits"));
  when(mockSparkJobStatus.getSparkJobException()).thenReturn(jobError);
  sparkTask.setSparkException(mockSparkJobStatus, 3);
  Assert.assertTrue(sparkTask.getException() instanceof HiveException);
  // JUnit convention: expected value first, actual second, so a failure
  // message reads "expected:<...> but was:<...>" correctly.
  Assert.assertEquals(ErrorMsg.SPARK_RUNTIME_OOM,
      ((HiveException) sparkTask.getException()).getCanonicalErrorMsg());
}
int errorCode = 1; if (he.getCanonicalErrorMsg() != ErrorMsg.GENERIC_ERROR) { errorCode = he.getCanonicalErrorMsg().getErrorCode(); if (he.getCanonicalErrorMsg() == ErrorMsg.UNRESOLVED_RT_EXCEPTION) { console.printError("Failed with exception " + he.getMessage(), "\n" + StringUtils.stringifyException(he));
HiveException rc = (HiveException) downstreamError; mdf.error(ss.out, errorMessage, rc.getCanonicalErrorMsg().getErrorCode(), SQLState, rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ? org.apache.hadoop.util.StringUtils.stringifyException(rc) : null);
if (e instanceof HiveException) { HiveException he = (HiveException) e; rc = he.getCanonicalErrorMsg().getErrorCode(); } else { rc = 1;
HiveException rc = (HiveException) downstreamError; mdf.error(ss.out, errorMessage, rc.getCanonicalErrorMsg().getErrorCode(), SQLState, rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ? org.apache.hadoop.util.StringUtils.stringifyException(rc) : null);
getCanonicalErrorMsg(); if (errorMsg != ErrorMsg.GENERIC_ERROR) { SQLState = errorMsg.getSQLState();
HiveException rc = (HiveException) downstreamError; mdf.error(ss.out, errorMessage, rc.getCanonicalErrorMsg().getErrorCode(), SQLState, rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ? org.apache.hadoop.util.StringUtils.stringifyException(rc) : null);