@VisibleForTesting int close(TezWork work, int rc, DAGClient dagClient) { try { List<BaseWork> ws = work.getAllWork(); for (BaseWork w: ws) { if (w instanceof MergeJoinWork) { w = ((MergeJoinWork) w).getMainWork(); } for (Operator<?> op: w.getAllOperators()) { op.jobClose(conf, rc == 0); } } } catch (Exception e) { // jobClose needs to execute successfully otherwise fail task if (rc == 0) { rc = 3; String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'"; console.printError(mesg, "\n" + StringUtils.stringifyException(e)); } } if (dagClient != null) { // null in tests closeDagClientWithoutEx(dagClient); } return rc; }
String msg = " with exception '" + Utilities.getNameMessage(e) + "'"; msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;
/**
 * Records a {@link HiveException} as the driver's current failure state and aborts.
 * Sets {@code errorMessage} (optionally appending {@code rootMsg}), {@code SQLState},
 * and {@code downstreamError}, prints the full stack trace to the console, and then
 * throws a {@code CommandProcessorResponse} carrying the given return code.
 *
 * @param e       the internal error being reported
 * @param ret     the return code to embed in the thrown response
 * @param rootMsg optional extra context appended to the error message; may be null
 * @throws CommandProcessorResponse always — the declared return value is never produced
 */
private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) throws CommandProcessorResponse {
  errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
  if(rootMsg != null) {
    errorMessage += "\n" + rootMsg;
  }
  // Prefer the canonical error's SQLState; otherwise derive one from the message text.
  SQLState = e.getCanonicalErrorMsg() != null ?
      e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage());
  downstreamError = e;
  console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  throw createProcessorResponse(ret);
}

private boolean requiresLock() {
/** * Close will move the temp files into the right place for the fetch * task. If the job has failed it will clean up the files. */ private int close(int rc) { try { List<BaseWork> ws = work.getAllWork(); for (BaseWork w: ws) { for (Operator<?> op: w.getAllOperators()) { op.jobClose(conf, rc == 0); } } } catch (Exception e) { // jobClose needs to execute successfully otherwise fail task if (rc == 0) { rc = 3; String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'"; console.printError(mesg, "\n" + StringUtils.stringifyException(e)); setException(e); } } return rc; }
/**
 * Records a {@link HiveException} as the driver's current failure state.
 * Sets {@code errorMessage} (optionally appending {@code rootMsg}), {@code SQLState},
 * and {@code downstreamError}, prints the full stack trace to the console, and
 * returns a {@code CommandProcessorResponse} carrying the given return code.
 *
 * @param e       the internal error being reported
 * @param ret     the return code to embed in the returned response
 * @param rootMsg optional extra context appended to the error message; may be null
 * @return a processor response built from {@code ret}
 */
private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) {
  errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
  if(rootMsg != null) {
    errorMessage += "\n" + rootMsg;
  }
  // Prefer the canonical error's SQLState; otherwise derive one from the message text.
  SQLState = e.getCanonicalErrorMsg() != null ?
      e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage());
  downstreamError = e;
  console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  return createProcessorResponse(ret);
}

private boolean requiresLock() {
/** * Close will move the temp files into the right place for the fetch * task. If the job has failed it will clean up the files. */ private int close(int rc) { try { List<BaseWork> ws = work.getAllWork(); for (BaseWork w: ws) { for (Operator<?> op: w.getAllOperators()) { op.jobClose(conf, rc == 0); } } } catch (Exception e) { // jobClose needs to execute successfully otherwise fail task if (rc == 0) { rc = 3; String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'"; console.printError(mesg, "\n" + StringUtils.stringifyException(e)); setException(e); } } return rc; }
int close(TezWork work, int rc) { try { List<BaseWork> ws = work.getAllWork(); for (BaseWork w: ws) { if (w instanceof MergeJoinWork) { w = ((MergeJoinWork) w).getMainWork(); } for (Operator<?> op: w.getAllOperators()) { op.jobClose(conf, rc == 0); } } } catch (Exception e) { // jobClose needs to execute successfully otherwise fail task if (rc == 0) { rc = 3; String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'"; console.printError(mesg, "\n" + StringUtils.stringifyException(e)); } } closeDagClientWithoutEx(); return rc; }
String msg = " with exception '" + Utilities.getNameMessage(e) + "'"; msg = "Failed to monitor Job[ " + sparkJobStatus.getJobId() + "]" + msg;
String msg = " with exception '" + Utilities.getNameMessage(e) + "'"; msg = "Failed to monitor Job[ " + sparkJobStatus.getJobId() + "]" + msg;
String mesg = " with exception '" + Utilities.getNameMessage(e) + "'"; if (rj != null) { mesg = "Ended Job = " + rj.getJobID() + mesg; returnVal = 3; String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'"; console.printError(mesg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
e.printStackTrace(); setException(e); String mesg = " with exception '" + Utilities.getNameMessage(e) + "'"; if (rj != null) { mesg = "Ended Job = " + rj.getJobID() + mesg;
LOG.warn("Interrupted while monitoring the Hive on Spark application, exiting"); } else { String msg = " with exception '" + Utilities.getNameMessage(e) + "' Last known state = " + (state != null ? state.name() : "UNKNOWN"); msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;
String mesg = " with exception '" + Utilities.getNameMessage(e) + "'"; if (rj != null) { mesg = "Ended Job = " + rj.getJobID() + mesg; returnVal = 3; String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'"; console.printError(mesg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
e.printStackTrace(); setException(e); String mesg = " with exception '" + Utilities.getNameMessage(e) + "'"; if (rj != null) { mesg = "Ended Job = " + rj.getJobID() + mesg; success = false; returnVal = 3; String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'"; console.printError(mesg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
String msg = "Failed to execute spark task, with exception '" + Utilities.getNameMessage(e) + "'";
e.printStackTrace(); setException(e); String mesg = " with exception '" + Utilities.getNameMessage(e) + "'"; if (rj != null) { mesg = "Ended Job = " + rj.getJobID() + mesg;
hookRunner.runPreDriverHooks(hookContext); } catch (Exception e) { errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); SQLState = ErrorMsg.findSQLState(e.getMessage()); downstreamError = e; hookRunner.runPostDriverHooks(hookContext); } catch (Exception e) { errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); SQLState = ErrorMsg.findSQLState(e.getMessage()); downstreamError = e;
errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); if (hookContext != null) { try {
errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); if (hookContext != null) { try {
errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); SQLState = ErrorMsg.findSQLState(e.getMessage()); downstreamError = e; errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); SQLState = ErrorMsg.findSQLState(e.getMessage()); downstreamError = e;