public void localJobDebugger(int exitVal, String taskId) {
  StringBuilder sb = new StringBuilder();
  sb.append("\n");
  sb.append("Task failed!\n");
  sb.append("Task ID:\n " + taskId + "\n\n");
  sb.append("Logs:\n");
  console.printError(sb.toString());
  console.printError(LogUtils.getLogFilePath());
}
public void localJobDebugger(int exitVal, String taskId) {
  StringBuilder sb = new StringBuilder();
  sb.append("\n");
  sb.append("Task failed!\n");
  sb.append("Task ID:\n " + taskId + "\n\n");
  sb.append("Logs:\n");
  console.printError(sb.toString());
  for (Appender appender : ((Logger) LogManager.getRootLogger()).getAppenders().values()) {
    if (appender instanceof FileAppender) {
      console.printError(((FileAppender) appender).getFileName());
    } else if (appender instanceof RollingFileAppender) {
      console.printError(((RollingFileAppender) appender).getFileName());
    }
  }
}
public void run() {
  try {
    diagnosticMesg = showJobFailDebugInfo();
  } catch (IOException e) {
    console.printError(e.getMessage());
  }
}
private int dropPermanentFunction(Hive db, DropFunctionDesc dropFunctionDesc) {
  try {
    String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(
        dropFunctionDesc.getFunctionName());
    String dbName = qualifiedNameParts[0];
    String funcName = qualifiedNameParts[1];
    String registeredName = FunctionUtils.qualifyFunctionName(funcName, dbName);
    FunctionRegistry.unregisterPermanentFunction(registeredName);
    db.dropFunction(dbName, funcName);
    return 0;
  } catch (Exception e) {
    // In the repl load flow, the function may not exist yet during the first incremental
    // phase, so just return success.
    if (dropFunctionDesc.getReplicationSpec().isInReplicationScope()
        && (e.getCause() instanceof NoSuchObjectException)) {
      LOG.info("Drop function is idempotent as function: "
          + dropFunctionDesc.getFunctionName() + " doesn't exist.");
      return 0;
    }
    LOG.info("drop function: ", e);
    console.printError("FAILED: error during drop function: " + StringUtils.stringifyException(e));
    return 1;
  }
}
public int progressLocal(Process runningJob, String taskId) {
  int exitVal = -101;
  try {
    exitVal = runningJob.waitFor(); //TODO: poll periodically
  } catch (InterruptedException e) {
    // ignore the interrupt and fall through with the default failure exit value
  }
  if (exitVal != 0) {
    console.printError("Execution failed with exit status: " + exitVal);
    console.printError("Obtaining error information");
    if (HiveConf.getBoolVar(job, HiveConf.ConfVars.SHOW_JOB_FAIL_DEBUG_INFO)) {
      // Since local jobs are run sequentially, all relevant information is already available,
      // so there is no need to fetch job debug info asynchronously.
      localJobDebugger(exitVal, taskId);
    }
  } else {
    console.printInfo("Execution completed successfully");
    console.printInfo("MapredLocal task succeeded");
  }
  return exitVal;
}
private int dropPermanentFunction(Hive db, DropFunctionDesc dropFunctionDesc) {
  try {
    String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(
        dropFunctionDesc.getFunctionName());
    String dbName = qualifiedNameParts[0];
    String funcName = qualifiedNameParts[1];
    String registeredName = FunctionUtils.qualifyFunctionName(funcName, dbName);
    FunctionRegistry.unregisterPermanentFunction(registeredName);
    db.dropFunction(dbName, funcName);
    return 0;
  } catch (Exception e) {
    LOG.info("drop function: " + StringUtils.stringifyException(e));
    console.printError("FAILED: error during drop function: " + StringUtils.stringifyException(e));
    return 1;
  }
}
public void printError(String error) {
  printError(error, null);
}
static boolean unregisterJar(List<String> jarsToUnregister) {
  LogHelper console = getConsole();
  try {
    Utilities.removeFromClassPath(jarsToUnregister.toArray(new String[0]));
    console.printInfo("Deleted " + jarsToUnregister + " from class path");
    return true;
  } catch (IOException e) {
    console.printError("Unable to unregister " + jarsToUnregister
        + "\nException: " + e.getMessage(),
        "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
    return false;
  }
}
private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) {
  errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
  if (rootMsg != null) {
    errorMessage += "\n" + rootMsg;
  }
  SQLState = e.getCanonicalErrorMsg() != null ?
      e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage());
  downstreamError = e;
  console.printError(errorMessage + "\n"
      + org.apache.hadoop.util.StringUtils.stringifyException(e));
  return createProcessorResponse(ret);
}
public void run() {
  try {
    getTaskInfos();
  } catch (Exception e) {
    console.printError(e.getMessage());
  }
}
private int handleInterruption(String msg) {
  SQLState = "HY008"; // SQLState for cancel operation
  errorMessage = "FAILED: command has been interrupted: " + msg;
  console.printError(errorMessage);
  return 1000;
}
/**
 * Log an error to the console if available.
 */
private void log(String error) {
  LogHelper console = SessionState.getConsole();
  if (console != null) {
    console.printError(error);
  }
}
private int handleInterruptionWithHook(String msg, HookContext hookContext, PerfLogger perfLogger) {
  SQLState = "HY008"; // SQLState for cancel operation
  errorMessage = "FAILED: command has been interrupted: " + msg;
  console.printError(errorMessage);
  if (hookContext != null) {
    try {
      invokeFailureHooks(perfLogger, hookContext, errorMessage, null);
    } catch (Exception e) {
      LOG.warn("Caught exception attempting to invoke Failure Hooks", e);
    }
  }
  return 1000;
}
private String getUserFromUGI() {
  // Don't use the userName member, as it may or may not have been set. Get the value from
  // conf, which calls into getUGI to figure out who the process is running as.
  try {
    return conf.getUser();
  } catch (IOException e) {
    errorMessage = "FAILED: Error in determining user while acquiring locks: " + e.getMessage();
    SQLState = ErrorMsg.findSQLState(e.getMessage());
    downstreamError = e;
    console.printError(errorMessage,
        "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  }
  return null;
}
private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg)
    throws CommandProcessorResponse {
  errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
  if (rootMsg != null) {
    errorMessage += "\n" + rootMsg;
  }
  SQLState = e.getCanonicalErrorMsg() != null ?
      e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage());
  downstreamError = e;
  console.printError(errorMessage + "\n"
      + org.apache.hadoop.util.StringUtils.stringifyException(e));
  throw createProcessorResponse(ret);
}
/**
 * Logs an error into the log file, and also into the HiveServer2 or HiveCli error stream.
 * BeeLine uses the operation log file to show the logs to the user, so depending on the
 * BeeLine settings it could be shown to the user.
 * @param error The log message
 */
public void printError(String error) {
  printError(error, null);
}
/**
 * Log an error to the console if available.
 */
private static void log(String error) {
  LogHelper console = SessionState.getConsole();
  if (console != null) {
    console.printError(error);
  }
}