/**
 * Formats the given ErrorMsg with the supplied arguments, then verifies that
 * the formatted string can be mapped back to the same canonical ErrorMsg
 * (round-trip check on ErrorMsg.getErrorMsg(String)).
 */
private void testReverseMatch(ErrorMsg errorMsg, String... args) {
  String formatted = errorMsg.format(args);
  ErrorMsg roundTripped = ErrorMsg.getErrorMsg(formatted);
  Assert.assertEquals("Didn't find expected msg",
      errorMsg.getErrorCode(), roundTripped.getErrorCode());
}
}
/**
 * Ensures {@code name} has not already been used as a column alias, then
 * records it in {@code nameSet}.
 *
 * @param nameSet the set of aliases seen so far; mutated by this call
 * @param name    the candidate alias
 * @throws SemanticException if the alias is already present in the set
 */
private void failIfColAliasExists(Set<String> nameSet, String name) throws SemanticException {
  // Set.add returns false when the element is already present, so one call
  // both detects the duplicate and records the new alias (the original did
  // a contains() lookup followed by a second add() lookup).
  if (!nameSet.add(name)) {
    throw new SemanticException(ErrorMsg.COLUMN_ALIAS_ALREADY_EXISTS.getMsg(name));
  }
}
/**
 * Given an error code, returns the ErrorMsg object associated with it.
 *
 * @param errorCode An error code
 * @return the matching ErrorMsg, or {@code null} when no enum constant
 *         carries that code
 */
public static ErrorMsg getErrorMsg(int errorCode) {
  ErrorMsg match = null;
  for (ErrorMsg candidate : values()) {
    if (candidate.getErrorCode() == errorCode) {
      match = candidate;
      break;
    }
  }
  return match;
}
/**
 * Builds a response from a canonical ErrorMsg: formats the message with the
 * given arguments and propagates the ErrorMsg's SQLState and error code to
 * the main constructor.
 *
 * @param responseCode    process-level response code
 * @param canonicalErrMsg canonical error message supplying text, SQLState and code
 * @param t               underlying cause, may be null
 * @param msgArgs         arguments for the parametrized message, if any
 */
public CommandProcessorResponse(int responseCode, ErrorMsg canonicalErrMsg, Throwable t, String ... msgArgs) {
  this(responseCode, canonicalErrMsg.format(msgArgs), canonicalErrMsg.getSQLState(),
      null, t, canonicalErrMsg.getErrorCode(), null);
}
/**
 * For a given error message string, searches for a <code>ErrorMsg</code> enum
 * that appears to be a match. If a match is found, returns the
 * <code>SQLState</code> associated with the <code>ErrorMsg</code>. If a match
 * is not found or <code>ErrorMsg</code> has no <code>SQLState</code>, returns
 * the <code>SQLState</code> bound to the <code>GENERIC_ERROR</code>
 * <code>ErrorMsg</code>.
 *
 * @param mesg An error message string
 * @return SQLState
 */
public static String findSQLState(String mesg) {
  // getErrorMsg(String) resolves to GENERIC_ERROR when nothing matches,
  // so the lookup result can be dereferenced directly.
  return getErrorMsg(mesg).getSQLState();
}
/**
 * Runs a replication dump: bootstrap when this is the first dump for the
 * policy, incremental otherwise. On success publishes the dump location and
 * the last replicated event id as the task's return values and returns 0;
 * on failure records the exception and returns a canonical error code.
 */
@Override
protected int execute(DriverContext driverContext) {
  try {
    Hive hiveDb = getHive();
    // Root directory for this dump, under the configured replication dir.
    Path dumpRoot = new Path(conf.getVar(HiveConf.ConfVars.REPLDIR), getNextDumpDir());
    DumpMetaData dmd = new DumpMetaData(dumpRoot, conf);
    // Change-management root used to preserve files referenced by the dump.
    Path cmRoot = new Path(conf.getVar(HiveConf.ConfVars.REPLCMDIR));
    Long lastReplId;
    if (work.isBootStrapDump()) {
      lastReplId = bootStrapDump(dumpRoot, dmd, cmRoot, hiveDb);
    } else {
      lastReplId = incrementalDump(dumpRoot, dmd, cmRoot, hiveDb);
    }
    // Expose (dump location, last event id) to the caller of the task.
    prepareReturnValues(Arrays.asList(dumpRoot.toUri().toString(), String.valueOf(lastReplId)));
  } catch (Exception e) {
    LOG.error("failed", e);
    setException(e);
    // Map the exception text back to a canonical error code;
    // getErrorMsg(String) falls back to a generic error rather than
    // returning null for unrecognized messages.
    return ErrorMsg.getErrorMsg(e.getMessage()).getErrorCode();
  }
  return 0;
}
/**
 * Asserts that exactly {@code count} of the given responses failed with the
 * COMPILE_LOCK_TIMED_OUT error code.
 */
private void verifyThatTimedOutCompileOpsCount(List<CommandProcessorResponse> responseList, int count) {
  verifyErrorCount(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode(), is(equalTo(count)), responseList);
}
// Fragment of a larger error-reporting path (enclosing method not visible here).
// Map the exception text back to a canonical ErrorMsg; when a specific match
// exists, decorate the failure message with its error code, record SQLState
// and the causing throwable, print the stack trace to the console, and return
// the canonical error code.
ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
errorMessage = "FAILED: " + e.getClass().getSimpleName();
if (error != ErrorMsg.GENERIC_ERROR) {
  errorMessage += " [Error " + error.getErrorCode() + "]:";
  SQLState = error.getSQLState();
  downstreamError = e;
  console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  return error.getErrorCode();//todo: this is bad if returned as cmd shell exit
/**
 * This is the recommended constructor to use since it helps use
 * canonical messages throughout and propagate remote errors.
 *
 * @param cause underlying cause of the violation, may be null
 * @param errorMsg Canonical error message
 * @param msgArgs message arguments if message is parametrized; must be {@code null} if message takes no arguments
 */
public DataConstraintViolationError(Throwable cause, ErrorMsg errorMsg, String... msgArgs) {
  super(errorMsg.format(msgArgs), cause);
  // Keep the canonical ErrorMsg so callers can recover the error code/SQLState.
  canonicalErrorMsg = errorMsg;
}
private void setErrorMsgAndDetail(int exitVal, Throwable downstreamError, Task tsk) { this.downstreamError = downstreamError; errorMessage = "FAILED: Execution Error, return code " + exitVal + " from " + tsk.getClass().getName(); if(downstreamError != null) { //here we assume that upstream code may have parametrized the msg from ErrorMsg //so we want to keep it errorMessage += ". " + downstreamError.getMessage(); } else { ErrorMsg em = ErrorMsg.getErrorMsg(exitVal); if (em != null) { errorMessage += ". " + em.getMsg(); } } }
/**
 * Reduces a JSON table listing to its first table entry.
 * Error payloads are passed through unchanged; when the "tables" list is
 * absent or empty, returns an INVALID_TABLE error document naming
 * {@code table}.
 *
 * @param json  JSON response expected to contain a "tables" list
 * @param table table name used in the error message when the list is empty
 * @return JSON for the single table, the original error JSON, or an
 *         INVALID_TABLE error document
 * @throws IOException on JSON (de)serialization failure
 */
private String singleTable(String json, String table) throws IOException {
  Map obj = JsonBuilder.jsonToMap(json);
  // An upstream error document is returned verbatim.
  if (JsonBuilder.isError(obj))
    return json;
  List tables = (List) obj.get("tables");
  if (TempletonUtils.isset(tables))
    return JsonBuilder.mapToJson(tables.get(0));
  else {
    return JsonBuilder
        .createError(ErrorMsg.INVALID_TABLE.format(table),
            ErrorMsg.INVALID_TABLE.getErrorCode()).buildJson();
  }
}
/**
 * Always throws: signals that an ACID table is being read through the wrong
 * input format. The message depends on whether HiveInputFormat is the
 * configured input format.
 *
 * @param conf configuration supplying HIVEINPUTFORMAT
 * @throws IOException always, with the appropriate error-coded message
 */
public static void raiseAcidTablesMustBeReadWithAcidReaderException(Configuration conf) throws IOException {
  String configuredInputFormat = HiveConf.getVar(conf, ConfVars.HIVEINPUTFORMAT);
  ErrorMsg msg = configuredInputFormat.equals(HiveInputFormat.class.getName())
      ? ErrorMsg.ACID_TABLES_MUST_BE_READ_WITH_ACID_READER
      : ErrorMsg.ACID_TABLES_MUST_BE_READ_WITH_HIVEINPUTFORMAT;
  throw new IOException(msg.getErrorCodedMsg());
}
// Fragment from the compile-lock acquisition path (enclosing method not
// visible here): record the canonical error-coded timeout message and abort
// compilation with the matching error code.
errorMessage = ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCodedMsg();
throw createProcessorResponse(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode());
/**
 * Appends "Line &lt;line&gt;:&lt;col&gt;" for the given AST node to {@code sb},
 * using the position of the node's left-most leaf token (see getLine /
 * getCharPositionInLine).
 */
private static void renderPosition(StringBuilder sb, ASTNode tree) {
  sb.append("Line ");
  sb.append(getLine(tree));
  sb.append(":");
  sb.append(getCharPositionInLine(tree));
}

public static String renderPosition(ASTNode n) {
/**
 * Converts a HiveException into driver error state and a thrown
 * CommandProcessorResponse with the given return code. Prefers the
 * exception's canonical ErrorMsg for SQLState, falling back to a
 * message-text lookup; optionally appends {@code rootMsg} to the failure
 * message.
 *
 * @param e       the internal error being reported
 * @param ret     return code to embed in the thrown response
 * @param rootMsg extra context line, may be null
 * @throws CommandProcessorResponse always
 */
private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) throws CommandProcessorResponse {
  errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
  if (rootMsg != null) {
    errorMessage += "\n" + rootMsg;
  }
  // Canonical ErrorMsg wins when present; otherwise derive SQLState from the text.
  SQLState = e.getCanonicalErrorMsg() != null ?
      e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage());
  downstreamError = e;
  console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  throw createProcessorResponse(ret);
}

private boolean requiresLock() {
/**
 * Resolves the user the process is running as, for lock acquisition.
 * On failure, records the error state (message, SQLState, cause), prints the
 * stack trace to the console, and returns null.
 *
 * @return the effective user name, or null if it could not be determined
 */
private String getUserFromUGI() {
  // Don't use the userName member, as it may or may not have been set. Get the value from
  // conf, which calls into getUGI to figure out who the process is running as.
  try {
    return conf.getUser();
  } catch (IOException e) {
    errorMessage = "FAILED: Error in determining user while acquiring locks: " + e.getMessage();
    SQLState = ErrorMsg.findSQLState(e.getMessage());
    downstreamError = e;
    console.printError(errorMessage,
        "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
  }
  return null;
}
static private HiveException getHiveException(Exception e, String msg, String logMsg) { // The message from remote exception includes the entire stack. The error thrown from // hive based on the remote exception needs only the first line. String hiveErrMsg = null; if (e.getMessage() != null) { hiveErrMsg = String.format("%s%s%s", msg, ": ", Splitter.on(System.getProperty("line.separator")).split(e.getMessage()).iterator() .next()); } else { hiveErrMsg = msg; } ErrorMsg errorMsg = ErrorMsg.getErrorMsg(e); if (logMsg != null) { LOG.info(String.format(logMsg, e.getMessage())); } if (errorMsg != ErrorMsg.UNRESOLVED_RT_EXCEPTION) { return new HiveException(e, e.getMessage(), errorMsg, hiveErrMsg); } else { return new HiveException(msg, e); } }
/**
 * Returns the line number of the given node's left-most leaf token,
 * descending through first children until a leaf is reached.
 */
private static int getLine(ASTNode tree) {
  ASTNode node = tree;
  while (node.getChildCount() != 0) {
    node = (ASTNode) node.getChild(0);
  }
  return node.getToken().getLine();
}
/**
 * Returns the in-line character position of the given node's left-most leaf
 * token, descending through first children until a leaf is reached.
 */
private static int getCharPositionInLine(ASTNode tree) {
  ASTNode node = tree;
  while (node.getChildCount() != 0) {
    node = (ASTNode) node.getChild(0);
  }
  return node.getToken().getCharPositionInLine();
}