/**
 * Compiles the given command (without transaction-list cleanup) and wraps the
 * resulting status code in a {@code CommandProcessorResponse} for the caller.
 */
public CommandProcessorResponse compileAndRespond(String command) {
  int compileStatus = compileInternal(command, false);
  return createProcessorResponse(compileStatus);
}
/**
 * Fails fast if the driver has been aborted: runs the interruption hook and
 * throws the resulting response. No-op when the driver state is not aborted.
 */
private void checkInterrupted(String msg, HookContext hookContext, PerfLogger perfLogger) throws CommandProcessorResponse {
  if (!lDrvState.isAborted()) {
    return;
  }
  throw createProcessorResponse(handleInterruptionWithHook(msg, hookContext, perfLogger));
}
public CommandProcessorResponse compileAndRespond(String command, boolean cleanupTxnList) { try { compileInternal(command, false); return createProcessorResponse(0); } catch (CommandProcessorResponse e) { return e; } finally { if (cleanupTxnList) { // Valid txn list might be generated for a query compiled using this // command, thus we need to reset it conf.unset(ValidTxnList.VALID_TXNS_KEY); } } }
/**
 * Opens an implicit transaction for the current query when concurrency support
 * is enabled and one is required. Does nothing if a transaction is already open.
 *
 * @throws CommandProcessorResponse (code 10) when the UGI user cannot be resolved
 * @throws LockException propagated from the transaction manager
 */
private void openTransaction() throws LockException, CommandProcessorResponse {
  if (!checkConcurrency() || !startImplicitTxn(queryTxnMgr)) {
    return;
  }
  String txnUser = getUserFromUGI();
  if (queryTxnMgr.isTxnOpen()) {
    return;
  }
  if (txnUser == null) {
    // Could not determine the user to open the txn as; abort with error code 10.
    throw createProcessorResponse(10);
  }
  queryTxnMgr.openTxn(ctx, txnUser);
}
// Converts a HiveException into a driver failure: records the error message and
// SQLState, logs the full stack trace to the console, and throws the
// CommandProcessorResponse built from the supplied return code (this variant
// never returns normally despite its declared return type).
// NOTE(review): this line appears to be an extracted excerpt — it ends with the
// opening brace of requiresLock(), whose body is not visible here; verify
// against the full file before editing.
private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) throws CommandProcessorResponse { errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); if(rootMsg != null) { errorMessage += "\n" + rootMsg; } SQLState = e.getCanonicalErrorMsg() != null ? e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage()); downstreamError = e; console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); throw createProcessorResponse(ret); } private boolean requiresLock() {
// Converts a HiveException into a driver failure: records the error message and
// SQLState, logs the full stack trace to the console, and RETURNS the
// CommandProcessorResponse built from the supplied return code (unlike the
// throwing variant elsewhere in this file, this one returns normally).
// NOTE(review): this line appears to be an extracted excerpt — it ends with the
// opening brace of requiresLock(), whose body is not visible here; verify
// against the full file before editing.
private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) { errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); if(rootMsg != null) { errorMessage += "\n" + rootMsg; } SQLState = e.getCanonicalErrorMsg() != null ? e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage()); downstreamError = e; console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return createProcessorResponse(ret); } private boolean requiresLock() {
// NOTE(review): not a complete method — this line reads as a stitched excerpt.
// The console.printError and second throw after the first
// `throw createProcessorResponse(10)` are unreachable as written, and the
// trailing `finally` (ending the ACQUIRE_READ_WRITE_LOCKS perf span) has no
// visible matching `try` in this excerpt. Confirm against the full source
// before making any change here.
String userFromUGI = getUserFromUGI(); if(userFromUGI == null) { throw createProcessorResponse(10); console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); throw createProcessorResponse(10); } finally { perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ACQUIRE_READ_WRITE_LOCKS);
// Fails the command when the compile lock could not be acquired in time.
// NOTE(review): single-statement excerpt; the enclosing method is not visible here.
throw createProcessorResponse(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode());
// NOTE(review): fragment stitched from several error-handling sites (cancelled
// precompiled query, generic failure logging, rollback paths). Statements
// following each `return` are unreachable as written and braces are unbalanced —
// this excerpt is not a compilable unit and cannot be edited safely in
// isolation; confirm against the full Driver source.
errorMessage = "FAILED: Precompiled query has been cancelled or closed."; console.printError(errorMessage); return createProcessorResponse(12); console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return createProcessorResponse(12); return createProcessorResponse(ret); return rollback(createProcessorResponse(ret)); if (ret != 0) { return rollback(createProcessorResponse(ret)); console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return createProcessorResponse(12); return createProcessorResponse(ret); } finally { if (isInterrupted()) {
// NOTE(review): fragment — the throwing counterpart of the cancelled-query
// handling (response code 12). Every statement after the first
// `throw createProcessorResponse(12)` is unreachable as written; this reads as
// a stitched diff excerpt, not a compilable unit. Confirm against the full source.
errorMessage = "FAILED: Precompiled query has been cancelled or closed."; console.printError(errorMessage); throw createProcessorResponse(12); console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); throw createProcessorResponse(12); console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); throw createProcessorResponse(12);
// NOTE(review): fragment covering the unexpected-driver-state (code 1000),
// task-exit-value, and failure-hook paths. Code after each `throw` is
// unreachable as written, braces are unbalanced, and the trailing `finally`
// has no visible `try` — appears to be a diff excerpt rather than a compilable
// unit. Confirm against the full source before editing.
errorMessage = "FAILED: unexpected driverstate: " + lDrvState + ", for query " + queryStr; console.printError(errorMessage); throw createProcessorResponse(1000); } else { lDrvState.driverState = DriverState.EXECUTING; throw createProcessorResponse(exitVal); invokeFailureHooks(perfLogger, hookContext, errorMessage, null); console.printError(errorMessage); throw createProcessorResponse(1000); console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); throw createProcessorResponse(12); } finally {
// NOTE(review): authorization-failure fragment — records SQLState "42000" and
// throws response code 403, with the DO_AUTHORIZATION perf span closed in a
// `finally` whose matching `try` is not visible here; a second log/throw site
// follows. Unbalanced braces; not a compilable unit in isolation.
errorMessage = authExp.getMessage(); SQLState = "42000"; throw createProcessorResponse(403); } finally { perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DO_AUTHORIZATION); console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); throw createProcessorResponse(error.getErrorCode()); } finally {
// NOTE(review): fragment — a success return (code 0) followed by the start of a
// catch clause whose enclosing `try` is not visible in this excerpt.
return createProcessorResponse(0); } catch (CommandProcessorResponse cpr) {
/**
 * Compiles the given command and wraps the resulting status code in a
 * {@code CommandProcessorResponse} for the caller.
 */
public CommandProcessorResponse compileAndRespond(String command) {
  int compileStatus = compileInternal(command);
  return createProcessorResponse(compileStatus);
}
// NOTE(review): fragment stitched from multiple return sites around
// compileInternal (cancellation code 12 and compile-status propagation).
// Several `return` statements are unreachable as written and braces are
// unbalanced — not a compilable unit; confirm against the full source before
// making any change here.
return createProcessorResponse(12); console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return createProcessorResponse(12); ret = compileInternal(command); if (ret != 0) { return createProcessorResponse(ret); return createProcessorResponse(ret); return createProcessorResponse(ret); console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return createProcessorResponse(12); console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return createProcessorResponse(12); return createProcessorResponse(ret);