// NOTE(review): truncated fragment — only the signature and the first statement of a
// synchronized cleanup() survive on this line; the rest of the body and its closing
// brace are missing from this chunk. Complete cleanup() definitions appear later in
// the file. Do not treat this as a compilable method.
private synchronized void cleanup(OperationState state) throws HiveSQLException { setState(state);
// NOTE(review): truncated fragment of runInternal() — the middle of the method is
// missing; the `} catch (RejectedExecutionException ...)` here is orphaned (its `try`
// and the background-submit call were lost in extraction), and the closing braces are
// absent. A complete runInternal() appears later in the file.
@Override public void runInternal() throws HiveSQLException { setState(OperationState.PENDING); setBackgroundHandle(backgroundHandle); } catch (RejectedExecutionException rejected) { setState(OperationState.ERROR); throw new HiveSQLException("The background threadpool cannot accept" + " new task for execution, please retry the operation", rejected);
private void runQuery(HiveConf sqlOperationConf) throws HiveSQLException { try { // In Hive server mode, we are not able to retry in the FetchTask // case, when calling fetch queries since execute() has returned. // For now, we disable the test attempts. driver.setTryCount(Integer.MAX_VALUE); response = driver.run(); if (0 != response.getResponseCode()) { throw toSQLException("Error while processing statement", response); } } catch (HiveSQLException e) { // If the operation was cancelled by another thread, // Driver#run will return a non-zero response code. // We will simply return if the operation state is CANCELED, // otherwise throw an exception if (getStatus().getState() == OperationState.CANCELED) { return; } else { setState(OperationState.ERROR); throw e; } } catch (Exception e) { setState(OperationState.ERROR); throw new HiveSQLException("Error running query: " + e.toString(), e); } setState(OperationState.FINISHED); }
private void runQuery(HiveConf sqlOperationConf) throws HiveSQLException { try { // In Hive server mode, we are not able to retry in the FetchTask // case, when calling fetch queries since execute() has returned. // For now, we disable the test attempts. driver.setTryCount(Integer.MAX_VALUE); response = driver.run(); if (0 != response.getResponseCode()) { throw toSQLException("Error while processing statement", response); } } catch (HiveSQLException e) { // If the operation was cancelled by another thread, // Driver#run will return a non-zero response code. // We will simply return if the operation state is CANCELED, // otherwise throw an exception if (getStatus().getState() == OperationState.CANCELED) { return; } else { setState(OperationState.ERROR); throw e; } } catch (Exception e) { setState(OperationState.ERROR); throw new HiveSQLException("Error running query: " + e.toString(), e); } setState(OperationState.FINISHED); }
private void runQuery(HiveConf sqlOperationConf) throws HiveSQLException { try { // In Hive server mode, we are not able to retry in the FetchTask // case, when calling fetch queries since execute() has returned. // For now, we disable the test attempts. driver.setTryCount(Integer.MAX_VALUE); response = driver.run(); if (0 != response.getResponseCode()) { throw toSQLException("Error while processing statement", response); } } catch (HiveSQLException e) { // If the operation was cancelled by another thread, // Driver#run will return a non-zero response code. // We will simply return if the operation state is CANCELED, // otherwise throw an exception if (getStatus().getState() == OperationState.CANCELED) { return; } else { setState(OperationState.ERROR); throw e; } } catch (Exception e) { setState(OperationState.ERROR); throw new HiveSQLException("Error running query: " + e.toString(), e); } setState(OperationState.FINISHED); }
private void runQuery(HiveConf sqlOperationConf) throws HiveSQLException { try { // In Hive server mode, we are not able to retry in the FetchTask // case, when calling fetch queries since execute() has returned. // For now, we disable the test attempts. driver.setTryCount(Integer.MAX_VALUE); response = driver.run(); if (0 != response.getResponseCode()) { throw toSQLException("Error while processing statement", response); } } catch (HiveSQLException e) { // If the operation was cancelled by another thread, // Driver#run will return a non-zero response code. // We will simply return if the operation state is CANCELED, // otherwise throw an exception if (getStatus().getState() == OperationState.CANCELED) { return; } else { setState(OperationState.ERROR); throw e; } } catch (Exception e) { setState(OperationState.ERROR); throw new HiveSQLException("Error running query: " + e.toString(), e); } setState(OperationState.FINISHED); }
/**
 * Moves the operation to the given terminal {@code state} and releases all
 * resources held by it: cancels any in-flight background execution, closes and
 * destroys the driver, and deletes the session's temporary output file.
 *
 * @param state terminal state to record before tearing down
 * @throws HiveSQLException if the state transition is invalid
 */
private void cleanup(OperationState state) throws HiveSQLException {
  setState(state);
  if (shouldRunAsync()) {
    // Interrupt the background thread if the query is still running.
    Future<?> backgroundHandle = getBackgroundHandle();
    if (backgroundHandle != null) {
      backgroundHandle.cancel(true);
    }
  }
  if (driver != null) {
    driver.close();
    driver.destroy();
  }
  driver = null;
  // SessionState.get() is thread-local and may return null when cleanup runs
  // on a thread that never attached a session; guard to avoid an NPE here.
  SessionState ss = SessionState.get();
  if (ss != null && ss.getTmpOutputFile() != null) {
    ss.getTmpOutputFile().delete();
  }
}
// NOTE(review): duplicate truncated fragment — identical to the cleanup() stub near
// the top of this chunk: signature plus first statement only, body and closing brace
// missing. Not a compilable method; the complete cleanup() exists elsewhere in the file.
private synchronized void cleanup(OperationState state) throws HiveSQLException { setState(state);
@Override public void runInternal() throws HiveSQLException { setState(OperationState.PENDING); boolean runAsync = shouldRunAsync(); final boolean asyncPrepare = runAsync && HiveConf.getBoolVar(queryState.getConf(), HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_ASYNC_COMPILE); if (!asyncPrepare) { prepare(queryState); } if (!runAsync) { runQuery(); } else { // We'll pass ThreadLocals in the background thread from the foreground (handler) thread. // 1) ThreadLocal Hive object needs to be set in background thread // 2) The metastore client in Hive is associated with right user. // 3) Current UGI will get used by metastore when metastore is in embedded mode Runnable work = new BackgroundWork(getCurrentUGI(), parentSession.getSessionHive(), SessionState.getPerfLogger(), SessionState.get(), asyncPrepare); try { // This submit blocks if no background threads are available to run this operation Future<?> backgroundHandle = getParentSession().submitBackgroundOperation(work); setBackgroundHandle(backgroundHandle); } catch (RejectedExecutionException rejected) { setState(OperationState.ERROR); throw new HiveSQLException("The background threadpool cannot accept" + " new task for execution, please retry the operation", rejected); } } }
// NOTE(review): two byte-identical truncated fragments of an older runInternal()
// variant (the one taking a per-operation HiveConf via getConfigForOperation()).
// In each, the middle of the method is missing: the `} catch (RejectedExecutionException
// ...)` is orphaned (its `try` block and background-submit call were lost), and the
// closing braces are absent. Neither copy is compilable; see the complete
// runInternal() earlier in this chunk.
@Override public void runInternal() throws HiveSQLException { setState(OperationState.PENDING); final HiveConf opConfig = getConfigForOperation(); prepare(opConfig); setBackgroundHandle(backgroundHandle); } catch (RejectedExecutionException rejected) { setState(OperationState.ERROR); throw new HiveSQLException("The background threadpool cannot accept" + " new task for execution, please retry the operation", rejected);
// (second identical truncated copy follows)
@Override public void runInternal() throws HiveSQLException { setState(OperationState.PENDING); final HiveConf opConfig = getConfigForOperation(); prepare(opConfig); setBackgroundHandle(backgroundHandle); } catch (RejectedExecutionException rejected) { setState(OperationState.ERROR); throw new HiveSQLException("The background threadpool cannot accept" + " new task for execution, please retry the operation", rejected);