public static ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession,
    String statement, Map<String, String> confOverlay, boolean runAsync, long queryTimeout)
    throws HiveSQLException {
  String cleanStatement = HiveStringUtils.removeComments(statement);
  String[] tokens = cleanStatement.trim().split("\\s+");
  CommandProcessor processor = null;
  try {
    processor = CommandProcessorFactory.getForHiveCommand(tokens, parentSession.getHiveConf());
  } catch (SQLException e) {
    throw new HiveSQLException(e.getMessage(), e.getSQLState(), e);
  }
  if (processor == null) {
    // runAsync and queryTimeout make sense only for a SQLOperation.
    // Pass the original statement to SQLOperation, as the SQL parser can remove comments by itself.
    return new SQLOperation(parentSession, statement, confOverlay, runAsync, queryTimeout);
  }
  return new HiveCommandOperation(parentSession, cleanStatement, processor, confOverlay);
}
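// Illustration (not in the original source): a minimal sketch of the routing rule above.
// CommandProcessorFactory.getForHiveCommand() returns a processor for Hive commands such as
// SET or DFS and null for plain SQL, which is why SQL statements fall through to SQLOperation.
// The command list below is an assumption for demonstration, not the factory's actual table.
import java.util.Arrays;
import java.util.List;

class RoutingSketch {
  private static final List<String> HIVE_COMMANDS =
      Arrays.asList("set", "reset", "dfs", "add", "delete");

  static String route(String statement) {
    String[] tokens = statement.trim().split("\\s+");
    return HIVE_COMMANDS.contains(tokens[0].toLowerCase())
        ? "HiveCommandOperation" : "SQLOperation";
  }

  public static void main(String[] args) {
    System.out.println(route("dfs -ls /tmp"));   // HiveCommandOperation
    System.out.println(route("SELECT 1"));       // SQLOperation
  }
}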
@Override
public void close() throws HiveSQLException {
  setState(OperationState.CLOSED);
  tearDownSessionIO();
  cleanTmpFile();
  cleanupOperationLog(0);
}
private void cleanTmpFile() {
  resetResultReader();
  SessionState sessionState = getParentSession().getSessionState();
  sessionState.deleteTmpOutputFile();
  sessionState.deleteTmpErrOutputFile();
}
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
  validateDefaultFetchOrientation(orientation);
  if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
    resetResultReader();
  }
  List<String> rows = readResults((int) maxRows);
  RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion(), false);
  // We cannot do a delimited split for commands like "dfs -cat" that print the contents of a
  // file, which may use a different delimiter, so we split only when the resultSchema has
  // more than one column.
  if (resultSchema != null && resultSchema.getSize() > 1) {
    for (String row : rows) {
      rowSet.addRow(row.split("\\t"));
    }
  } else {
    for (String row : rows) {
      rowSet.addRow(new String[] { row });
    }
  }
  return rowSet;
}
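// Illustration (not in the original source): why the tab split above is guarded by the
// column count. Multi-column result lines are tab-delimited, but single-column output from
// commands like "dfs -cat" may contain tabs of its own, so it is passed through unsplit.
// Inputs below are invented examples.
import java.util.Arrays;

class DelimiterSketch {
  public static void main(String[] args) {
    String multiColumnRow = "k1\tv1";
    System.out.println(Arrays.toString(multiColumnRow.split("\\t"))); // [k1, v1]

    String catOutput = "col_a\tcol_b from some file";
    // Splitting this on tabs would fabricate columns; the single-column branch keeps it whole.
    System.out.println(Arrays.toString(new String[] { catOutput })); // one cell, intact
  }
}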
@Override
public void runInternal() throws HiveSQLException {
  setState(OperationState.RUNNING);
  try {
    String command = getStatement().trim();
    String[] tokens = statement.split("\\s");
    String commandArgs = command.substring(tokens[0].length()).trim();
    CommandProcessorResponse response = commandProcessor.run(commandArgs);
    int returnCode = response.getResponseCode();
    if (returnCode != 0) {
      throw toSQLException("Error while processing statement", response);
    }
    Schema schema = response.getSchema();
    if (schema != null) {
      setHasResultSet(true);
      resultSchema = new TableSchema(schema);
    } else {
      setHasResultSet(false);
      resultSchema = new TableSchema();
    }
  } catch (HiveSQLException e) {
    setState(OperationState.ERROR);
    throw e;
  } catch (Exception e) {
    setState(OperationState.ERROR);
    throw new HiveSQLException("Error running query: " + e.toString(), e);
  }
  setState(OperationState.FINISHED);
}
// Truncated in the original; presumably the opening of the helper that reads command output
// back from the session's temporary output file. The body of the try block and the catch
// clause below are a reconstruction, not the original source.
SessionState sessionState = getParentSession().getSessionState();
File tmp = sessionState.getTmpOutputFile();
try {
  resultReader = new BufferedReader(new FileReader(tmp));
} catch (FileNotFoundException e) {
  throw new HiveSQLException(e);
}
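// Illustration (not in the original source): a self-contained sketch of what a
// readResults-style helper plausibly does with the reader opened above: drain up to nLines
// lines from the temporary output file. The file path, method shape, and the "nLines <= 0
// means read all" convention are assumptions.
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

class ReadResultsSketch {
  static List<String> readLines(BufferedReader reader, int nLines) throws IOException {
    List<String> results = new ArrayList<>();
    String line;
    while ((nLines <= 0 || results.size() < nLines) && (line = reader.readLine()) != null) {
      results.add(line);
    }
    return results;
  }

  public static void main(String[] args) throws IOException {
    try (BufferedReader r = new BufferedReader(new FileReader("/tmp/hive-tmp-output.txt"))) {
      System.out.println(readLines(r, 10));
    }
  }
}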
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
  validateDefaultFetchOrientation(orientation);
  if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
    resetResultReader();
  }
  List<String> rows = readResults((int) maxRows);
  RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());
  for (String row : rows) {
    rowSet.addRow(new String[] { row });
  }
  return rowSet;
}
@Override
public void runInternal() throws HiveSQLException {
  setState(OperationState.RUNNING);
  try {
    String command = getStatement().trim();
    String[] tokens = statement.split("\\s");
    String commandArgs = command.substring(tokens[0].length()).trim();
    CommandProcessorResponse response = commandProcessor.run(commandArgs);
    int returnCode = response.getResponseCode();
    if (returnCode != 0) {
      throw toSQLException("Error while processing statement", response);
    }
    Schema schema = response.getSchema();
    if (schema != null) {
      setHasResultSet(true);
      resultSchema = new TableSchema(schema);
    } else {
      setHasResultSet(false);
      resultSchema = new TableSchema();
    }
  } catch (HiveSQLException e) {
    setState(OperationState.ERROR);
    throw e;
  } catch (Exception e) {
    setState(OperationState.ERROR);
    throw new HiveSQLException("Error running query: " + e.toString(), e);
  }
  setState(OperationState.FINISHED);
}
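// Illustration (not in the original source): how runInternal() splits the command name from
// its arguments before handing them to the CommandProcessor. The input is an invented example.
class CommandArgsSketch {
  public static void main(String[] args) {
    String command = "set hive.execution.engine=mr".trim();
    String[] tokens = command.split("\\s");
    String commandArgs = command.substring(tokens[0].length()).trim();
    System.out.println(tokens[0]);    // "set" -> selects the CommandProcessor
    System.out.println(commandArgs);  // "hive.execution.engine=mr" -> passed to run()
  }
}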
@Override
public void close() throws HiveSQLException {
  setState(OperationState.CLOSED);
  tearDownSessionIO();
  cleanTmpFile();
  cleanupOperationLog();
}
private void cleanTmpFile() {
  resetResultReader();
  SessionState sessionState = getParentSession().getSessionState();
  File tmp = sessionState.getTmpOutputFile();
  tmp.delete();
}
public static ExecuteStatementOperation newExecuteStatementOperation(
    HiveSession parentSession, String statement, Map<String, String> confOverlay,
    boolean runAsync) throws HiveSQLException {
  String[] tokens = statement.trim().split("\\s+");
  CommandProcessor processor = null;
  try {
    processor = CommandProcessorFactory.getForHiveCommand(tokens, parentSession.getHiveConf());
  } catch (SQLException e) {
    throw new HiveSQLException(e.getMessage(), e.getSQLState(), e);
  }
  if (processor == null) {
    return new SQLOperation(parentSession, statement, confOverlay, runAsync);
  }
  return new HiveCommandOperation(parentSession, statement, processor, confOverlay);
}