@Override public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException { validateDefaultFetchOrientation(orientation); if (orientation.equals(FetchOrientation.FETCH_FIRST)) { resetResultReader(); } List<String> rows = readResults((int) maxRows); RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion(), false); // cannot do delimited split for some commands like "dfs -cat" that prints the contents of file which may have // different delimiter. so we will split only when the resultSchema has more than 1 column if (resultSchema != null && resultSchema.getSize() > 1) { for (String row : rows) { rowSet.addRow(row.split("\\t")); } } else { for (String row : rows) { rowSet.addRow(new String[]{row}); } } return rowSet; }
/**
 * Fetches the next batch of rows from the buffered output of the executed command.
 *
 * @param orientation fetch direction; only the default orientations are accepted
 *     (validated below), and FETCH_FIRST rewinds the result reader
 * @param maxRows maximum number of rows to return in this batch
 * @return a RowSet holding up to maxRows rows
 * @throws HiveSQLException if the orientation is unsupported or reading results fails
 */
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
  validateDefaultFetchOrientation(orientation);
  if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
    // FETCH_FIRST restarts the scan from the beginning of the buffered results.
    resetResultReader();
  }
  List<String> rows = readResults((int) maxRows);
  RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());
  // Cannot do a delimited split for some commands like "dfs -cat" that print the
  // contents of a file which may use a different delimiter, so split only when the
  // resultSchema has more than 1 column; otherwise emitting the whole line as one
  // cell would mismatch the declared column count.
  if (resultSchema != null && resultSchema.getSize() > 1) {
    for (String row : rows) {
      // Limit -1 keeps trailing empty strings so empty last columns are preserved.
      rowSet.addRow(row.split("\\t", -1));
    }
  } else {
    for (String row : rows) {
      rowSet.addRow(new String[] {row});
    }
  }
  return rowSet;
}
/**
 * Fetches the next batch of rows from the buffered output of the executed command.
 *
 * @param orientation fetch direction; only the default orientations are accepted
 *     (validated below), and FETCH_FIRST rewinds the result reader
 * @param maxRows maximum number of rows to return in this batch
 * @return a RowSet holding up to maxRows rows
 * @throws HiveSQLException if the orientation is unsupported or reading results fails
 */
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
  validateDefaultFetchOrientation(orientation);
  if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
    // FETCH_FIRST restarts the scan from the beginning of the buffered results.
    resetResultReader();
  }
  List<String> rows = readResults((int) maxRows);
  RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());
  // Cannot do a delimited split for some commands like "dfs -cat" that print the
  // contents of a file which may use a different delimiter, so split only when the
  // resultSchema has more than 1 column; otherwise emitting the whole line as one
  // cell would mismatch the declared column count.
  if (resultSchema != null && resultSchema.getSize() > 1) {
    for (String row : rows) {
      // Limit -1 keeps trailing empty strings so empty last columns are preserved.
      rowSet.addRow(row.split("\\t", -1));
    }
  } else {
    for (String row : rows) {
      rowSet.addRow(new String[] {row});
    }
  }
  return rowSet;
}
/**
 * Fetches the next batch of rows from the buffered output of the executed command.
 *
 * @param orientation fetch direction; only the default orientations are accepted
 *     (validated below), and FETCH_FIRST rewinds the result reader
 * @param maxRows maximum number of rows to return in this batch
 * @return a RowSet holding up to maxRows rows
 * @throws HiveSQLException if the orientation is unsupported or reading results fails
 */
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
  validateDefaultFetchOrientation(orientation);
  if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
    // FETCH_FIRST restarts the scan from the beginning of the buffered results.
    resetResultReader();
  }
  List<String> rows = readResults((int) maxRows);
  RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());
  // Cannot do a delimited split for some commands like "dfs -cat" that print the
  // contents of a file which may use a different delimiter, so split only when the
  // resultSchema has more than 1 column; otherwise emitting the whole line as one
  // cell would mismatch the declared column count.
  if (resultSchema != null && resultSchema.getSize() > 1) {
    for (String row : rows) {
      // Limit -1 keeps trailing empty strings so empty last columns are preserved.
      rowSet.addRow(row.split("\\t", -1));
    }
  } else {
    for (String row : rows) {
      rowSet.addRow(new String[] {row});
    }
  }
  return rowSet;
}
/**
 * Fetches the next batch of rows from the buffered output of the executed command.
 *
 * @param orientation fetch direction; only the default orientations are accepted
 *     (validated below), and FETCH_FIRST rewinds the result reader
 * @param maxRows maximum number of rows to return in this batch
 * @return a RowSet holding up to maxRows rows
 * @throws HiveSQLException if the orientation is unsupported or reading results fails
 */
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
  validateDefaultFetchOrientation(orientation);
  if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
    // FETCH_FIRST restarts the scan from the beginning of the buffered results.
    resetResultReader();
  }
  List<String> rows = readResults((int) maxRows);
  RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());
  // Cannot do a delimited split for some commands like "dfs -cat" that print the
  // contents of a file which may use a different delimiter, so split only when the
  // resultSchema has more than 1 column; otherwise emitting the whole line as one
  // cell would mismatch the declared column count.
  if (resultSchema != null && resultSchema.getSize() > 1) {
    for (String row : rows) {
      // Limit -1 keeps trailing empty strings so empty last columns are preserved.
      rowSet.addRow(row.split("\\t", -1));
    }
  } else {
    for (String row : rows) {
      rowSet.addRow(new String[] {row});
    }
  }
  return rowSet;
}
@Override public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException { validateDefaultFetchOrientation(orientation); if (orientation.equals(FetchOrientation.FETCH_FIRST)) { resetResultReader(); } List<String> rows = readResults((int) maxRows); RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion(), false); // cannot do delimited split for some commands like "dfs -cat" that prints the contents of file which may have // different delimiter. so we will split only when the resultSchema has more than 1 column if (resultSchema != null && resultSchema.getSize() > 1) { for (String row : rows) { rowSet.addRow(row.split("\\t")); } } else { for (String row : rows) { rowSet.addRow(new String[]{row}); } } return rowSet; }