/**
 * Packages every accumulated batch into a {@link RecordBatches} view, with each
 * holder spanning its batch in full ({@code [0, recordCount)}).
 */
public RecordBatches toRecordBatches() {
  final List<RecordBatchHolder> holders =
      batches.stream()
          .map(batch -> RecordBatchHolder.newRecordBatchHolder(batch, 0, batch.getRecordCount()))
          .collect(Collectors.toList());
  return new RecordBatches(holders);
}
}
/**
 * Returns the batch holders backing this fragment. The live list is exposed
 * directly; no defensive copy is made.
 */
@Override
public List<RecordBatchHolder> getRecordBatches() {
  final List<RecordBatchHolder> holders = recordBatches.getBatches();
  return holders;
}
/** Returns the schema shared by the batches in this fragment. */
@Override
public BatchSchema getSchema() {
  final BatchSchema schema = recordBatches.getSchema();
  return schema;
}
private Pair<RecordBatchData, Integer> find(int index) { if (index >= recordBatches.getSize()) { throw new IllegalArgumentException(String.format("Invalid index %s", index)); } // Add the offset in the first batch int indexWorkspace = index; for(RecordBatchHolder batchHolder : recordBatches.getBatches()) { if (indexWorkspace < batchHolder.size()) { return new Pair<>(batchHolder.getData(), batchHolder.getStart() + indexWorkspace); } indexWorkspace -= batchHolder.size(); } throw new IllegalArgumentException(String.format("Invalid index %s", index)); }
/** Returns the total number of rows across all batch holders in this fragment. */
@Override
public int getReturnedRowCount() {
  final int rowCount = recordBatches.getSize();
  return rowCount;
}
/**
 * Builds a single-batch {@code JobDataFragment} around the given vectors; the
 * holder covers the whole batch and the fragment starts at offset 0.
 */
private static com.dremio.dac.model.job.JobDataFragment createDataObject(ValueVector... vv) {
  final RecordBatchData batchData = createRecordBatch(vv);
  final RecordBatches batches =
      new RecordBatches(asList(newRecordBatchHolder(batchData, 0, batchData.getRecordCount())));
  return new JobDataFragmentWrapper(0, new JobDataFragmentImpl(batches, 0, TEST_JOB_ID));
}
// Constructs a fragment over the given batches and precomputes the
// column-name -> index map from the batches' schema.
// NOTE(review): offsetInJobResults is accepted but never stored here — every
// visible caller passes 0. Confirm whether the offset is tracked elsewhere or
// the parameter is vestigial.
public JobDataFragmentImpl(final RecordBatches recordBatches, final int offsetInJobResults, final JobId jobId) { this.recordBatches = recordBatches; this.jobId = jobId; this.nameToColumnIndex = getColumnIndicesFromSchema(recordBatches.getSchema()); }
/**
 * Releases the buffers held by every batch in this fragment.
 *
 * <p>Synchronized so concurrent closes do not double-release. Any checked
 * exception raised during cleanup is rethrown wrapped in a RuntimeException;
 * unchecked exceptions propagate as-is.
 */
@Override
@SuppressWarnings("unchecked")
public synchronized void close() {
  try {
    // The holders are AutoCloseable; the double cast erases the generic type.
    AutoCloseables.close((List<AutoCloseable>) (Object) recordBatches.getBatches());
  } catch (Exception ex) {
    // Throwables.propagate(ex) is deprecated and its result was discarded
    // (no `throw`), hiding the rethrow from compiler flow analysis. Use the
    // replacement idiom recommended by Guava.
    Throwables.throwIfUnchecked(ex);
    throw new RuntimeException(ex);
  }
}
return new RecordBatches(batchHolders); } catch(IOException ex){ throw UserException.dataReadError(ex)
/**
 * Truncating three 5-row batches to 10 rows must round-trip through JSON with
 * only the first two batches' values present.
 */
@Test
public void testDataTrunc() throws Exception {
  Pair<? extends ValueVector, ResultVerifier> strCol1 = testVarCharVector(0, 0);
  Pair<? extends ValueVector, ResultVerifier> strCol2 = testVarCharVector(5, 5);
  Pair<? extends ValueVector, ResultVerifier> strCol3 = testVarCharVector(10, 10);
  Pair<? extends ValueVector, ResultVerifier> dateCol1 = testDateMilliVector(0, 0);
  Pair<? extends ValueVector, ResultVerifier> dateCol2 = testDateMilliVector(5, 5);
  Pair<? extends ValueVector, ResultVerifier> dateCol3 = testDateMilliVector(10, 10);

  RecordBatchData firstBatch = createRecordBatch(strCol1.getKey(), dateCol1.getKey());
  RecordBatchData secondBatch = createRecordBatch(strCol2.getKey(), dateCol2.getKey());
  RecordBatchData thirdBatch = createRecordBatch(strCol3.getKey(), dateCol3.getKey());

  // Stub the loader to hand back all three 5-row batches for any requested window.
  RecordBatches stubbedBatches = new RecordBatches(asList(
      newRecordBatchHolder(firstBatch, 0, 5),
      newRecordBatchHolder(secondBatch, 0, 5),
      newRecordBatchHolder(thirdBatch, 0, 5)));
  JobLoader jobLoader = mock(JobLoader.class);
  when(jobLoader.load(anyInt(), anyInt())).thenReturn(stubbedBatches);

  try (JobData dataInput = new JobDataWrapper(new JobDataImpl(jobLoader, TEST_JOB_ID))) {
    JobDataFragment truncDataInput = dataInput.truncate(10);

    // Serialize and deserialize to verify the JSON representation is lossless.
    DataPOJO dataOutput =
        OBJECT_MAPPER.readValue(OBJECT_MAPPER.writeValueAsString(truncDataInput), DataPOJO.class);
    assertEquals(truncDataInput.getColumns().toString(), dataOutput.getColumns().toString());
    assertEquals(truncDataInput.getReturnedRowCount(), dataOutput.getReturnedRowCount());

    // Only rows 0-9 survive truncation, i.e. the first two batches of each column.
    strCol1.getValue().verify(dataOutput);
    strCol2.getValue().verify(dataOutput);
    dateCol1.getValue().verify(dataOutput);
    dateCol2.getValue().verify(dataOutput);
  }
}
/**
 * Selecting rows [5, 15) from three 5-row batches must round-trip through JSON
 * with only the second and third batches' values present.
 */
@Test
public void testDataRange() throws Exception {
  Pair<? extends ValueVector, ResultVerifier> strCol1 = testVarCharVector(0, 0);
  Pair<? extends ValueVector, ResultVerifier> strCol2 = testVarCharVector(0, 5);
  Pair<? extends ValueVector, ResultVerifier> strCol3 = testVarCharVector(5, 10);
  Pair<? extends ValueVector, ResultVerifier> dateCol1 = testDateMilliVector(0, 0);
  Pair<? extends ValueVector, ResultVerifier> dateCol2 = testDateMilliVector(0, 5);
  Pair<? extends ValueVector, ResultVerifier> dateCol3 = testDateMilliVector(5, 10);

  RecordBatchData firstBatch = createRecordBatch(strCol1.getKey(), dateCol1.getKey());
  RecordBatchData secondBatch = createRecordBatch(strCol2.getKey(), dateCol2.getKey());
  RecordBatchData thirdBatch = createRecordBatch(strCol3.getKey(), dateCol3.getKey());

  // Stub the loader to hand back all three 5-row batches for any requested window.
  RecordBatches stubbedBatches = new RecordBatches(asList(
      newRecordBatchHolder(firstBatch, 0, 5),
      newRecordBatchHolder(secondBatch, 0, 5),
      newRecordBatchHolder(thirdBatch, 0, 5)));
  JobLoader jobLoader = mock(JobLoader.class);
  when(jobLoader.load(anyInt(), anyInt())).thenReturn(stubbedBatches);

  try (JobData dataInput = new JobDataWrapper(new JobDataImpl(jobLoader, TEST_JOB_ID))) {
    JobDataFragment rangeDataInput = dataInput.range(5, 10);

    // Serialize and deserialize to verify the JSON representation is lossless.
    DataPOJO dataOutput =
        OBJECT_MAPPER.readValue(OBJECT_MAPPER.writeValueAsString(rangeDataInput), DataPOJO.class);
    assertEquals(rangeDataInput.getColumns().toString(), dataOutput.getColumns().toString());
    assertEquals(rangeDataInput.getReturnedRowCount(), dataOutput.getReturnedRowCount());

    // The requested range skips the first batch entirely.
    strCol2.getValue().verify(dataOutput);
    strCol3.getValue().verify(dataOutput);
    dateCol2.getValue().verify(dataOutput);
    dateCol3.getValue().verify(dataOutput);
  }
}
recordBatches.add(newRecordBatchHolder(data3, 0, data3.getRecordCount())); try (JobDataFragment jdf = new JobDataFragmentWrapper(0, new JobDataFragmentImpl(new RecordBatches(recordBatches), 0, TEST_JOB_ID))) { new RecordBatches(asList( newRecordBatchHolder(data1, 2, 5), newRecordBatchHolder(data2, 1, 3),