@Override public Void run() throws Exception { checkNotNull(reader).setup(mutator); return null; } });
/**
 * Initializes the wrapped reader and every attached populator against the
 * same output mutator, in that order.
 *
 * @param output the mutator all delegates will write their vectors into
 * @throws ExecutionSetupException if the inner reader or any populator fails to initialize
 */
@Override
public void setup(OutputMutator output) throws ExecutionSetupException {
  inner.setup(output);
  for (Populator populator : populators) {
    populator.setup(output);
  }
}
// Records the caller's output mutator, initializes the inner reader, then rebuilds
// the schema via newSchema().
// NOTE(review): inner.setup(...) is passed the field `mutator`, NOT the `output`
// parameter — presumably an intermediate/sample mutator created elsewhere in this
// class (a common coercion-reader pattern). Verify this is intentional and not a
// typo for `output`; compare with the sibling setup() that forwards `output` directly.
@Override public void setup(OutputMutator output) throws ExecutionSetupException { this.outputMutator = output; inner.setup(mutator); newSchema(); }
/**
 * Closes the current row-group reader and advances to the next row group listed
 * in the Parquet footer, constructing and initializing a fresh
 * {@code ParquetRowiseReader} for it. When all row groups are exhausted,
 * {@code current} is left {@code null} so callers can detect end-of-file.
 */
private void nextReader() {
  AutoCloseables.closeNoChecked(current);
  current = null;
  currentIndex++;
  // All row groups consumed: leave current == null as the exhaustion signal.
  if (currentIndex == footer.getBlocks().size()) {
    return;
  }
  current = new ParquetRowiseReader(
      context,
      footer,
      currentIndex,
      status.getPath().toString(),
      GroupScan.ALL_COLUMNS,
      fs,
      schemaHelper,
      streamProvider);
  try {
    current.setup(output);
  } catch (ExecutionSetupException e) {
    // ExecutionSetupException is checked, so wrap it to keep this method's
    // no-throws signature. Replaces deprecated Guava Throwables.propagate(e),
    // which for a checked exception did exactly this wrapping.
    throw new RuntimeException(e);
  }
}
/**
 * Prepares this reader for execution: measures data locality from the footer,
 * partitions the projected columns into vectorizable and non-vectorizable sets,
 * builds the delegate readers for the selected execution path, wires each
 * column's root name to its output vector, and records execution-path metrics.
 *
 * @param output the mutator delegate readers write into and vectors are looked up from
 * @throws ExecutionSetupException if any delegate reader fails to initialize
 */
@Override
public void setup(OutputMutator output) throws ExecutionSetupException {
  computeLocality(footer);
  splitColumns(footer, vectorizableReaderColumns, nonVectorizableReaderColumns);

  final ExecutionPath chosenPath = getExecutionPath();
  delegates = chosenPath.getReaders(this);
  Preconditions.checkArgument(!delegates.isEmpty(),
      "There should be at least one delegated RecordReader");
  for (RecordReader delegate : delegates) {
    delegate.setup(output);
  }

  // Map each projected column's root segment name to its output vector.
  for (SchemaPath column : vectorizableReaderColumns) {
    final String rootName = column.getRootSegment().getNameSegment().getPath();
    vectorizedMap.put(rootName, output.getVector(rootName));
  }
  for (SchemaPath column : nonVectorizableReaderColumns) {
    final String rootName = column.getRootSegment().getNameSegment().getPath();
    nonVectorizedMap.put(rootName, output.getVector(rootName));
  }

  // Operator metrics: which path ran, how the columns split, and whether a filter applies.
  context.getStats().setLongStat(Metric.PARQUET_EXEC_PATH, chosenPath.ordinal());
  context.getStats().setLongStat(Metric.NUM_VECTORIZED_COLUMNS, vectorizableReaderColumns.size());
  context.getStats().setLongStat(Metric.NUM_NON_VECTORIZED_COLUMNS, nonVectorizableReaderColumns.size());
  final boolean hasFilter = filterConditions != null && filterConditions.size() > 0;
  context.getStats().setLongStat(Metric.FILTER_EXISTS, hasFilter ? 1 : 0);
}
dataset.setPath(file.getPath().toString()); try(RecordReader reader = new AdditionalColumnsRecordReader(((EasyFormatPlugin)formatPlugin).getRecordReader(operatorContext, dfs, dataset, GroupScan.ALL_COLUMNS), explorer.getImplicitFieldsForSample(selection))) { reader.setup(mutator); Map<String, ValueVector> fieldVectorMap = new HashMap<>(); for (VectorWrapper<?> vw : mutator.getContainer()) {
reader.setup(mutator);
reader.setup(mutator);
delegate.setup(mutator);