@Override
public ProducerOperator create(
    final FragmentExecutionContext fragmentExecContext,
    final OperatorContext context,
    HBaseSubScan subScan) throws ExecutionSetupException {
  final List<SchemaPath> columns = subScan.getColumns() == null
      ? GroupScan.ALL_COLUMNS : subScan.getColumns();
  final HBaseStoragePlugin plugin = fragmentExecContext.getStoragePlugin(subScan.getPluginId());
  // One HBaseRecordReader per sub-scan spec, created lazily as the scan consumes the iterator.
  final Iterable<RecordReader> readers = FluentIterable.from(subScan.getScans())
      .transform(new Function<HBaseSubScanSpec, RecordReader>() {
        @Override
        public RecordReader apply(HBaseSubScanSpec scanSpec) {
          return new HBaseRecordReader(plugin.getConnection(), scanSpec, columns, context, false);
        }
      });
  return new ScanOperator(fragmentExecContext.getSchemaUpdater(), subScan, context, readers.iterator());
}
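// Note the laziness above: FluentIterable.transform is evaluated on demand, so an
// HBaseRecordReader is constructed only when the ScanOperator advances the iterator.
// A sketch of the same pattern with Java 8 streams (assumption: Java 8+ is available;
// Stream.iterator() preserves the laziness):
final Iterator<RecordReader> lazyReaders = subScan.getScans().stream()
    .map(spec -> (RecordReader) new HBaseRecordReader(plugin.getConnection(), spec, columns, context, false))
    .iterator();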
private void setupReader(RecordReader reader) throws Exception {
  // Guard setup with a rollback: if anything below throws, the reader is closed
  // on the way out; commit() disarms that cleanup on success.
  try (RollbackCloseable commit = AutoCloseables.rollbackable(reader)) {
    BatchSchema initialSchema = outgoing.getSchema();
    setupReaderAsCorrectUser(reader);
    checkAndLearnSchema();
    Preconditions.checkArgument(initialSchema.equals(outgoing.getSchema()),
        "Schema changed but not detected.");
    commit.commit();
  }
}
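// A minimal sketch of the rollback idea used above (illustrative names, not the
// actual RollbackCloseable class): the wrapped resource is closed on every exit
// path unless commit() was called first.
final class RollbackSketch implements AutoCloseable {
  private final AutoCloseable inner;
  private boolean committed;
  RollbackSketch(AutoCloseable inner) { this.inner = inner; }
  void commit() { committed = true; }              // success: resource survives the block
  @Override public void close() throws Exception {
    if (!committed) { inner.close(); }             // failure: release the resource
  }
}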
@Override
public VectorAccessible setup() throws Exception {
  schema.maskAndReorder(config.getColumns()).materializeVectors(selectedColumns, mutator);
  outgoing.buildSchema(SelectionVectorMode.NONE);
  callBack.getSchemaChangedAndReset();
  setupReader(currentReader);
  state = State.CAN_PRODUCE;
  return outgoing;
}
try {
  stats.startSetup();
  setupReader(currentReader);
} finally {
  stats.stopSetup();
}

// Record-producing path: relearn the schema if it changed, then publish the batch.
// (Kept outside the finally block: returning from finally would swallow exceptions.)
checkAndLearnSchema();
return outgoing.setAllCount(recordCount);
@Override
public ProducerOperator create(FragmentExecutionContext fec, OperatorContext context, Values config)
    throws ExecutionSetupException {
  final JSONRecordReader reader = new JSONRecordReader(
      context, config.getContent().asNode(), null,
      Collections.singletonList(SchemaPath.getSimplePath("*")));
  return new ScanOperator(fec.getSchemaUpdater(), config, context,
      Iterators.singletonIterator((RecordReader) reader));
}
@Override
public ProducerOperator create(FragmentExecutionContext fec, OperatorContext context, EmptyValues config)
    throws ExecutionSetupException {
  return new ScanOperator(fec.getSchemaUpdater(), config, context,
      Iterators.<RecordReader>singletonIterator(new EmptyRecordReader(context)));
}
@Override
public ProducerOperator create(FragmentExecutionContext fragmentExecContext, OperatorContext context, MockSubScanPOP config)
    throws ExecutionSetupException {
  final List<MockScanEntry> entries = config.getReadEntries();
  final List<RecordReader> readers = Lists.newArrayList();
  for (final MockScanEntry e : entries) {
    readers.add(new MockRecordReader(context, e));
  }
  return new ScanOperator(fragmentExecContext.getSchemaUpdater(), config, context, readers.iterator());
}
// Raw types are unavoidable here: SystemTable exposes its pojo class without a type parameter.
@SuppressWarnings({"rawtypes", "unchecked"})
@Override
public ProducerOperator create(FragmentExecutionContext fec, OperatorContext context, SystemSubScan config)
    throws ExecutionSetupException {
  final SystemTable table = config.getTable();
  final SystemStoragePlugin plugin = fec.getStoragePlugin(config.getPluginId());
  final RecordReader reader = new PojoRecordReader(
      table.getPojoClass(), table.getIterator(plugin.getSabotContext(), context), config.getColumns());
  return new ScanOperator(fec.getSchemaUpdater(), config, context, Collections.singleton(reader).iterator());
}
// Fragment: two alternative exits from the same creator. The guard condition is not
// part of this fragment; `readers.isEmpty()` below is an assumption standing in for
// the original check.
if (readers.isEmpty()) {
  return new ScanOperator(fragmentExecContext.getSchemaUpdater(), config, context,
      Iterators.singletonIterator(new EmptyRecordReader()));
}
return new ScanOperator(fragmentExecContext.getSchemaUpdater(), config, context, readers.iterator());
} catch (Exception e) {
  // On failure, close any readers already created before propagating the exception.
  AutoCloseables.close(e, readers);
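// What AutoCloseables.close(e, readers) does here, sketched under the common utility
// semantics (an assumption about this library): close every reader and attach any
// close failure to the original exception as suppressed, so the root cause stays primary.
static void closeOnError(Exception original, Iterable<? extends AutoCloseable> resources) {
  for (AutoCloseable closeable : resources) {
    try {
      closeable.close();
    } catch (Exception suppressed) {
      original.addSuppressed(suppressed);  // keep the root cause primary
    }
  }
}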
@Override
public ProducerOperator create(FragmentExecutionContext fec, OperatorContext context, InfoSchemaSubScan config)
    throws ExecutionSetupException {
  final InfoSchemaTable table = config.getTable();
  final InfoSchemaStoragePlugin plugin = fec.getStoragePlugin(config.getPluginId());
  final DatasetListingService datasetListing = plugin.getSabotContext().getDatasetListing();
  final String catalogName = context.getOptions().getOption(ExecConstants.USE_LEGACY_CATALOG_NAME)
      ? InfoSchemaConstants.IS_LEGACY_CATALOG_NAME
      : InfoSchemaConstants.IS_CATALOG_NAME;
  final RecordReader reader = table.asReader(
      catalogName, config.getUserName(), datasetListing, config.getQuery(), config.getColumns());
  return new ScanOperator(fec.getSchemaUpdater(), config, context, Collections.singleton(reader).iterator());
}
@Override
public ProducerOperator create(FragmentExecutionContext fragmentExecContext, final OperatorContext context, EasySubScan config)
    throws ExecutionSetupException {
  final FileSystemPlugin plugin = fragmentExecContext.getStoragePlugin(config.getPluginId());
  final FileSystemWrapper fs = plugin.getFs(config.getUserName(), context.getStats());
  final FormatPluginConfig formatConfig =
      PhysicalDatasetUtils.toFormatPlugin(config.getFileConfig(), Collections.<String>emptyList());
  final EasyFormatPlugin<?> formatPlugin = (EasyFormatPlugin<?>) plugin.getFormatPlugin(formatConfig);

  FluentIterable<SplitAndExtended> unorderedWork = FluentIterable.from(config.getSplits())
      .transform(new Function<DatasetSplit, SplitAndExtended>() {
        @Override
        public SplitAndExtended apply(DatasetSplit split) {
          return new SplitAndExtended(split);
        }
      });

  final boolean sortReaders = context.getOptions().getOption(ExecConstants.SORT_FILE_BLOCKS);
  final List<SplitAndExtended> workList = sortReaders
      ? unorderedWork.toSortedList(SPLIT_COMPARATOR)
      : unorderedWork.toList();
  final boolean selectAllColumns = selectsAllColumns(config.getSchema(), config.getColumns());
  final CompositeReaderConfig readerConfig =
      CompositeReaderConfig.getCompound(config.getSchema(), config.getColumns(), config.getPartitionColumns());
  final List<SchemaPath> innerFields = selectAllColumns
      ? ImmutableList.of(ColumnUtils.STAR_COLUMN)
      : readerConfig.getInnerColumns();

  FluentIterable<RecordReader> readers = FluentIterable.from(workList)
      .transform(new Function<SplitAndExtended, RecordReader>() {
        @Override
        public RecordReader apply(SplitAndExtended input) {
          try {
            RecordReader inner = formatPlugin.getRecordReader(context, fs, input.getExtended(), innerFields);
            return readerConfig.wrapIfNecessary(context.getAllocator(), inner, input.getSplit());
          } catch (ExecutionSetupException e) {
            // Function.apply cannot throw checked exceptions; rethrow unchecked.
            throw new RuntimeException(e);
          }
        }
      });
  return new ScanOperator(fragmentExecContext.getSchemaUpdater(), config, context, readers.iterator());
}
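// SPLIT_COMPARATOR is referenced above but not part of this fragment. A plausible
// sketch (assumption: DatasetSplit exposes a size accessor; the real comparator may
// differ) orders larger splits first so the longest reads start earliest:
private static final Comparator<SplitAndExtended> SPLIT_COMPARATOR =
    (left, right) -> Long.compare(right.getSplit().getSize(), left.getSplit().getSize());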
  return new ScanOperator(fec.getSchemaUpdater(), subScan, context, readers.iterator());
} catch (InvalidProtocolBufferException e) {
  throw new ExecutionSetupException(e);
final ScanOperator scan = new ScanOperator(
    fragmentExecContext.getSchemaUpdater(), config, context, readers.iterator(), globalDictionaries);
logger.debug("Took {} ms to create Parquet Scan operator.", watch.elapsed(TimeUnit.MILLISECONDS));
return scan;
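// `watch` is not defined in this fragment; the debug line above reads like a Guava
// Stopwatch started before the dictionaries and readers were assembled, e.g.:
//   final Stopwatch watch = Stopwatch.createStarted();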