Refine search
// Adapts a record-set provider to the page-source SPI: fetch the RecordSet for this
// split/columns and wrap it in a RecordPageSource. The trailing brace closes the
// enclosing adapter class, whose header is outside this view.
@Override public ConnectorPageSource createPageSource(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorSplit split, List<ColumnHandle> columns) { return new RecordPageSource(recordSetProvider.getRecordSet(transactionHandle, session, split, columns)); } }
// Delegates close() to the wrapped page source. Trailing brace closes the enclosing class.
@Override public void close() { inner.close(); } }
// Delegates the completed-bytes counter to the wrapped page source.
@Override public long getCompletedBytes() { return inner.getCompletedBytes(); }
partitionKeys, hiveColumns, bucketConversion.map(BucketConversion::getBucketColumnHandles).orElse(ImmutableList.of()), columnCoercions, path, List<ColumnMapping> regularAndInterimColumnMappings = ColumnMapping.extractRegularAndInterimColumnMappings(columnMappings); Optional<BucketAdaptation> bucketAdaptation = bucketConversion.map(conversion -> { Map<Integer, ColumnMapping> hiveIndexToBlockIndex = uniqueIndex(regularAndInterimColumnMappings, columnMapping -> columnMapping.getHiveColumnHandle().getHiveColumnIndex()); int[] bucketColumnIndices = conversion.getBucketColumnHandles().stream() .collect(toList()); return Optional.of(new RecordPageSource(columnTypes, hiveRecordCursor));
/**
 * Test helper: opens {@code targetFile} through the given cursor provider and exposes it
 * as a {@link RecordPageSource} over the requested columns.
 */
private static ConnectorPageSource createPageSource(
        HiveRecordCursorProvider cursorProvider,
        ConnectorSession session,
        File targetFile,
        List<String> columnNames,
        List<Type> columnTypes,
        HiveStorageFormat format)
{
    // Build one synthetic REGULAR column handle per requested column.
    TypeTranslator translator = new HiveTypeTranslator();
    List<HiveColumnHandle> handles = new ArrayList<>(columnNames.size());
    for (int index = 0; index < columnNames.size(); index++) {
        Type type = columnTypes.get(index);
        handles.add(new HiveColumnHandle(
                columnNames.get(index),
                toHiveType(translator, type),
                type.getTypeSignature(),
                index,
                REGULAR,
                Optional.empty()));
    }

    // Read the entire file (offset 0, length == file size) with no predicate pushdown.
    RecordCursor cursor = cursorProvider
            .createRecordCursor(
                    conf,
                    session,
                    new Path(targetFile.getAbsolutePath()),
                    0,
                    targetFile.length(),
                    targetFile.length(),
                    createSchema(format, columnNames, columnTypes),
                    handles,
                    TupleDomain.all(),
                    DateTimeZone.forID(session.getTimeZoneKey().getId()),
                    TYPE_MANAGER,
                    false)
            .get();
    return new RecordPageSource(columnTypes, cursor);
}
hiveStorageTimeZone ); if (pageSource.isPresent()) { return pageSource.get(); if (recordCursor != null) { List<Type> columnTypes = ImmutableList.copyOf(transform(hiveColumns, input -> typeManager.getType(input.getTypeSignature()))); return new RecordPageSource(columnTypes, recordCursor);
SystemTable systemTable = tables.getSystemTable(session, tableName) .orElseThrow(() -> new PrestoException(NOT_FOUND, format("Table %s not found", tableName))); for (Map.Entry<ColumnHandle, Domain> entry : constraint.getDomains().get().entrySet()) { String columnName = ((SystemColumnHandle) entry.getKey()).getColumnName(); newConstraints.put(columnsByName.get(columnName), entry.getValue()); return new RecordPageSource(new MappedRecordSet(toRecordSet(systemTransaction.getConnectorTransactionHandle(), systemTable, session, newContraint), userToSystemFieldIndex.build()));
/**
 * Test helper: reads {@code targetFile} via the legacy HiveRecordCursor API and wraps the
 * resulting cursor in a {@link RecordPageSource}.
 */
private static ConnectorPageSource createPageSource(
        HiveRecordCursorProvider cursorProvider,
        ConnectorSession session,
        File targetFile,
        List<String> columnNames,
        List<Type> columnTypes,
        HiveStorageFormat format)
{
    // One synthetic column handle per requested column, in "test" client context.
    List<HiveColumnHandle> handles = new ArrayList<>(columnNames.size());
    for (int index = 0; index < columnNames.size(); index++) {
        Type type = columnTypes.get(index);
        handles.add(new HiveColumnHandle(
                "test",
                columnNames.get(index),
                HiveType.toHiveType(type),
                type.getTypeSignature(),
                index,
                false));
    }

    // Scan the whole file with no partition keys and no predicate pushdown.
    HiveRecordCursor cursor = cursorProvider
            .createHiveRecordCursor(
                    "test",
                    conf,
                    session,
                    new Path(targetFile.getAbsolutePath()),
                    0,
                    targetFile.length(),
                    createSchema(format, columnNames, columnTypes),
                    handles,
                    ImmutableList.<HivePartitionKey>of(),
                    TupleDomain.all(),
                    DateTimeZone.forID(session.getTimeZoneKey().getId()),
                    TYPE_MANAGER)
            .get();
    return new RecordPageSource(columnTypes, cursor);
}
BIGINT, field(0, BIGINT))); Supplier<CursorProcessor> cursorProcessor = expressionCompiler.compileCursorProcessor(Optional.empty(), projections, "key"); Supplier<PageProcessor> pageProcessor = expressionCompiler.compilePageProcessor(Optional.empty(), projections); new PlanNodeId("test"), new PlanNodeId("0"), (session, split, columns) -> new RecordPageSource(new PageRecordSet(ImmutableList.of(BIGINT), input)), cursorProcessor, pageProcessor,
/**
 * Builds a page source that probes the in-memory index for this split's lookup keys and
 * returns the matching rows projected onto {@code outputColumnNames}.
 *
 * @throws IllegalArgumentException if no index exists for the requested table/columns
 */
@Override
protected ConnectorPageSource createLookupPageSource(SplitInfo splitInfo, List<String> outputColumnNames)
{
    // Resolve the pre-built index for this table, scale factor, and lookup-column set.
    IndexedTable table = indexedData.getIndexedTable(
            splitInfo.getTableName(),
            schemaNameToScaleFactor(splitInfo.getSchemaName()),
            ImmutableSet.copyOf(splitInfo.getLookupColumnNames()))
            .orElseThrow(() -> new IllegalArgumentException(String.format("No such index: %s%s", splitInfo.getTableName(), splitInfo.getLookupColumnNames())));

    // Materialize the keys as a record set, probe the index, and expose the result as pages.
    List<Type> keyTypes = types(splitInfo.getTableName(), splitInfo.getLookupColumnNames());
    RecordSet keys = new ListBasedRecordSet(splitInfo.getKeys(), keyTypes);
    RecordSet matches = lookupIndexKeys(keys, table, outputColumnNames);
    return new RecordPageSource(matches);
}
/**
 * Produces the next output page, lazily opening the underlying source on first call.
 * Returns {@code null} until a split has been assigned.
 */
@Override
public Page getOutput()
{
    // No split yet: nothing to read.
    if (split == null) {
        return null;
    }

    // Open the source once (unless the operator is already finishing). A RecordPageSource
    // is driven through its cursor; any other source is consumed page-by-page.
    boolean notOpened = pageSource == null && cursor == null;
    if (notOpened && !finishing) {
        ConnectorPageSource created = pageSourceProvider.createPageSource(operatorContext.getSession(), split, columns);
        if (created instanceof RecordPageSource) {
            cursor = ((RecordPageSource) created).getCursor();
        }
        else {
            pageSource = created;
        }
    }

    return (pageSource != null) ? processPageSource() : processColumnSource();
}
// Delegates page production to the wrapped page source.
@Override public Page getNextPage() { return inner.getNextPage(); }
// Delegates the read-time metric to the wrapped page source.
@Override public long getReadTimeNanos() { return inner.getReadTimeNanos(); }
/**
 * Test helper: opens {@code targetFile} with the supplied cursor provider and returns the
 * data as a {@link RecordPageSource} over the given column names/types.
 */
private static ConnectorPageSource createPageSource(
        HiveRecordCursorProvider cursorProvider,
        ConnectorSession session,
        File targetFile,
        List<String> columnNames,
        List<Type> columnTypes,
        HiveStorageFormat format)
{
    TypeTranslator hiveTypeTranslator = new HiveTypeTranslator();

    // Synthesize a REGULAR column handle for each (name, type) pair, preserving ordinal position.
    List<HiveColumnHandle> columnHandleList = new ArrayList<>(columnNames.size());
    for (int position = 0; position < columnNames.size(); position++) {
        Type columnType = columnTypes.get(position);
        HiveColumnHandle handle = new HiveColumnHandle(
                columnNames.get(position),
                toHiveType(hiveTypeTranslator, columnType),
                columnType.getTypeSignature(),
                position,
                REGULAR,
                Optional.empty());
        columnHandleList.add(handle);
    }

    // Full-file scan: start 0, split length and file size both equal the file length;
    // TupleDomain.all() disables predicate pushdown.
    RecordCursor fileCursor = cursorProvider
            .createRecordCursor(
                    conf,
                    session,
                    new Path(targetFile.getAbsolutePath()),
                    0,
                    targetFile.length(),
                    targetFile.length(),
                    createSchema(format, columnNames, columnTypes),
                    columnHandleList,
                    TupleDomain.all(),
                    DateTimeZone.forID(session.getTimeZoneKey().getId()),
                    TYPE_MANAGER,
                    false)
            .get();
    return new RecordPageSource(columnTypes, fileCursor);
}
/**
 * Looks up this split's keys in the in-memory index and returns the matching rows,
 * projected to {@code outputColumnNames}, as a record-backed page source.
 *
 * @throws IllegalArgumentException if no index was built for the requested columns
 */
@Override
protected ConnectorPageSource createLookupPageSource(SplitInfo splitInfo, List<String> outputColumnNames)
{
    String tableName = splitInfo.getTableName();
    List<String> lookupColumns = splitInfo.getLookupColumnNames();

    // Fetch the index built for exactly this table/scale-factor/lookup-column combination.
    IndexedTable indexedTable = indexedData.getIndexedTable(
            tableName,
            schemaNameToScaleFactor(splitInfo.getSchemaName()),
            ImmutableSet.copyOf(lookupColumns))
            .orElseThrow(() -> new IllegalArgumentException(String.format("No such index: %s%s", tableName, lookupColumns)));

    // Turn the serialized keys into a record set, probe the index, and wrap the result.
    RecordSet probeKeys = new ListBasedRecordSet(splitInfo.getKeys(), types(tableName, lookupColumns));
    return new RecordPageSource(lookupIndexKeys(probeKeys, indexedTable, outputColumnNames));
}
/**
 * Reads {@code dataFile} through the format under test and asserts that it yields exactly
 * the values in {@code expectedValues} (one iterator per column).
 *
 * @throws IOException if opening or reading the file fails
 */
private static void assertFileContents(
        ConnectorSession session,
        File dataFile,
        Iterator<?>[] expectedValues,
        List<String> columnNames,
        List<Type> columnTypes)
        throws IOException
{
    try (ConnectorPageSource source = getFileFormat().createFileFormatReader(session, HDFS_ENVIRONMENT, dataFile, columnNames, columnTypes)) {
        // Cursor-backed formats are verified row-by-row; everything else page-by-page.
        if (source instanceof RecordPageSource) {
            assertRecordCursor(columnTypes, expectedValues, ((RecordPageSource) source).getCursor());
        }
        else {
            assertPageSource(columnTypes, expectedValues, source);
        }
        // Every expected-value iterator must be exhausted, or the file was missing rows.
        assertFalse(stream(expectedValues).allMatch(Iterator::hasNext));
    }
}
// Delegates page production to the wrapped page source.
@Override public Page getNextPage() { return inner.getNextPage(); }
// Delegates the read-time metric to the wrapped page source.
@Override public long getReadTimeNanos() { return inner.getReadTimeNanos(); }