rcFileEncoding, codecName, new AircompressorCodecFactory(new HadoopCodecFactory(getClass().getClassLoader())), metadata, validationInputFactory.isPresent());
@Override
public RcFileCompressor createCompressor(String codecName)
{
    // Resolve the Hadoop codec by name and wrap it in the compressor adapter.
    return new HadoopCompressor(createCompressionCodec(codecName));
}
public PrestoRcFileFormatWriter(File targetFile, List<Type> types, RcFileEncoding encoding, HiveCompressionCodec compressionCodec) throws IOException { writer = new RcFileWriter( new OutputStreamSliceOutput(new FileOutputStream(targetFile)), types, encoding, compressionCodec.getCodec().map(Class::getName), new AircompressorCodecFactory(new HadoopCodecFactory(getClass().getClassLoader())), ImmutableMap.of(), true); }
@Override
public RcFileDecompressor createDecompressor(String codecName)
{
    // Resolve the Hadoop codec by name and wrap it in the decompressor adapter.
    return new HadoopDecompressor(createCompressionCodec(codecName));
}
private static RcFileReader createRcFileReader(TempFile tempFile, Type type, RcFileEncoding encoding)
        throws IOException
{
    // Open a single-column reader over the whole temp file (offset 0 .. length),
    // using an 8MB target buffer size.
    File file = tempFile.getFile();
    RcFileReader reader = new RcFileReader(
            new FileRcFileDataSource(file),
            encoding,
            ImmutableMap.of(0, type),
            new AircompressorCodecFactory(new HadoopCodecFactory(RcFileTester.class.getClassLoader())),
            0,
            file.length(),
            new DataSize(8, MEGABYTE));
    // Exactly the one requested column must be exposed.
    assertEquals(reader.getColumnCount(), 1);
    return reader;
}
@Override
public RcFileCompressor createCompressor(String codecName)
{
    // Look up the named Hadoop compression codec, then adapt it
    // to the RcFileCompressor interface.
    CompressionCodec hadoopCodec = createCompressionCodec(codecName);
    return new HadoopCompressor(hadoopCodec);
}
rcFileEncoding, readColumns.build(), new AircompressorCodecFactory(new HadoopCodecFactory(configuration.getClassLoader())), start, length,
@Override
public RcFileDecompressor createDecompressor(String codecName)
{
    // Look up the named Hadoop compression codec, then adapt it
    // to the RcFileDecompressor interface.
    CompressionCodec hadoopCodec = createCompressionCodec(codecName);
    return new HadoopDecompressor(hadoopCodec);
}
private static DataSize writeRcFileColumnNew(File outputFile, Format format, Compression compression, Type type, Iterator<?> values, Map<String, String> metadata) throws Exception { OutputStreamSliceOutput output = new OutputStreamSliceOutput(new FileOutputStream(outputFile)); AircompressorCodecFactory codecFactory = new AircompressorCodecFactory(new HadoopCodecFactory(RcFileTester.class.getClassLoader())); RcFileWriter writer = new RcFileWriter( output, ImmutableList.of(type), format.getVectorEncoding(), compression.getCodecName(), codecFactory, metadata, new DataSize(100, KILOBYTE), // use a smaller size to create more row groups new DataSize(200, KILOBYTE), true); BlockBuilder blockBuilder = type.createBlockBuilder(null, 1024); while (values.hasNext()) { Object value = values.next(); writeValue(type, blockBuilder, value); } writer.write(new Page(blockBuilder.build())); writer.close(); writer.validate(new FileRcFileDataSource(outputFile)); return new DataSize(output.size(), BYTE); }
public PrestoRcFileFormatWriter(File targetFile, List<Type> types, RcFileEncoding encoding, HiveCompressionCodec compressionCodec)
        throws IOException
{
    // Build the RC file writer over the target file with aircompressor-backed
    // Hadoop codecs, no extra metadata, and validation turned on.
    writer = new RcFileWriter(
            new OutputStreamSliceOutput(new FileOutputStream(targetFile)),
            types,
            encoding,
            // codec class name is absent when compression is NONE
            compressionCodec.getCodec().map(Class::getName),
            new AircompressorCodecFactory(new HadoopCodecFactory(getClass().getClassLoader())),
            ImmutableMap.of(),
            true);
}
private static RcFileReader createRcFileReader(TempFile tempFile, Type type, RcFileEncoding encoding)
        throws IOException
{
    // Reader covering the entire file, mapping column 0 to the given type.
    RcFileDataSource dataSource = new FileRcFileDataSource(tempFile.getFile());
    AircompressorCodecFactory codecFactory = new AircompressorCodecFactory(new HadoopCodecFactory(RcFileTester.class.getClassLoader()));
    RcFileReader reader = new RcFileReader(
            dataSource,
            encoding,
            ImmutableMap.of(0, type),
            codecFactory,
            0,
            tempFile.getFile().length(),
            new DataSize(8, MEGABYTE));
    // Sanity check: only the single requested column is visible.
    assertEquals(reader.getColumnCount(), 1);
    return reader;
}
private static DataSize writeRcFileColumnNew(File outputFile, Format format, Compression compression, Type type, Iterator<?> values, Map<String, String> metadata) throws Exception { OutputStreamSliceOutput output = new OutputStreamSliceOutput(new FileOutputStream(outputFile)); AircompressorCodecFactory codecFactory = new AircompressorCodecFactory(new HadoopCodecFactory(RcFileTester.class.getClassLoader())); RcFileWriter writer = new RcFileWriter( output, ImmutableList.of(type), format.getVectorEncoding(), compression.getCodecName(), codecFactory, metadata, new DataSize(100, KILOBYTE), // use a smaller size to create more row groups new DataSize(200, KILOBYTE), true); BlockBuilder blockBuilder = type.createBlockBuilder(null, 1024); while (values.hasNext()) { Object value = values.next(); writeValue(type, blockBuilder, value); } writer.write(new Page(blockBuilder.build())); writer.close(); writer.validate(new FileRcFileDataSource(outputFile)); return new DataSize(output.size(), BYTE); }