try { Schema schema = fileReader.getSchema(); String codecStr = fileReader.getMetaString(DataFileConstants.CODEC); CodecFactory codecFactory = CodecFactory.fromString("" + codecStr); List<String> metas = fileReader.getMetaKeys();
this.target = reader.getMetaString("file"); } else {
String codecStr = fileReader.getMetaString("avro.codec"); if (null == codecStr) { codecStr = "null";
metaValue, new DataFileReader<Void>(defaultOutputFile, new GenericDatumReader<>()) .getMetaString(metaKey));
/**
 * Verifies that SortedKeyValueFile honors codecs passed by name.
 *
 * <p>For each of the named codecs ("null", "deflate", "snappy", "bzip2") an
 * empty SortedKeyValueFile is written, then the underlying Avro data file is
 * reopened and its "avro.codec" metadata entry is asserted to match the
 * requested codec name.
 *
 * @throws IOException if writing or reading the file fails
 */
@Test
public void testNamedCodecs() throws IOException {
  Configuration conf = new Configuration();
  Path myfile = new Path(mTempDir.getRoot().getPath(), "myfile");
  Schema key = Schema.create(Schema.Type.STRING);
  Schema value = Schema.create(Schema.Type.STRING);
  Schema recordSchema = AvroKeyValue.getSchema(key, value);
  DatumReader<GenericRecord> datumReader = SpecificData.get().createDatumReader(recordSchema);
  DataFileReader<GenericRecord> reader;

  // Options are reused across iterations; only the codec changes each pass.
  SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
      .withKeySchema(key)
      .withValueSchema(value)
      .withConfiguration(conf)
      .withPath(myfile);
  SortedKeyValueFile.Writer<CharSequence, CharSequence> writer;

  for (String codec : new String[]{"null", "deflate", "snappy", "bzip2"}) {
    // BUGFIX: the original message lacked a space between the codec name and
    // "codec", logging e.g. "Using nullcodec for a SortedKeyValueFile...".
    LOG.debug("Using " + codec + " codec for a SortedKeyValueFile...");
    options.withCodec(codec);
    writer = new SortedKeyValueFile.Writer<>(options);
    writer.close();

    reader = new DataFileReader<>(
        new FsInput(new Path(myfile, SortedKeyValueFile.DATA_FILENAME), conf),
        datumReader);
    assertEquals(codec, reader.getMetaString("avro.codec"));
    reader.close();
  }
}
/**
 * Verifies that a codec supplied as a {@code CodecFactory} instance
 * (deflate at compression level 9) is applied to the data file backing a
 * SortedKeyValueFile: the written file's "avro.codec" metadata must read
 * "deflate".
 *
 * @throws IOException if writing or reading the file fails
 */
@Test
public void testDeflateClassCodec() throws IOException {
  Configuration conf = new Configuration();
  Path file = new Path(mTempDir.getRoot().getPath(), "myfile");

  Schema keySchema = Schema.create(Schema.Type.STRING);
  Schema valueSchema = Schema.create(Schema.Type.STRING);
  DatumReader<GenericRecord> datumReader =
      SpecificData.get().createDatumReader(AvroKeyValue.getSchema(keySchema, valueSchema));

  LOG.debug("Using CodecFactory.deflateCodec() for a SortedKeyValueFile...");

  // Write an empty file with the deflate codec configured via CodecFactory.
  SortedKeyValueFile.Writer<CharSequence, CharSequence> writer =
      new SortedKeyValueFile.Writer<>(new SortedKeyValueFile.Writer.Options()
          .withKeySchema(keySchema)
          .withValueSchema(valueSchema)
          .withConfiguration(conf)
          .withPath(file)
          .withCodec(CodecFactory.deflateCodec(9)));
  writer.close();

  // Reopen the underlying data file and check the recorded codec name.
  DataFileReader<GenericRecord> reader = new DataFileReader<>(
      new FsInput(new Path(file, SortedKeyValueFile.DATA_FILENAME), conf),
      datumReader);
  assertEquals("deflate", reader.getMetaString("avro.codec"));
  reader.close();
}
/**
 * Lazily captures the reader's string metadata into the {@code metadata} map.
 *
 * <p>On the first call (when {@code metadata} is null) every metadata key of
 * the reader is copied with its string value; later calls are no-ops. The
 * reader is returned unchanged so the call can be chained.
 *
 * @param reader the open data file reader to harvest metadata from
 * @return the same reader, for call chaining
 */
private DataFileReader<D> initMetadata(DataFileReader<D> reader) {
  // Guard clause: metadata already harvested on an earlier call.
  if (metadata != null) {
    return reader;
  }
  this.metadata = Maps.newHashMap();
  for (String metaKey : reader.getMetaKeys()) {
    metadata.put(metaKey, reader.getMetaString(metaKey));
  }
  return reader;
}
try { Schema schema = fileReader.getSchema(); String codecStr = fileReader.getMetaString(DataFileConstants.CODEC); CodecFactory codecFactory = CodecFactory.fromString("" + codecStr); List<String> metas = fileReader.getMetaKeys();
this.target = reader.getMetaString("file"); this.ino = reader.getMetaString("ino"); logger.info("get ino from meta:" + ino); } else {
this.target = reader.getMetaString("file"); } else {
assertEquals(codec, reader.getMetaString(DataFileConstants.CODEC));
reader.getMetaString(DataFileConstants.CODEC));
/** * Import blob data that is smaller than inline lob limit and compress with * deflate codec. Blob data should be encoded and saved as Avro bytes. * @throws IOException * @throws SQLException */ @Test public void testBlobCompressedAvroImportInline() throws IOException, SQLException { String [] types = { getBlobType() }; String expectedVal = "This is short BLOB data"; String [] vals = { getBlobInsertStr(expectedVal) }; createTableWithColTypes(types, vals); runImport(getArgv("--compression-codec", CodecMap.DEFLATE)); Path outputFile = new Path(getTablePath(), "part-m-00000.avro"); DataFileReader<GenericRecord> reader = read(outputFile); GenericRecord record = reader.next(); // Verify that the data block of the Avro file is compressed with deflate // codec. assertEquals(CodecMap.DEFLATE, reader.getMetaString(DataFileConstants.CODEC)); // Verify that all columns are imported correctly. ByteBuffer buf = (ByteBuffer) record.get(getColName(0)); String returnVal = new String(buf.array()); assertEquals(getColName(0), expectedVal, returnVal); }