Codota Logo
DataFileReader.getMetaString
Code IndexAdd Codota to your IDE (free)

How to use
getMetaString
method
in
org.apache.avro.file.DataFileReader

Best Java code snippets using org.apache.avro.file.DataFileReader.getMetaString (Showing top 13 results out of 315)

  • Common ways to obtain DataFileReader
private void myMethod () {
DataFileReader d =
  • Codota IconFile file;DatumReader reader;new DataFileReader<>(file, reader)
  • Codota IconSeekableInput sin;DatumReader reader;new DataFileReader<>(sin, reader)
  • Codota IconSeekableInput sin;new DataFileReader<>(sin, new GenericDatumReader<Void>())
  • Smart code suggestions by Codota
}
origin: apache/avro

try {
 Schema schema = fileReader.getSchema();
 String codecStr = fileReader.getMetaString(DataFileConstants.CODEC);
 CodecFactory codecFactory = CodecFactory.fromString("" + codecStr);
 List<String> metas = fileReader.getMetaKeys();
origin: apache/flume

 this.target = reader.getMetaString("file");
} else {
origin: apache/avro

String codecStr = fileReader.getMetaString("avro.codec");
if (null == codecStr) {
 codecStr = "null";
origin: apache/avro

metaValue,
new DataFileReader<Void>(defaultOutputFile, new GenericDatumReader<>())
 .getMetaString(metaKey));
origin: apache/avro

/**
 * Verifies that a SortedKeyValueFile written with each supported named codec
 * ("null", "deflate", "snappy", "bzip2") records that codec's name in the
 * resulting data file's "avro.codec" metadata entry.
 *
 * @throws IOException if writing or reading the key-value file fails
 */
@Test
public void testNamedCodecs() throws IOException {
  Configuration conf = new Configuration();
  Path myfile = new Path(mTempDir.getRoot().getPath(), "myfile");
  Schema key = Schema.create(Schema.Type.STRING);
  Schema value = Schema.create(Schema.Type.STRING);
  Schema recordSchema = AvroKeyValue.getSchema(key, value);
  DatumReader<GenericRecord> datumReader = SpecificData.get().createDatumReader(recordSchema);
  SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
    .withKeySchema(key)
    .withValueSchema(value)
    .withConfiguration(conf)
    .withPath(myfile);
  for (String codec : new String[]{"null", "deflate", "snappy", "bzip2"}) {
    // Fixed log typo: the original concatenated the codec name directly
    // onto "codec" with no separating space ("deflatecodec").
    LOG.debug("Using " + codec + " codec for a SortedKeyValueFile...");
    options.withCodec(codec);
    SortedKeyValueFile.Writer<CharSequence, CharSequence> writer =
      new SortedKeyValueFile.Writer<>(options);
    writer.close();
    // try-with-resources guarantees the reader is closed even when the
    // assertion fails; the original leaked the reader on assertion failure.
    try (DataFileReader<GenericRecord> reader = new DataFileReader<>(
        new FsInput(new Path(myfile, SortedKeyValueFile.DATA_FILENAME), conf),
        datumReader)) {
      assertEquals(codec, reader.getMetaString("avro.codec"));
    }
  }
}
origin: apache/avro

/**
 * Verifies that a SortedKeyValueFile written with an explicit
 * CodecFactory.deflateCodec(9) instance records "deflate" in the
 * resulting data file's "avro.codec" metadata entry.
 *
 * @throws IOException if writing or reading the key-value file fails
 */
@Test
public void testDeflateClassCodec() throws IOException {
  Configuration conf = new Configuration();
  Path myfile = new Path(mTempDir.getRoot().getPath(), "myfile");
  Schema key = Schema.create(Schema.Type.STRING);
  Schema value = Schema.create(Schema.Type.STRING);
  Schema recordSchema = AvroKeyValue.getSchema(key, value);
  DatumReader<GenericRecord> datumReader = SpecificData.get().createDatumReader(recordSchema);
  LOG.debug("Using CodecFactory.deflateCodec() for a SortedKeyValueFile...");
  SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
    .withKeySchema(key)
    .withValueSchema(value)
    .withConfiguration(conf)
    .withPath(myfile)
    .withCodec(CodecFactory.deflateCodec(9));
  SortedKeyValueFile.Writer<CharSequence, CharSequence> writer =
    new SortedKeyValueFile.Writer<>(options);
  writer.close();
  // try-with-resources guarantees the reader is closed even when the
  // assertion fails; the original leaked the reader on assertion failure.
  try (DataFileReader<GenericRecord> reader = new DataFileReader<>(
      new FsInput(new Path(myfile, SortedKeyValueFile.DATA_FILENAME), conf),
      datumReader)) {
    assertEquals("deflate", reader.getMetaString("avro.codec"));
  }
}
origin: Netflix/iceberg

/**
 * Lazily captures this file's string metadata from the given reader.
 * The map is populated only on the first call; later calls leave it
 * untouched. Returns the same reader to allow call chaining.
 */
private DataFileReader<D> initMetadata(DataFileReader<D> reader) {
  if (metadata != null) {
    // Already captured on an earlier call — nothing to do.
    return reader;
  }
  this.metadata = Maps.newHashMap();
  for (String metaKey : reader.getMetaKeys()) {
    this.metadata.put(metaKey, reader.getMetaString(metaKey));
  }
  return reader;
}
origin: org.apache.avro/avro-tools

try {
 Schema schema = fileReader.getSchema();
 String codecStr = fileReader.getMetaString(DataFileConstants.CODEC);
 CodecFactory codecFactory = CodecFactory.fromString("" + codecStr);
 List<String> metas = fileReader.getMetaKeys();
origin: shunfei/flume-plugin

  this.target = reader.getMetaString("file");
  this.ino = reader.getMetaString("ino");
  logger.info("get ino from meta:" + ino);
} else {
origin: org.apache.flume/flume-ng-core

 this.target = reader.getMetaString("file");
} else {
origin: apache/sqoop

assertEquals(codec, reader.getMetaString(DataFileConstants.CODEC));
origin: apache/sqoop

reader.getMetaString(DataFileConstants.CODEC));
origin: apache/sqoop

/**
 * Import blob data that is smaller than inline lob limit and compress with
 * deflate codec. Blob data should be encoded and saved as Avro bytes.
 * @throws IOException
 * @throws SQLException
 */
@Test
public void testBlobCompressedAvroImportInline()
    throws IOException, SQLException {
  String expectedVal = "This is short BLOB data";
  createTableWithColTypes(
      new String[] { getBlobType() },
      new String[] { getBlobInsertStr(expectedVal) });
  runImport(getArgv("--compression-codec", CodecMap.DEFLATE));
  DataFileReader<GenericRecord> reader =
      read(new Path(getTablePath(), "part-m-00000.avro"));
  GenericRecord record = reader.next();
  // Verify that the data block of the Avro file is compressed with deflate
  // codec.
  assertEquals(CodecMap.DEFLATE,
      reader.getMetaString(DataFileConstants.CODEC));
  // Verify that all columns are imported correctly.
  // NOTE(review): new String(byte[]) uses the platform default charset;
  // consider an explicit StandardCharsets.UTF_8 — confirm intended encoding.
  ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
  String returnVal = new String(buf.array());
  assertEquals(getColName(0), expectedVal, returnVal);
}
org.apache.avro.fileDataFileReadergetMetaString

Popular methods of DataFileReader

  • <init>
    Construct a reader for a file.
  • next
  • hasNext
  • close
  • openReader
    Construct a reader for a file at the current position of the input, without reading the header.
  • getSchema
  • sync
    Move to the next synchronization point after a position. To process a range of file entries, call th
  • previousSync
    Return the last synchronization point before our current position.
  • seek
    Move to a specific, known synchronization point, one returned from DataFileWriter#sync() while writi
  • pastSync
    Return true if past the next synchronization point after a position.
  • getMeta
  • getHeader
  • getMeta,
  • getHeader,
  • getBlockCount,
  • initialize,
  • iterator,
  • blockFinished,
  • getBlockSize,
  • getMetaKeys,
  • nextBlock

Popular in Java

  • Reactive rest calls using spring rest template
  • notifyDataSetChanged (ArrayAdapter)
  • getSharedPreferences (Context)
  • compareTo (BigDecimal)
    Compares this BigDecimal with the specified BigDecimal. Two BigDecimal objects that are equal in val
  • Kernel (java.awt.image)
  • System (java.lang)
    Provides access to system-related information and resources including standard input and output. Ena
  • Set (java.util)
    A collection that contains no duplicate elements. More formally, sets contain no pair of elements e1
  • Collectors (java.util.stream)
  • ServletException (javax.servlet)
    Defines a general exception a servlet can throw when it encounters difficulty.
  • Reflections (org.reflections)
    Reflections one-stop-shop objectReflections scans your classpath, indexes the metadata, allows you t
Codota Logo
  • Products

    Search for Java codeSearch for JavaScript codeEnterprise
  • IDE Plugins

    IntelliJ IDEAWebStormAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogCodota Academy Plugin user guide Terms of usePrivacy policyJava Code IndexJavascript Code Index
Get Codota for your IDE now