/**
 * Builds a {@link DataWriter} for Parquet {@link Group} records targeted at the
 * configured destination.
 *
 * <p>Validates that the builder was fully configured (destination, writer id,
 * schema) and that the requested output format is {@code PARQUET} before
 * constructing the writer.
 *
 * @return a {@link ParquetHdfsDataWriter} when the destination type is HDFS
 * @throws IOException if the underlying writer cannot be created
 * @throws NullPointerException if destination or schema is unset
 * @throws IllegalArgumentException if the writer id is missing or the format is not PARQUET
 * @throws RuntimeException if the destination type is not supported
 */
@Override
public DataWriter<Group> build() throws IOException {
  // Include messages so a misconfigured builder fails with a diagnosable error
  // instead of a bare precondition exception.
  Preconditions.checkNotNull(this.destination, "Destination must be set");
  Preconditions.checkArgument(!Strings.isNullOrEmpty(this.writerId), "Writer id must be non-empty");
  Preconditions.checkNotNull(this.schema, "Schema must be set");
  Preconditions.checkArgument(this.format == WriterOutputFormat.PARQUET,
      "Output format must be PARQUET, got: %s", this.format);
  switch (this.destination.getType()) {
    case HDFS:
      return new ParquetHdfsDataWriter(this, this.destination.getProperties());
    default:
      throw new RuntimeException("Unknown destination type: " + this.destination.getType());
  }
}
/**
 * Writes two records through the writer, then verifies the per-write record
 * counts and the field values read back from the committed Parquet output.
 */
@Test
public void testWrite() throws Exception {
  Group record1 = TestConstants.PARQUET_RECORD_1;
  Group record2 = TestConstants.PARQUET_RECORD_2;
  String filePath = TestConstants.TEST_OUTPUT_DIR + Path.SEPARATOR + this.filePath;
  File outputFile = new File(filePath, TestConstants.PARQUET_TEST_FILENAME);

  // Write both records, capturing the running count after each write.
  this.writer.write(record1);
  long firstWrite = this.writer.recordsWritten();
  this.writer.write(record2);
  long secondWrite = this.writer.recordsWritten();

  // Finalize the output so it can be read back.
  this.writer.close();
  this.writer.commit();

  List<Group> records = readParquetFiles(outputFile);
  Group resultRecord1 = records.get(0);
  Group resultRecord2 = records.get(1);

  Assert.assertEquals(firstWrite, 1);
  Assert.assertEquals(secondWrite, 2);
  Assert.assertEquals(resultRecord1.getString("name", 0), "tilak");
  Assert.assertEquals(resultRecord1.getInteger("age", 0), 22);
  Assert.assertEquals(resultRecord2.getString("name", 0), "other");
  Assert.assertEquals(resultRecord2.getInteger("age", 0), 22);
}
/**
 * Builds a {@link DataWriter} for Parquet {@link Group} records targeted at the
 * configured destination.
 *
 * <p>Validates that the builder was fully configured (destination, writer id,
 * schema) and that the requested output format is {@code PARQUET} before
 * constructing the writer.
 *
 * @return a {@link ParquetHdfsDataWriter} when the destination type is HDFS
 * @throws IOException if the underlying writer cannot be created
 * @throws NullPointerException if destination or schema is unset
 * @throws IllegalArgumentException if the writer id is missing or the format is not PARQUET
 * @throws RuntimeException if the destination type is not supported
 */
@Override
public DataWriter<Group> build() throws IOException {
  // Include messages so a misconfigured builder fails with a diagnosable error
  // instead of a bare precondition exception.
  Preconditions.checkNotNull(this.destination, "Destination must be set");
  Preconditions.checkArgument(!Strings.isNullOrEmpty(this.writerId), "Writer id must be non-empty");
  Preconditions.checkNotNull(this.schema, "Schema must be set");
  Preconditions.checkArgument(this.format == WriterOutputFormat.PARQUET,
      "Output format must be PARQUET, got: %s", this.format);
  switch (this.destination.getType()) {
    case HDFS:
      return new ParquetHdfsDataWriter(this, this.destination.getProperties());
    default:
      throw new RuntimeException("Unknown destination type: " + this.destination.getType());
  }
}