/**
 * Eagerly materializes the Avro insert value for {@code record} at construction time,
 * capturing any conversion failure in {@code exception} instead of propagating it.
 *
 * <p>NOTE(review): the broad {@code catch (Exception e)} appears intentional — the
 * consumer is expected to inspect {@code exception} rather than handle a throw here.
 *
 * @param record the Hoodie record whose payload is converted
 * @param schema Avro schema used for the payload conversion
 */
public HoodieInsertValueGenResult(T record, Schema schema) {
  this.record = record;
  try {
    // Convert the payload up front so the expensive Avro work happens on this thread.
    this.insertValue = record.getData().getInsertValue(schema);
  } catch (Exception e) {
    // Record the failure for later inspection instead of failing construction.
    this.exception = Optional.of(e);
  }
}
}
/**
 * Builds a transformation function that pairs a {@link HoodieRecord} with its Avro
 * insert value. BufferedIterator uses this so the expensive payload-to-Avro conversion
 * runs on the reader thread.
 *
 * @param schema Avro schema used for the payload conversion
 * @return a function mapping each record to a (record, optional insert value) tuple
 */
static <T extends HoodieRecordPayload> Function<HoodieRecord<T>, Tuple2<HoodieRecord<T>, Optional<IndexedRecord>>> getTransformFunction(
    Schema schema) {
  return record -> {
    try {
      // Materialize the Avro view eagerly; the tuple carries both representations.
      Optional<IndexedRecord> insertValue = record.getData().getInsertValue(schema);
      return new Tuple2<>(record, insertValue);
    } catch (IOException e) {
      // Surface conversion failures as an unchecked Hoodie exception.
      throw new HoodieException(e);
    }
  };
}
/**
 * Drains the record iterator, converting each record's payload to its Avro insert
 * value and writing it out.
 *
 * @throws HoodieInsertException if payload conversion or the underlying write fails
 */
public void write() {
  try {
    while (recordIterator.hasNext()) {
      HoodieRecord<T> nextRecord = recordIterator.next();
      // Convert the payload to Avro, then delegate to the record-level writer.
      Optional<IndexedRecord> insertValue = nextRecord.getData().getInsertValue(schema);
      write(nextRecord, insertValue);
    }
  } catch (IOException io) {
    throw new HoodieInsertException(
        "Failed to insert records for path " + getStorageWriterPath(), io);
  }
}
/**
 * Writes every remaining record from the iterator, converting each payload to its
 * Avro insert value first.
 *
 * @throws HoodieInsertException if payload conversion or the underlying write fails
 */
public void write() {
  try {
    while (recordIterator.hasNext()) {
      HoodieRecord<T> current = recordIterator.next();
      // One conversion + one write per record; IOException from either is handled below.
      write(current, current.getData().getInsertValue(schema));
    }
  } catch (IOException io) {
    // Wrap in the insert-specific exception so callers see the failing path.
    throw new HoodieInsertException(
        "Failed to insert records for path " + getStorageWriterPath(), io);
  }
}
HoodieMemoryConfig.DEFAULT_SPILLABLE_MAP_BASE_PATH); for (HoodieRecord<? extends HoodieRecordPayload> hoodieRecord : scanner) { Optional<IndexedRecord> record = hoodieRecord.getData().getInsertValue(readerSchema); if (allRecords.size() >= limit) { break;
Optional<IndexedRecord> recordOptional = hoodieRecord.getData().getInsertValue(getReaderSchema()); ArrayWritable aWritable; String key = hoodieRecord.getRecordKey();
private Optional<IndexedRecord> getIndexedRecord(HoodieRecord<T> hoodieRecord) { Optional recordMetadata = hoodieRecord.getData().getMetadata(); try { Optional<IndexedRecord> avroRecord = hoodieRecord.getData().getInsertValue(schema);
private Optional<IndexedRecord> getIndexedRecord(HoodieRecord<T> hoodieRecord) { Optional recordMetadata = hoodieRecord.getData().getMetadata(); try { Optional<IndexedRecord> avroRecord = hoodieRecord.getData().getInsertValue(schema);
/**
 * Returns copies of {@code oldRecords} in which {@code fieldNameToUpdate} is set to
 * {@code newValue}; the rebuilt records carry no Hoodie metadata fields.
 *
 * @param oldRecords        records whose payloads are read and copied
 * @param schema            Avro schema used to materialize each payload
 * @param fieldNameToUpdate Avro field to overwrite in every record
 * @param newValue          value written into that field
 * @return new records keyed like the originals but with the updated payloads
 * @throws HoodieIOException if a record's payload cannot be read as Avro
 */
public static List<HoodieRecord> updateHoodieTestRecordsWithoutHoodieMetadata(List<HoodieRecord> oldRecords,
    Schema schema, String fieldNameToUpdate, String newValue) throws IOException, URISyntaxException {
  return oldRecords.stream().map(oldRecord -> {
    try {
      // Read the payload back as a generic Avro record and mutate the requested field.
      GenericRecord avroRecord = (GenericRecord) oldRecord.getData().getInsertValue(schema).get();
      avroRecord.put(fieldNameToUpdate, newValue);
      // Re-wrap the mutated record in a fresh payload under the same key.
      HoodieAvroPayload payload = new HoodieAvroPayload(Optional.of(avroRecord));
      return new HoodieRecord<>(oldRecord.getKey(), payload);
    } catch (IOException io) {
      throw new HoodieIOException("unable to get data from hoodie record", io);
    }
  }).collect(Collectors.toList());
}
if (!writtenRecordKeys.contains(key)) { HoodieRecord<T> hoodieRecord = keyToNewRecords.get(key); writeRecord(hoodieRecord, hoodieRecord.getData().getInsertValue(schema)); insertRecordsWritten++;
scannedRecords.add((IndexedRecord) record.getData().getInsertValue(schema).get());
HoodieRecord record = records.get(key); List<IndexedRecord> recordsToUpdate = new ArrayList<>(); recordsToUpdate.add((IndexedRecord) record.getData().getInsertValue(schema).get()); GenericRecord gRecord = (GenericRecord) records.get(key).getData().getInsertValue(schema).get(); record = records.get(key); recordsToUpdate = new ArrayList<>(); recordsToUpdate.add((IndexedRecord) record.getData().getInsertValue(schema).get()); gRecord = (GenericRecord) records.get(key).getData().getInsertValue(schema).get();
records.put(r.getRecordKey(), r); }); GenericRecord gRecord = (GenericRecord) records.get(key).getData().getInsertValue(schema).get(); records.put(r.getRecordKey(), r); }); gRecord = (GenericRecord) records.get(key).getData().getInsertValue(schema).get();
/**
 * Groups {@code updatedRecords} by their current file location and appends each group
 * as an Avro data block to a new Hudi log file under the corresponding partition path.
 *
 * <p>Fixes over the previous version: a record whose payload fails Avro conversion now
 * fails the test instead of being silently appended as {@code null}, and the log writer
 * is closed in a {@code finally} block so it is not leaked when {@code appendBlock}
 * (or the builder) throws.
 *
 * @param fs             filesystem the log files are written to
 * @param basePath       table base path; partition paths are resolved under it
 * @param schema         Avro schema stamped into the block header and used for conversion
 * @param updatedRecords records to append, grouped by {@code getCurrentLocation()}
 */
public static void writeRecordsToLogFiles(FileSystem fs, String basePath, Schema schema,
    List<HoodieRecord> updatedRecords) {
  Map<HoodieRecordLocation, List<HoodieRecord>> groupedUpdated = updatedRecords.stream()
      .collect(Collectors.groupingBy(HoodieRecord::getCurrentLocation));
  groupedUpdated.entrySet().forEach(s -> {
    HoodieRecordLocation location = s.getKey();
    String partitionPath = s.getValue().get(0).getPartitionPath();
    Writer logWriter = null;
    try {
      logWriter = HoodieLogFormat.newWriterBuilder().onParentPath(new Path(basePath, partitionPath))
          .withFileExtension(HoodieLogFile.DELTA_EXTENSION).withFileId(location.getFileId())
          .overBaseCommit(location.getCommitTime()).withFs(fs).build();
      Map<HoodieLogBlock.HeaderMetadataType, String> header = Maps.newHashMap();
      header.put(HoodieLogBlock.HeaderMetadataType.INSTANT_TIME, location.getCommitTime());
      header.put(HoodieLogBlock.HeaderMetadataType.SCHEMA, schema.toString());
      logWriter.appendBlock(new HoodieAvroDataBlock(s.getValue().stream().map(r -> {
        try {
          GenericRecord val = (GenericRecord) r.getData().getInsertValue(schema).get();
          HoodieAvroUtils.addHoodieKeyToRecord(val, r.getRecordKey(), r.getPartitionPath(), "");
          return (IndexedRecord) val;
        } catch (IOException e) {
          // Do not swallow conversion failures: returning null here would silently append
          // a null record to the block. Rethrow unchecked so the outer catch fails the test.
          throw new RuntimeException(e);
        }
      }).collect(Collectors.toList()), header));
    } catch (Exception e) {
      fail(e.toString());
    } finally {
      // Always release the writer, even when build()/appendBlock() throws.
      if (logWriter != null) {
        try {
          logWriter.close();
        } catch (Exception e) {
          fail(e.toString());
        }
      }
    }
  });
}
/**
 * Flushes any pending inserts, closes the underlying storage writer, and populates the
 * final {@link WriteStatus} statistics for this handle.
 *
 * <p>Order matters here: pending records are written before the maps are cleared, and
 * the writer is closed before the on-disk file size is read for the stats.
 *
 * @return the completed write status for this handle
 * @throws HoodieUpsertException if flushing, closing, or stat collection fails
 */
@Override
public WriteStatus close() {
  try {
    // write out any pending records (this can happen when inserts are turned into updates)
    for (String key : keyToNewRecords.keySet()) {
      if (!writtenRecordKeys.contains(key)) {
        HoodieRecord<T> hoodieRecord = keyToNewRecords.get(key);
        writeRecord(hoodieRecord, hoodieRecord.getData().getInsertValue(schema));
        insertRecordsWritten++;
      }
    }
    // Release the bookkeeping maps now that everything has been written.
    keyToNewRecords.clear();
    writtenRecordKeys.clear();
    if (storageWriter != null) {
      storageWriter.close();
    }
    // Stats are read after close so the reported file size reflects the final file.
    writeStatus.getStat().setTotalWriteBytes(FSUtils.getFileSize(fs, getStorageWriterPath()));
    writeStatus.getStat().setNumWrites(recordsWritten);
    writeStatus.getStat().setNumDeletes(recordsDeleted);
    writeStatus.getStat().setNumUpdateWrites(updatedRecordsWritten);
    writeStatus.getStat().setNumInserts(insertRecordsWritten);
    writeStatus.getStat().setTotalWriteErrors(writeStatus.getFailedRecords().size());
    RuntimeStats runtimeStats = new RuntimeStats();
    runtimeStats.setTotalUpsertTime(timer.endTimer());
    writeStatus.getStat().setRuntimeStats(runtimeStats);
    return writeStatus;
  } catch (IOException e) {
    throw new HoodieUpsertException("Failed to close UpdateHandle", e);
  }
}
final HoodieRecord originalRecord = originalRecordIterator.next(); final Optional<IndexedRecord> originalInsertValue = originalRecord.getData() .getInsertValue(HoodieTestDataGenerator.avroSchema); final HoodieInsertValueGenResult<HoodieRecord> payload = queue.iterator().next(); payload.record.getData().getInsertValue(HoodieTestDataGenerator.avroSchema)); recordsRead++;
scanner.forEach(s -> { try { if (!s.getData().getInsertValue(schema).isPresent()) { emptyPayloads.add(true);
record -> { GenericRecord rec = (GenericRecord) record.getData().getInsertValue(getReaderSchema()).get(); ArrayWritable aWritable = (ArrayWritable) avroToArrayWritable(rec, getWriterSchema()); this.executor.getQueue().insertRecord(aWritable);
HoodieRecord rec = records.get(((GenericRecord) record).get(HoodieRecord.RECORD_KEY_METADATA_FIELD)); try { assertEquals(rec.getData().getInsertValue(schema).get(), record); } catch (IOException io) { throw new UncheckedIOException(io);
int seqId = 1; for (HoodieRecord record : records) { GenericRecord avroRecord = (GenericRecord) record.getData().getInsertValue(schema).get(); HoodieAvroUtils.addCommitMetadataToRecord(avroRecord, commitTime, "" + seqId++); HoodieAvroUtils.addHoodieKeyToRecord(avroRecord, record.getRecordKey(), record.getPartitionPath(), filename);