/**
 * Builds the executor on top of the bounded in-memory queue, sizing the queue
 * from the write config's buffer memory limit.
 *
 * @param hoodieConfig write config supplying the queue's byte limit
 * @param producer producer that feeds records into the queue
 * @param consumer consumer that drains the queue
 * @param bufferedIteratorTransform transform applied to each record on its way
 *        from producer to consumer
 */
public SparkBoundedInMemoryExecutor(final HoodieWriteConfig hoodieConfig,
                                    BoundedInMemoryQueueProducer<I> producer,
                                    BoundedInMemoryQueueConsumer<O, E> consumer,
                                    Function<I, O> bufferedIteratorTransform) {
  super(hoodieConfig.getWriteBufferLimitBytes(), producer, Optional.of(consumer), bufferedIteratorTransform);
  // NOTE(review): captures the calling thread's Spark TaskContext — presumably so it
  // can be made available to the queueing threads; confirm against superclass usage.
  this.sparkThreadTaskContext = TaskContext.get();
}
new IteratorBasedQueueProducer<>(hoodieRecords.iterator()).produce(queue1); return true; }); new IteratorBasedQueueProducer<>(mockHoodieRecordsIterator).produce(queue2); } catch (Exception ex) { queue2.markAsFailed(ex);
new IteratorBasedQueueProducer<>(hoodieRecords.iterator()).produce(queue); queue.close(); return true;
new IteratorBasedQueueProducer<>(hoodieRecords.iterator()).produce(queue); return true; });
/**
 * Sets up log and parquet reading in parallel. Both producers write into the
 * shared central buffer.
 *
 * @return two producers: a function-based one that drives the log record scan,
 *         and an iterator-based one that drains the parquet record iterator
 */
@SuppressWarnings("unchecked")
private List<BoundedInMemoryQueueProducer<ArrayWritable>> getParallelProducers() {
  // Log side: running the scan is what pushes records toward the buffer;
  // the lambda itself has nothing to return.
  BoundedInMemoryQueueProducer<ArrayWritable> logProducer =
      new FunctionBasedQueueProducer<>(queue -> {
        logRecordScanner.scan();
        return null;
      });
  // Parquet side: simply drain the pre-built record iterator.
  BoundedInMemoryQueueProducer<ArrayWritable> parquetProducer =
      new IteratorBasedQueueProducer<>(parquetRecordsIterator);
  List<BoundedInMemoryQueueProducer<ArrayWritable>> result = new ArrayList<>(2);
  result.add(logProducer);
  result.add(parquetProducer);
  return result;
}
// Builds the executor on top of the bounded in-memory queue, sizing the queue from the
// write config's buffer memory limit (getWriteBufferLimitBytes). Also captures the calling
// thread's Spark TaskContext — presumably for propagation to the queueing threads;
// TODO(review): confirm against superclass usage.
public SparkBoundedInMemoryExecutor(final HoodieWriteConfig hoodieConfig, BoundedInMemoryQueueProducer<I> producer, BoundedInMemoryQueueConsumer<O, E> consumer, Function<I, O> bufferedIteratorTransform) { super(hoodieConfig.getWriteBufferLimitBytes(), producer, Optional.of(consumer), bufferedIteratorTransform); this.sparkThreadTaskContext = TaskContext.get(); }
producers.add(new IteratorBasedQueueProducer<>(r.iterator())); } else { producers.add(new FunctionBasedQueueProducer<>((buf) -> {