/**
 * Constructs a job that registers each given consumer on a single ExportListener and adds that
 * listener to this job's URIs-ready listeners.
 * <p>
 * Annotated with {@code @SafeVarargs} because the generic varargs array is only iterated, never
 * stored or exposed, so no heap pollution can occur; this silences the unchecked-varargs warning
 * at call sites.
 *
 * @param consumers zero or more consumers to invoke for each exported DocumentRecord
 */
@SafeVarargs
public SimpleExportJob(Consumer<DocumentRecord>... consumers) {
    exportListener = new ExportListener();
    for (Consumer<DocumentRecord> consumer : consumers) {
        exportListener.onDocumentReady(consumer);
    }
    this.addUrisReadyListener(exportListener);
}
/**
 * Prepares each ExportListener created by a subclass before it's used to process a QueryBatch,
 * applying whatever optional settings (consistent snapshot, metadata categories, non-document
 * format, transform) have been configured on this job. Subclasses are expected to call this with
 * their own instance of ExportListener or a subclass of it.
 *
 * @param listener the listener to configure; must not be null
 */
protected void prepareExportListener(ExportListener listener) {
    if (consistentSnapshot) {
        listener.withConsistentSnapshot();
    }
    if (categories != null) {
        for (DocumentManager.Metadata metadataCategory : categories) {
            listener.withMetadataCategory(metadataCategory);
        }
    }
    if (nonDocumentFormat != null) {
        listener.withNonDocumentFormat(nonDocumentFormat);
    }
    if (transform != null) {
        listener.withTransform(transform);
    }
}
.withConsistentSnapshot() .onUrisReady( new ExportListener() .withConsistentSnapshot() .onDocumentReady(doc -> {
/**
 * Exports the batch to a single zip file.
 * <p>
 * The WriteToZipConsumer is closed in a finally block so the zip file handle is released even
 * when {@code processEvent} throws (e.g. a failed read of one of the batch's documents);
 * previously an exception would leak the open zip stream.
 *
 * @param queryBatch the batch of URIs to export
 */
@Override
protected void exportBatch(QueryBatch queryBatch) {
    File file = getFileForBatch(queryBatch, exportDir);
    WriteToZipConsumer consumer = new WriteToZipConsumer(file);
    prepareWriteToZipConsumer(consumer);
    try {
        ExportListener listener = new ExportListener();
        listener.onDocumentReady(consumer);
        prepareExportListener(listener);
        listener.processEvent(queryBatch);
    } finally {
        // always release the zip file, even if processing the batch fails
        consumer.close();
    }
}
@Test public void testExportListenerException() { final AtomicInteger failureBatchCount = new AtomicInteger(); testListenerException( new ExportListener() .withTransform(new ServerTransform("thisTransformDoesntExist")) .onBatchFailure( (batch, throwable) -> failureBatchCount.incrementAndGet() ) ); // there should be one failure sent to the ExportListener // onBatchFailure listener since the transform is invalid assertEquals(1, failureBatchCount.get()); }
/**
 * The server transform to apply to each document before it is sent to the
 * OutputListener registered with onGenerateOutput. Overridden to narrow the
 * return type so calls can be chained on ExportToWriterListener.
 *
 * @param transform the server transform to apply to each document
 * @return this instance (for method chaining)
 */
@Override public ExportToWriterListener withTransform(ServerTransform transform) { super.withTransform(transform); return this; }
/**
 * The format for the metadata retrieved with each document. The metadata will be available in
 * each DocumentRecord sent to the OutputListener registered with onGenerateOutput. Overridden
 * to narrow the return type so calls can be chained on ExportToWriterListener.
 *
 * @param nonDocumentFormat the format for the metadata
 * @return this instance (for method chaining)
 */
@Override
public ExportToWriterListener withNonDocumentFormat(Format nonDocumentFormat) {
    super.withNonDocumentFormat(nonDocumentFormat);
    return this;
}
/**
 * Adds a metadata category to retrieve with each document. The metadata will be available via
 * {@link DocumentRecord#getMetadata DocumentRecord.getMetadata} in each DocumentRecord sent to
 * the OutputListener registered with onGenerateOutput. To choose the format for the metadata,
 * call {@link #withNonDocumentFormat withNonDocumentFormat}. Overridden to narrow the return
 * type so calls can be chained on ExportToWriterListener.
 *
 * @param category the metadata category to retrieve
 * @return this instance (for method chaining)
 */
@Override
public ExportToWriterListener withMetadataCategory(DocumentManager.Metadata category) {
    super.withMetadataCategory(category);
    return this;
}
/**
 * This implementation of initializeListener adds this instance of ExportListener to the two
 * RetryListener's in this QueryBatcher so they will retry any batches that fail during the
 * read request. Each registration is skipped when the corresponding listener (or the retry
 * listener it produces) is absent.
 */
@Override
public void initializeListener(QueryBatcher queryBatcher) {
    HostAvailabilityListener availabilityListener = HostAvailabilityListener.getInstance(queryBatcher);
    if (availabilityListener != null) {
        BatchFailureListener<QueryBatch> availabilityRetry = availabilityListener.initializeRetryListener(this);
        if (availabilityRetry != null) {
            onFailure(availabilityRetry);
        }
    }
    NoResponseListener noResponse = NoResponseListener.getInstance(queryBatcher);
    if (noResponse != null) {
        BatchFailureListener<QueryBatch> noResponseRetry = noResponse.initializeRetryListener(this);
        if (noResponseRetry != null) {
            onFailure(noResponseRetry);
        }
    }
}
try ( DocumentPage docs = getDocs(batch) ) { while ( docs.hasNext() ) { for ( Consumer<DocumentRecord> listener : exportListeners ) {
/**
 * Constructs the job and registers its configurable properties. "exportPath" is required; the
 * remaining properties tune how records are named and transformed as they are written to the
 * zip file.
 * <p>
 * Fix: the "uriPrefix" description previously read "to each URI it is used as an entry name",
 * dropping the word "before".
 */
public ExportToZipJob() {
    super();
    addRequiredJobProperty("exportPath",
        "The path of the zip file to which selected records are exported",
        value -> setExportFile(new File(value)));
    addJobProperty("flattenUri",
        "Whether or not record URIs are flattened before being used as zip entry names; defaults to false",
        value -> getWriteToZipConsumer().setFlattenUri(Boolean.parseBoolean(value)));
    addJobProperty("transform",
        "The name of a REST transform to apply to each record before it is written to the zip file",
        value -> getExportListener().withTransform(new ServerTransform(value)));
    addJobProperty("uriPrefix",
        "Prefix to prepend to each URI before it is used as an entry name; applied after a URI is optionally flattened",
        value -> getWriteToZipConsumer().setUriPrefix(value));
}
/**
 * Sets the file that exported records are written to, creating any missing parent directories,
 * and wires up a fresh WriteToZipConsumer + ExportListener pair for it.
 * <p>
 * Fix: the original assigned {@code this.exportFile = exportFile} twice in a row; the duplicate
 * statement has been removed.
 *
 * @param exportFile the zip file to write exported records to
 */
public void setExportFile(File exportFile) {
    this.exportFile = exportFile;
    // ensure the target directory exists before the zip file is opened
    if (this.exportFile.getParentFile() != null) {
        this.exportFile.getParentFile().mkdirs();
    }
    this.writeToZipConsumer = new WriteToZipConsumer(exportFile);
    this.exportListener = new ExportListener();
    this.exportListener.onDocumentReady(writeToZipConsumer);
    this.addUrisReadyListener(this.exportListener);
}
batcher = dmm.newQueryBatcher(sqb.value(sqb.jsonProperty("jobId"), jobIds)); batcher.onUrisReady(new ExportListener().onDocumentReady(zipConsumer)); JobTicket jobTicket = dmm.startJob(batcher); batcher = dmm.newQueryBatcher(sqb.value(sqb.element(new QName("jobId")), jobIds)); batcher.onUrisReady(new ExportListener().onDocumentReady(zipConsumer)); jobTicket = dmm.startJob(batcher);
.withBatchSize(99) .withConsistentSnapshot() .onUrisReady(new ExportListener() .onDocumentReady(doc -> { String contents = doc.getContent(new StringHandle()).get(); if (doc.getUri().equals(contents)) {