/**
 * Spring Batch ItemWriter hook: forwards every queued write operation
 * (URI, metadata, content) to the underlying MarkLogic WriteBatcher.
 */
@Override
public void write(List<? extends DocumentWriteOperation> items) {
  items.forEach(operation ->
      writeBatcher.add(operation.getUri(), operation.getMetadata(), operation.getContent()));
}
// Fragment of a larger test method (enclosing method not visible in this chunk).
// Queues docs 2-4 — rebuilding the metadata handle with the shared test collections
// before each add — flushes the batch, then asserts the success listener fired.
batcher.addAs(uri2, meta, doc2);
meta = new DocumentMetadataHandle().withCollections(collection, whbTestCollection);
batcher.add(uri3, meta, doc3);
meta = new DocumentMetadataHandle().withCollections(collection, whbTestCollection);
batcher.add(uri4, meta, new JacksonHandle(doc4));
batcher.flushAndWait();
// successListenerWasRun is presumably a StringBuffer/StringBuilder set by the listener — TODO confirm
assertEquals("The success listener should have run", "true", successListenerWasRun.toString());
public Message process(Message message) throws Exception { String uri = (String) message.getBody().get("uri"); Document xmlBody = (Document) message.getBody().get("content"); // do processing and validation batcher.add(uri, new DOMHandle(xmlBody)); return message; }
/**
 * End-to-end smoke test mirroring the "Using WriteBatcher" package-info example:
 * writes two small docs through a WriteBatcher and verifies both land in the database.
 */
@Test
public void testWriteBatcher() {
  // Precondition: neither document exists yet (exists() returns null when absent).
  assertEquals(null, client.newDocumentManager().exists("doc1.txt"));
  assertEquals(null, client.newDocumentManager().exists("doc2.txt"));
  // begin copy from "Using WriteBatcher" in src/main/java/com/marklogic/datamovement/package-info.java
  WriteBatcher whb = dataMovementManager.newWriteBatcher()
    .withBatchSize(100)
    .withThreadCount(20)
    .onBatchSuccess(batch -> {
      logger.debug("batch # {}, so far: {}", batch.getJobBatchNumber(), batch.getJobWritesSoFar());
    })
    .onBatchFailure((batch,throwable) -> throwable.printStackTrace() );
  JobTicket ticket = dataMovementManager.startJob(whb);
  // the add or addAs methods could be called in separate threads on the
  // single whb instance
  whb.add ("doc1.txt", new StringHandle("doc1 contents"));
  whb.addAs("doc2.txt", "doc2 contents");
  whb.flushAndWait(); // send the two docs even though they're not a full batch
  dataMovementManager.stopJob(ticket);
  // end copy from "Using WriteBatcher" in src/main/java/com/marklogic/datamovement/package-info.java
  // Postcondition: both documents are now readable.
  assertTrue(null != client.newDocumentManager().exists("doc1.txt"));
  assertTrue(null != client.newDocumentManager().exists("doc2.txt"));
}
}
public void testExceptions(WriteBatcher writeBatcher, DocumentWriteSet docs, int expectedSuccesses, int expectedFailures) { final AtomicInteger successfulBatchCount = new AtomicInteger(0); final AtomicInteger failureBatchCount = new AtomicInteger(0); writeBatcher .withBatchSize(1) .onBatchSuccess( batch -> successfulBatchCount.incrementAndGet() ) .onBatchFailure( (batch, throwable) -> failureBatchCount.incrementAndGet() ); moveMgr.startJob(writeBatcher); for ( DocumentWriteOperation doc : docs ) { writeBatcher.add(doc.getUri(), doc.getContent()); } // while batchSize=1 means all batches are queued, we still need to wait for them to finish writeBatcher.flushAndWait(); moveMgr.stopJob(writeBatcher); assertEquals(expectedSuccesses, successfulBatchCount.get()); assertEquals(expectedFailures, failureBatchCount.get()); }
/**
 * Verifies that WriteBatcher closes content handles after writing: wraps a
 * FileInputStream in an anonymous subclass that records close(), writes it,
 * and asserts the stream was closed and no batch failed.
 */
@Test
public void testCloseHandles() throws Exception {
  DocumentMetadataHandle meta = new DocumentMetadataHandle()
    .withCollections(whbTestCollection);
  final AtomicInteger failCount = new AtomicInteger(0);
  WriteBatcher batcher = moveMgr.newWriteBatcher()
    .onBatchFailure( (batch, throwable) -> {
      logger.error("Error in testCloseHandles", throwable);
      failCount.incrementAndGet();
    } );
  JobTicket ticket = moveMgr.startJob(batcher);
  AtomicBoolean closed = new AtomicBoolean(false);
  // Anonymous subclass flips the flag when close() runs so we can assert on it below.
  FileInputStream fileStream = new FileInputStream("src/test/resources/test.xml") {
    public void close() throws IOException {
      super.close();
      closed.set(true);
    }
  };
  batcher.add("test.xml", meta, new InputStreamHandle(fileStream));
  // when we call flushAndWait, the WriteBatcher should write the batch then close all the handles
  batcher.flushAndWait();
  assertEquals(true, closed.get());
  moveMgr.stopJob(ticket);
  // No failure listener should have run for this single successful write.
  assertEquals(0, failCount.get());
}
// Fragment (enclosing method and the closing brace are not visible in this chunk).
// For each regular file, queues it under /input/<filename> tagged with the
// "SupportCall" collection — presumably inside a directory-walk loop; TODO confirm.
if (!Files.isDirectory(path)) {
  FileHandle handle = new FileHandle(path.toFile());
  batcher.add("/input/" + path.toFile().getName(),
    new DocumentMetadataHandle().withCollections("SupportCall"),
    handle);
// Fragment (enclosing method not visible in this chunk).
// Queues a small plain-text document tagged with both test collections.
DocumentMetadataHandle meta = new DocumentMetadataHandle()
  .withCollections(whbTestCollection, collection);
batcher.add(uri, meta, new StringHandle("test").withFormat(Format.TEXT));
// Fragment (enclosing method not visible in this chunk).
// Writes one string document, waits for all queued batches, then stops the job.
batcher.add("/doc/string", meta, new StringHandle(docContents));
batcher.flushAndWait();
moveMgr.stopJob(ticket);
// Fragment (enclosing method not visible in this chunk).
// Starts the job, queues two XML employee documents loaded from test resources,
// and flushes so both writes complete before the caller proceeds.
dataMovementManager.startJob(batcher);
batcher.add("/employee1.xml", new StringHandle(getResource("flow-manager-test/input/employee1.xml")).withFormat(Format.XML));
batcher.add("/employee2.xml", new StringHandle(getResource("flow-manager-test/input/employee2.xml")).withFormat(Format.XML));
batcher.flushAndWait();
// Fragment (enclosing loop/method not visible in this chunk).
// Queues one JSON document per iteration; i, metadataHandle and handle come from
// the enclosing scope — presumably the same handle is reused for every doc; TODO confirm.
writeBatcher.add("/doc-" + i + ".json", metadataHandle, handle);
// Fragment (enclosing method and the loop's closing brace are not visible in this chunk).
// Queues TEST_SIZE documents whose URI extension reflects the data format under test.
String dataFormatString = dataFormat.toString();
for (int i = 0; i < TEST_SIZE; i++) {
  writeBatcher.add("/input-" + i + "." + dataFormatString, metadataHandle, handle);
// Fragment (enclosing method and the loop's closing brace are not visible in this chunk).
// Same pattern as the TEST_SIZE variant but driven by a (presumably parameterized)
// testSize value — TODO confirm against the enclosing test.
String dataFormatString = dataFormat.toString();
for (int i = 0; i < testSize; i++) {
  writeBatcher.add("/input-" + i + "." + dataFormatString, metadataHandle, handle);
// Fragment (enclosing method not visible; the leading "});" closes a listener or
// lambda defined above this chunk). Starts the job, queues one document whose URI
// encodes a suffix and the data format, and flushes the batch.
});
flowRunnerDataMovementManager.startJob(batcher);
batcher.add("/input" + fileSuffix + "." + dataFormat.toString(), handle);
batcher.flushAndWait();