@Test public void testPrematureStopIteratorJob() { // this test can't actually validate that the message is printed, so a human // must check the logging output // Expected something like: // 18:14:58.420 [main] WARN c.m.c.d.impl.QueryBatcherImpl - QueryBatcher instance "unnamed" stopped before all results were processed List<String> list = new ArrayList<>(); list.add("firstUri.txt"); QueryBatcher batcher = moveMgr.newQueryBatcher(list.iterator()); moveMgr.stopJob(batcher); }
/**
 * Builds, prepares, and starts a QueryBatcher job on the given client.
 * Reuses this object's DataMovementManager when one was configured, otherwise
 * creates a new one from the client. Optionally blocks until completion and
 * optionally stops the job afterwards, logging the job description (when set)
 * at start and at completion.
 *
 * @param databaseClient the client used to create a DataMovementManager when none was configured
 * @return a ticket wrapping the manager, the batcher, and the started job's JobTicket
 */
@Override
public QueryBatcherJobTicket run(DatabaseClient databaseClient) {
  DataMovementManager dataMovementMgr;
  if (this.dataMovementManager != null) {
    dataMovementMgr = this.dataMovementManager;
  } else {
    dataMovementMgr = databaseClient.newDataMovementManager();
  }

  String description = getJobDescription();
  if (description != null && logger.isInfoEnabled()) {
    logger.info(description);
  }

  QueryBatcher batcher = newQueryBatcherBuilder().buildQueryBatcher(databaseClient, dataMovementMgr);
  prepareQueryBatcher(batcher);

  JobTicket ticket = dataMovementMgr.startJob(batcher);
  if (awaitCompletion) {
    batcher.awaitCompletion();
    // Only stop the job when configured to; some callers stop it themselves.
    if (stopJobAfterCompletion) {
      dataMovementMgr.stopJob(batcher);
    }
    if (description != null && logger.isInfoEnabled()) {
      logger.info("Completed: " + description);
    }
  }
  return new QueryBatcherJobTicket(dataMovementMgr, batcher, ticket);
}
/**
 * Runs the given QueryBatcher with batch size 1 and verifies it produces
 * exactly the expected number of successful and failed batches.
 */
public void testExceptions(QueryBatcher queryBatcher, int expectedSuccesses, int expectedFailures) {
  final AtomicInteger successCount = new AtomicInteger();
  final AtomicInteger failureCount = new AtomicInteger();

  // Batch size 1 so each URI is its own success/failure event.
  queryBatcher.withBatchSize(1);
  queryBatcher.onUrisReady(batch -> successCount.incrementAndGet());
  queryBatcher.onQueryFailure(queryThrowable -> failureCount.incrementAndGet());

  moveMgr.startJob(queryBatcher);
  queryBatcher.awaitCompletion();
  moveMgr.stopJob(queryBatcher);

  assertEquals(expectedSuccesses, successCount.get());
  assertEquals(expectedFailures, failureCount.get());
}
private void testListenerException(QueryBatchListener listener) { final AtomicInteger failureBatchCount = new AtomicInteger(); Iterator<String> iterator = Arrays.asList(new String[] {uri1}).iterator(); QueryBatcher queryBatcher = moveMgr.newQueryBatcher(iterator) .onUrisReady( batch -> logger.debug("uri={}", batch.getItems()[0]) ) .onUrisReady(listener) .onQueryFailure( queryThrowable -> failureBatchCount.incrementAndGet() ); moveMgr.startJob(queryBatcher); queryBatcher.awaitCompletion(); moveMgr.stopJob(queryBatcher); // there should be no failure sent to the QueryBatcher onQueryFailure listeners assertEquals(0, failureBatchCount.get()); }
@Test
public void testIssue793() {
  // Regression test for issue 793: queue a document, then start and immediately
  // stop the job. The test passes as long as no exception is thrown.
  WriteBatcher writeBatcher = moveMgr.newWriteBatcher();
  writeBatcher.addAs("test.txt", "test");
  moveMgr.startJob(writeBatcher);
  moveMgr.stopJob(writeBatcher);
}
}
public void testExceptions(WriteBatcher writeBatcher, DocumentWriteSet docs, int expectedSuccesses, int expectedFailures) { final AtomicInteger successfulBatchCount = new AtomicInteger(0); final AtomicInteger failureBatchCount = new AtomicInteger(0); writeBatcher .withBatchSize(1) .onBatchSuccess( batch -> successfulBatchCount.incrementAndGet() ) .onBatchFailure( (batch, throwable) -> failureBatchCount.incrementAndGet() ); moveMgr.startJob(writeBatcher); for ( DocumentWriteOperation doc : docs ) { writeBatcher.add(doc.getUri(), doc.getContent()); } // while batchSize=1 means all batches are queued, we still need to wait for them to finish writeBatcher.flushAndWait(); moveMgr.stopJob(writeBatcher); assertEquals(expectedSuccesses, successfulBatchCount.get()); assertEquals(expectedFailures, failureBatchCount.get()); }
@Test public void testPrematureStopQueryJob() { // this test can't actually validate that the message is printed, so a human // must check the logging output // Expected something like: // 18:20:33.607 [main] WARN c.m.c.d.impl.QueryBatcherImpl - QueryBatcher instance "unnamed" stopped before all results were retrieved StructuredQueryDefinition query = new StructuredQueryBuilder().and(); QueryBatcher batcher = moveMgr.newQueryBatcher(query); moveMgr.stopJob(batcher); }
// NOTE(review): fragment of a larger batch listener — braces are unbalanced here.
batchCount.incrementAndGet();
// Once more than 40 success events have been reported for this job, stop it mid-run.
if(moveMgr.getJobReport(queryTicket.get()).getSuccessEventsCount() > 40){
  moveMgr.stopJob(queryTicket.get());
/**
 * Verifies that the WriteBatcher closes the handles it was given: an anonymous
 * FileInputStream subclass records whether close() was invoked after the batch
 * is written via flushAndWait().
 */
@Test
public void testCloseHandles() throws Exception {
  DocumentMetadataHandle meta = new DocumentMetadataHandle()
    .withCollections(whbTestCollection);
  final AtomicInteger failCount = new AtomicInteger(0);
  WriteBatcher batcher = moveMgr.newWriteBatcher()
    .onBatchFailure(
      (batch, throwable) -> {
        logger.error("Error in testCloseHandles", throwable);
        failCount.incrementAndGet();
      }
    );
  JobTicket ticket = moveMgr.startJob(batcher);
  AtomicBoolean closed = new AtomicBoolean(false);
  // Anonymous subclass that flips the flag when the stream is closed.
  FileInputStream fileStream = new FileInputStream("src/test/resources/test.xml") {
    public void close() throws IOException {
      super.close();
      closed.set(true);
    }
  };
  batcher.add("test.xml", meta, new InputStreamHandle(fileStream));
  // when we call flushAndWait, the WriteBatcher should write the batch then close all the handles
  batcher.flushAndWait();
  assertEquals(true, closed.get());
  moveMgr.stopJob(ticket);
  // No batch failures are expected for this single small document.
  assertEquals(0, failCount.get());
}
// NOTE(review): fragment of a larger test method.
// Stopping the job should trigger close() on the registered listeners;
// verify both hooks fired.
moveMgr.stopJob(writeBatcher);
assertTrue("Close method is not called on WriteBatchListener", calledBatchListener.get());
assertTrue("Close method is not called on WriteFailureListener", calledFailureListener.get());
// NOTE(review): fragment of a larger test method.
// Start the job, block until all queued batches finish, then stop it by ticket.
JobTicket ticket = moveMgr.startJob( batcher );
batcher.awaitCompletion();
moveMgr.stopJob(ticket);
// NOTE(review): fragment of a larger test method — stop the write job.
moveMgr.stopJob(writeBatcher);
// NOTE(review): fragment of a larger test method.
// Queue one string document, wait for the write to complete, then stop the job.
batcher.add("/doc/string", meta, new StringHandle(docContents));
batcher.flushAndWait();
moveMgr.stopJob(ticket);
@Test public void testResultIgnore() throws Exception { DocumentMetadataHandle meta = new DocumentMetadataHandle().withCollections(collection); // write the document client.newDocumentManager().writeAs(collection + "/test2.json", meta, "{ \"testProperty\": \"test2\" }"); StructuredQueryDefinition query = sqb.value(sqb.jsonProperty("testProperty"), "test2"); ServerTransform transform = new ServerTransform(transformName2) .addParameter("newValue", "test2a"); ApplyTransformListener listener = new ApplyTransformListener() .withTransform(transform) .withApplyResult(ApplyResult.IGNORE); QueryBatcher batcher = moveMgr.newQueryBatcher(query) .onUrisReady(listener); JobTicket ticket = moveMgr.startJob( batcher ); batcher.awaitCompletion(); moveMgr.stopJob(ticket); JsonNode docContents = docMgr.readAs(collection + "/test2.json", JsonNode.class); assertEquals( "the transform should have changed testProperty to 'test2a'", "test2a", docContents.get("testProperty").textValue() ); }
@Test public void testResultReplace() throws Exception { DocumentMetadataHandle meta = new DocumentMetadataHandle().withCollections(collection); // write the document client.newDocumentManager().writeAs(collection + "/test1.json", meta, "{ \"testProperty\": \"test1\" }"); StructuredQueryDefinition query = sqb.value(sqb.jsonProperty("testProperty"), "test1"); ServerTransform transform = new ServerTransform(transformName1) .addParameter("newValue", "test1a"); ApplyTransformListener listener = new ApplyTransformListener() .withTransform(transform) .withApplyResult(ApplyResult.REPLACE); QueryBatcher batcher = moveMgr.newQueryBatcher(query) .onUrisReady(listener); JobTicket ticket = moveMgr.startJob( batcher ); batcher.awaitCompletion(); moveMgr.stopJob(ticket); JsonNode docContents = docMgr.readAs(collection + "/test1.json", JsonNode.class); assertEquals( "the transform should have changed testProperty to 'test1a'", "test1a", docContents.get("testProperty").textValue() ); }
/**
 * Exercises the WriteBatcher example copied verbatim from the package-info
 * documentation: writes two docs and verifies both exist afterwards.
 * NOTE: the code between the begin/end copy markers must stay in sync with
 * src/main/java/com/marklogic/datamovement/package-info.java.
 */
@Test
public void testWriteBatcher() {
  // Precondition: neither document exists yet.
  assertEquals(null, client.newDocumentManager().exists("doc1.txt"));
  assertEquals(null, client.newDocumentManager().exists("doc2.txt"));
  // begin copy from "Using WriteBatcher" in src/main/java/com/marklogic/datamovement/package-info.java
  WriteBatcher whb = dataMovementManager.newWriteBatcher()
    .withBatchSize(100)
    .withThreadCount(20)
    .onBatchSuccess(batch -> {
      logger.debug("batch # {}, so far: {}", batch.getJobBatchNumber(), batch.getJobWritesSoFar());
    })
    .onBatchFailure((batch,throwable) -> throwable.printStackTrace() );
  JobTicket ticket = dataMovementManager.startJob(whb);
  // the add or addAs methods could be called in separate threads on the
  // single whb instance
  whb.add ("doc1.txt", new StringHandle("doc1 contents"));
  whb.addAs("doc2.txt", "doc2 contents");
  whb.flushAndWait(); // send the two docs even though they're not a full batch
  dataMovementManager.stopJob(ticket);
  // end copy from "Using WriteBatcher" in src/main/java/com/marklogic/datamovement/package-info.java
  // Postcondition: both documents now exist.
  assertTrue(null != client.newDocumentManager().exists("doc1.txt"));
  assertTrue(null != client.newDocumentManager().exists("doc2.txt"));
}
}
/**
 * Exercises the QueryBatcher example copied verbatim from the package-info
 * documentation: writes three docs, deletes the .txt one via the batcher, and
 * verifies the remaining two.
 * NOTE: the code between the begin/end copy markers must stay in sync with
 * src/main/java/com/marklogic/datamovement/package-info.java.
 */
@Test
public void testQueryBatcher() {
  // Seed three documents (json, xml, txt) in the test collection.
  client.newDocumentManager().writeAs(collection + "/test1.json", meta, "[true]");
  client.newDocumentManager().writeAs(collection + "/test1.xml", meta, "<xml/>");
  client.newDocumentManager().writeAs(collection + "/test1.txt", meta, "text");
  assertEquals(3, client.newQueryManager().search(collectionQuery, new SearchHandle()).getTotalResults());
  StructuredQueryDefinition query = collectionQuery;
  // begin copy from "Using QueryBatcher" in src/main/java/com/marklogic/datamovement/package-info.java
  QueryBatcher qhb = dataMovementManager.newQueryBatcher(query)
    .withBatchSize(1000)
    .withThreadCount(20)
    .withConsistentSnapshot()
    .onUrisReady(batch -> {
      for ( String uri : batch.getItems() ) {
        if ( uri.endsWith(".txt") ) {
          client.newDocumentManager().delete(uri);
        }
      }
    })
    .onQueryFailure(queryBatchException -> queryBatchException.printStackTrace());
  JobTicket ticket = dataMovementManager.startJob(qhb);
  qhb.awaitCompletion();
  dataMovementManager.stopJob(ticket);
  // end copy from "Using QueryBatcher" in src/main/java/com/marklogic/datamovement/package-info.java
  // The .txt doc was deleted; the .json and .xml docs remain.
  SearchHandle results = client.newQueryManager().search(collectionQuery, new SearchHandle());
  assertEquals(2, results.getTotalResults());
  for ( MatchDocumentSummary match : results.getMatchResults() ) {
    assertTrue(match.getUri().matches(".*/test1.(json|xml)"));
  }
}
// NOTE(review): fragment of a larger test method.
// Start the query job, wait for all batches to be processed, then stop it by ticket.
JobTicket ticket = moveMgr.startJob(queryBatcher);
queryBatcher.awaitCompletion();
moveMgr.stopJob(ticket);
public static void setup() throws Exception { WriteBatcher writeBatcher = moveMgr.newWriteBatcher(); moveMgr.startJob(writeBatcher); // a collection so we're only looking at docs related to this test DocumentMetadataHandle meta = new DocumentMetadataHandle() .withCollections(collection, qhbTestCollection); // all the docs are one-word text docs writeBatcher.addAs(uri1, meta, new StringHandle("{name:\"John Doe\", department:\"HR\"}").withFormat(JSON)); writeBatcher.addAs(uri2, meta, new StringHandle("{name:\"Jane Doe\", department:\"HR\"}").withFormat(JSON)); writeBatcher.addAs(uri3, meta, new StringHandle("{name:\"John Smith\", department:\"HR\"}").withFormat(JSON)); writeBatcher.addAs(uri4, meta, new StringHandle("{name:\"John Lennon\",department:\"HR\"}").withFormat(JSON)); writeBatcher.addAs(uri5, meta, new StringHandle("{name:\"John Man\", department:\"Engineering\"}").withFormat(JSON)); writeBatcher.flushAsync(); writeBatcher.awaitCompletion(); moveMgr.stopJob(writeBatcher); StringHandle options = new StringHandle( "<options xmlns='http://marklogic.com/appservices/search'>" + "<constraint name='dept'>" + "<value>" + "<json-property>department</json-property>" + "</value>" + "</constraint>" + "</options>") .withFormat(XML); QueryOptionsManager queryOptionsMgr = Common.connectAdmin().newServerConfigManager().newQueryOptionsManager(); queryOptionsMgr.writeOptions("employees", options); }