// Runs the XQuery-based transform registered by TransformExtensionsTest
// through the shared runTransform() helper.
@Test
public void testXQueryTransform()
    throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException,
        ResourceNotResendableException {
  ServerTransform xqueryTransform = new ServerTransform(TransformExtensionsTest.XQUERY_NAME);
  runTransform(xqueryTransform);
}
// Runs the XSLT-based transform registered by TransformExtensionsTest
// through the shared runTransform() helper.
@Test
public void testXSLTransform()
    throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException,
        ResourceNotResendableException {
  ServerTransform xsltTransform = new ServerTransform(TransformExtensionsTest.XSLT_NAME);
  runTransform(xsltTransform);
}
// Runs the server-side JavaScript transform through the shared runTransform() helper.
@Test
public void testJavascriptTransform() throws Exception {
  ServerTransform javascriptTransform = new ServerTransform(JS_NAME);
  runTransform(javascriptTransform);
}
/**
 * Job that exports selected records to a single zip file.
 * Registers the job properties this job understands; the lambdas run later,
 * when property values are applied.
 */
public ExportToZipJob() {
  super();
  // exportPath is required and registered first; the consumer/listener
  // configured by the optional properties below presumably depend on the
  // export file being set — TODO confirm ordering contract in the superclass.
  addRequiredJobProperty("exportPath",
    "The path of the zip file to which selected records are exported",
    value -> setExportFile(new File(value)));
  addJobProperty("flattenUri",
    "Whether or not record URIs are flattened before being used as zip entry names; defaults to false",
    value -> getWriteToZipConsumer().setFlattenUri(Boolean.parseBoolean(value)));
  addJobProperty("transform",
    "The name of a REST transform to apply to each record before it is written to the zip file",
    value -> getExportListener().withTransform(new ServerTransform(value)));
  // Fixed help text: was "each URI it is used as an entry name".
  addJobProperty("uriPrefix",
    "Prefix to prepend to each URI before it is used as an entry name; applied after a URI is optionally flattened",
    value -> getWriteToZipConsumer().setUriPrefix(value));
}
@Test public void testExportToWriterListenerException() { final AtomicInteger failureBatchCount = new AtomicInteger(); testListenerException( new ExportToWriterListener(new StringWriter()) .withTransform(new ServerTransform("thisTransformDoesntExist")) .onBatchFailure( (batch, throwable) -> failureBatchCount.incrementAndGet() ) ); // there should be one failure sent to the ExportToWriterListener // onBatchFailure listener since the transform is invalid assertEquals(1, failureBatchCount.get()); }
@Test public void testApplyTransformListenerException() { final AtomicInteger failureBatchCount = new AtomicInteger(); testListenerException( new ApplyTransformListener() .withTransform(new ServerTransform("thisTransformDoesntExist")) .onBatchFailure( (batch, throwable) -> failureBatchCount.incrementAndGet() ) ); // there should be one failure sent to the ApplyTransformListener // onBatchFailure listener since the transform is invalid assertEquals(1, failureBatchCount.get()); }
@Test public void testExportListenerException() { final AtomicInteger failureBatchCount = new AtomicInteger(); testListenerException( new ExportListener() .withTransform(new ServerTransform("thisTransformDoesntExist")) .onBatchFailure( (batch, throwable) -> failureBatchCount.incrementAndGet() ) ); // there should be one failure sent to the ExportListener // onBatchFailure listener since the transform is invalid assertEquals(1, failureBatchCount.get()); }
// Matches rules against a range-constraint query with a server transform applied,
// then verifies the transform renamed elements in the match response.
@Test
public void testRuleMatchTransform() {
  StructuredQueryBuilder queryBuilder = new StructuredQueryBuilder();
  StructuredQueryDefinition structuredQuery =
      queryBuilder.rangeConstraint("favorited", Operator.EQ, "true");
  ServerTransform ruleTransform = new ServerTransform("ruleTransform");
  DOMHandle matchResult = ruleManager.match(
      structuredQuery, 0L, QueryManager.DEFAULT_PAGE_LENGTH,
      new String[] {}, new DOMHandle(), ruleTransform);
  Document responseDoc = matchResult.get();
  // The transform rewrites rule names into <transformed-name> elements.
  NodeList transformedNames = responseDoc.getElementsByTagNameNS("", "transformed-name");
  assertEquals(2, transformedNames.getLength());
}
/**
 * Job that exports each batch of selected records to its own zip file.
 * Registers the job properties this job understands; the lambdas run later,
 * when property values are applied.
 */
public ExportBatchesToZipsJob() {
  super();
  // exportPath is required and registered first; the listener configured by
  // the optional properties below presumably depends on the export directory
  // being set — TODO confirm ordering contract in the superclass.
  addRequiredJobProperty("exportPath",
    "Directory path to which each batch should be written as a zip",
    value -> setExportDir(new File(value)));
  addJobProperty("filenamePrefix",
    "Prefix written to the beginning of the filename of each file; defaults to batch-",
    value -> getExportListener().withFilenamePrefix(value));
  addJobProperty("filenameExtension",
    "Filename extension for each file; defaults to .zip",
    value -> getExportListener().withFilenameExtension(value));
  addJobProperty("flattenUri",
    "Whether or not record URIs are flattened before being used as zip entry names; defaults to false",
    value -> getExportListener().withFlattenUri(Boolean.parseBoolean(value)));
  addJobProperty("transform",
    "The name of a REST transform to apply to each record before it is written to the zip file",
    value -> getExportListener().withTransform(new ServerTransform(value)));
  // Fixed help text: was "each URI it is used as an entry name".
  addJobProperty("uriPrefix",
    "Prefix to prepend to each URI before it is used as an entry name; applied after a URI is optionally flattened",
    value -> getExportListener().withUriPrefix(value));
}
/**
 * Job that exports selected records to a single file.
 * Registers the job properties this job understands; the handlers run later,
 * when property values are applied.
 */
public ExportToFileJob() {
  super();
  addRequiredJobProperty("exportPath",
    "The path of the file to which selected records are exported",
    propertyValue -> setExportFile(new File(propertyValue)));
  // Setters on this job itself can be referenced directly.
  addJobProperty("fileHeader",
    "Optional content that should be written to the start of each file",
    this::setFileHeader);
  addJobProperty("fileFooter",
    "Optional content that should be written to the end of each file",
    this::setFileFooter);
  addJobProperty("recordPrefix",
    "Optional content to be written before each record is written",
    propertyValue -> getExportListener().withRecordPrefix(propertyValue));
  addJobProperty("recordSuffix",
    "Optional content to be written after each record is written",
    propertyValue -> getExportListener().withRecordSuffix(propertyValue));
  addJobProperty("transform",
    "Optional REST transform to apply to each record before it is written",
    propertyValue -> getExportListener().withTransform(new ServerTransform(propertyValue)));
}
// Runs the JavaScript transform indirectly through the MLCP->REST transform
// adapter module, passing the target module/namespace/transform as parameters.
@Test
public void testJavascriptRestTransformAdapter() throws Exception {
  String adapterModule = "/ext/RestTransformAdapter.xqy";
  String adapterNamespace = "http://marklogic.com/mlcp/transform/RestTransformAdapter.xqy";
  ServerTransform adapterTransform = new ServerTransform(MLCP_TRANSFORM_ADAPTER)
      .addParameter("ml.module", adapterModule)
      .addParameter("ml.namespace", adapterNamespace)
      .addParameter("ml.transform", JS_NAME)
      .addParameter("value", "true");
  runTransform(adapterTransform);
}
// Runs the XQuery transform indirectly through the MLCP->REST transform
// adapter module, passing the target module/namespace/transform as parameters.
@Test
public void testXQueryRestTransformAdapter() throws Exception {
  String adapterModule = "/ext/RestTransformAdapter.xqy";
  String adapterNamespace = "http://marklogic.com/mlcp/transform/RestTransformAdapter.xqy";
  ServerTransform adapterTransform = new ServerTransform(MLCP_TRANSFORM_ADAPTER)
      .addParameter("ml.module", adapterModule)
      .addParameter("ml.namespace", adapterNamespace)
      .addParameter("ml.transform", TransformExtensionsTest.XQUERY_NAME)
      .addParameter("value", "true");
  runTransform(adapterTransform);
}
// Add a third document URI to the set under test.
uris.add(collection + "/test3.txt");
// Transform to apply to matched documents (registered under transformName1).
ServerTransform transform = new ServerTransform(transformName1);
// NOTE(review): raw List loses type safety; presumably holds skipped URI
// Strings — confirm usage below this fragment and parameterize as List<String>.
List skippedUris = new ArrayList<>();
StringBuilder failures = new StringBuilder();
// Installs a sample MLCP-style JavaScript transform module, runs it through the
// MLCP transform adapter, and removes the module afterward.
@Test
public void testJavascriptMlcpTransformAdapter() throws Exception {
  String transformContents = Common.testFileToString("SampleMlcpTransform.sjs");
  libMgr.write("/ext/SampleMlcpTransform.sjs",
      new StringHandle(transformContents).withFormat(Format.TEXT));
  try {
    ServerTransform transform = new ServerTransform(MLCP_TRANSFORM_ADAPTER)
        .addParameter("ml.module", "/ext/SampleMlcpTransform.sjs")
        .addParameter("attr-value", "true");
    runTransform(transform);
  } finally {
    // Delete the installed module even when runTransform throws, so a failing
    // run does not leave state behind for later tests.
    libMgr.delete("/ext/SampleMlcpTransform.sjs");
  }
}
/**
 * Job that exports each batch of selected records to its own file in a directory.
 * Registers the job properties this job understands; the lambdas run later,
 * when property values are applied.
 */
public ExportBatchesToDirectoryJob() {
  super();
  // Need to process this property first so that the listener isn't null.
  addRequiredJobProperty("exportPath",
    "Directory path to which each batch should be written as a file",
    propertyValue -> setExportDir(new File(propertyValue)));
  addJobProperty("fileHeader",
    "Content written to the start of each file",
    propertyValue -> getExportListener().withFileHeader(propertyValue));
  addJobProperty("fileFooter",
    "Content written to the end of each file",
    propertyValue -> getExportListener().withFileFooter(propertyValue));
  addJobProperty("filenamePrefix",
    "Prefix written to the beginning of the filename of each file; defaults to batch-",
    propertyValue -> getExportListener().withFilenamePrefix(propertyValue));
  addJobProperty("filenameExtension",
    "Filename extension for each file; defaults to .zip",
    propertyValue -> getExportListener().withFilenameExtension(propertyValue));
  addJobProperty("recordPrefix",
    "Optional content to be written before each record is written",
    propertyValue -> getExportListener().withRecordPrefix(propertyValue));
  addJobProperty("recordSuffix",
    "Optional content to be written after each record is written",
    propertyValue -> getExportListener().withRecordSuffix(propertyValue));
  addJobProperty("transform",
    "Optional REST transform to apply to each record before it is written",
    propertyValue -> getExportListener().withTransform(new ServerTransform(propertyValue)));
}
// Installs a sample MLCP-style XQuery transform module, runs it through the
// MLCP transform adapter, and removes the module afterward.
@Test
public void testXQueryMlcpTransformAdapter() throws Exception {
  String transformContents = Common.testFileToString("SampleMlcpTransform.xqy");
  libMgr.write("/ext/SampleMlcpTransform.xqy",
      new StringHandle(transformContents).withFormat(Format.TEXT));
  try {
    ServerTransform transform = new ServerTransform(MLCP_TRANSFORM_ADAPTER)
        .addParameter("ml.module", "/ext/SampleMlcpTransform.xqy")
        .addParameter("ml.namespace", "http://marklogic.com/example")
        .addParameter("attr-value", "true");
    runTransform(transform);
  } finally {
    // Delete the installed module even when runTransform throws, so a failing
    // run does not leave state behind for later tests.
    libMgr.delete("/ext/SampleMlcpTransform.xqy");
  }
}
// Exercises ApplyTransformListener with ApplyResult.IGNORE: the transform is
// executed server-side but its RETURN VALUE is not written back over the doc.
// NOTE(review): the assertion expects testProperty to become "test2a", which
// under IGNORE can only hold if the transform (transformName2) updates the
// document in place itself rather than relying on its return value — confirm
// the transform's implementation, otherwise this expectation is inverted.
@Test
public void testResultIgnore() throws Exception {
  DocumentMetadataHandle meta = new DocumentMetadataHandle().withCollections(collection);
  // write the document
  client.newDocumentManager().writeAs(collection + "/test2.json", meta,
      "{ \"testProperty\": \"test2\" }");
  StructuredQueryDefinition query = sqb.value(sqb.jsonProperty("testProperty"), "test2");
  ServerTransform transform = new ServerTransform(transformName2)
      .addParameter("newValue", "test2a");
  ApplyTransformListener listener = new ApplyTransformListener()
      .withTransform(transform)
      .withApplyResult(ApplyResult.IGNORE);
  QueryBatcher batcher = moveMgr.newQueryBatcher(query)
      .onUrisReady(listener);
  JobTicket ticket = moveMgr.startJob( batcher );
  // Wait for all batches before stopping the job and reading the result.
  batcher.awaitCompletion();
  moveMgr.stopJob(ticket);
  JsonNode docContents = docMgr.readAs(collection + "/test2.json", JsonNode.class);
  assertEquals( "the transform should have changed testProperty to 'test2a'",
      "test2a", docContents.get("testProperty").textValue() );
}
@Test public void testResultReplace() throws Exception { DocumentMetadataHandle meta = new DocumentMetadataHandle().withCollections(collection); // write the document client.newDocumentManager().writeAs(collection + "/test1.json", meta, "{ \"testProperty\": \"test1\" }"); StructuredQueryDefinition query = sqb.value(sqb.jsonProperty("testProperty"), "test1"); ServerTransform transform = new ServerTransform(transformName1) .addParameter("newValue", "test1a"); ApplyTransformListener listener = new ApplyTransformListener() .withTransform(transform) .withApplyResult(ApplyResult.REPLACE); QueryBatcher batcher = moveMgr.newQueryBatcher(query) .onUrisReady(listener); JobTicket ticket = moveMgr.startJob( batcher ); batcher.awaitCompletion(); moveMgr.stopJob(ticket); JsonNode docContents = docMgr.readAs(collection + "/test1.json", JsonNode.class); assertEquals( "the transform should have changed testProperty to 'test1a'", "test1a", docContents.get("testProperty").textValue() ); }
// Staging database must start empty before the flow runs.
assertEquals(0, getStagingDocCount());
// REST transform invoking "ml:inputFlow" — presumably the MarkLogic Data Hub
// input-flow transform; parameters select the entity and flow to run. Confirm
// against the DHF version in use.
ServerTransform runFlow = new ServerTransform("ml:inputFlow");
runFlow.addParameter("entity-name", entityName);
runFlow.addParameter("flow-name", flowName);
// Build a string query with criteria "a" and attach the "test118" response
// transform, which the server applies to the search results payload.
StringQueryDefinition s = q.newStringDefinition("");
s.setCriteria("a");
s.setResponseTransform(new ServerTransform("test118"));
q.search(s, new SearchHandle());