@Test public void testParseJobSpec() throws Exception { SLAEventKafkaJobMonitor monitor = new SLAEventKafkaJobMonitor("topic", null, new URI("/base/URI"), HighLevelConsumerTest.getSimpleConfig(Optional.of(KafkaJobMonitor.KAFKA_JOB_MONITOR_PREFIX)), new NoopSchemaVersionWriter(), Optional.<Pattern>absent(), Optional.<Pattern>absent(), this.templateURI, ImmutableMap.of("metadataKey1", "key1")); monitor.buildMetricsContextAndMetrics(); GobblinTrackingEvent event = createSLAEvent("DatasetPublish", new URI("/data/myDataset"), ImmutableMap.of("metadataKey1","value1","key1","value2")); Collection<Either<JobSpec, URI>> jobSpecs = monitor.parseJobSpec(event); Assert.assertEquals(jobSpecs.size(), 1); JobSpec jobSpec = (JobSpec) jobSpecs.iterator().next().get(); Assert.assertEquals(jobSpec.getUri(), new URI("/base/URI/data/myDataset")); Assert.assertEquals(jobSpec.getTemplateURI().get(), templateURI); // should insert configuration from metadata Assert.assertEquals(jobSpec.getConfig().getString("key1"), "value1"); monitor.shutdownMetrics(); }
@Test
public void testFilterByName() throws Exception {
  // Monitor configured with a name filter: only events whose name starts with "accept" pass.
  SLAEventKafkaJobMonitor monitor =
      new SLAEventKafkaJobMonitor("topic", null, new URI("/base/URI"),
          HighLevelConsumerTest.getSimpleConfig(Optional.of(KafkaJobMonitor.KAFKA_JOB_MONITOR_PREFIX)),
          new NoopSchemaVersionWriter(), Optional.<Pattern>absent(),
          Optional.of(Pattern.compile("^accept.*")), this.templateURI,
          ImmutableMap.<String, String>of());

  monitor.buildMetricsContextAndMetrics();

  // Matching name: event produces a job spec and is not counted as rejected.
  GobblinTrackingEvent matching =
      createSLAEvent("acceptthis", new URI("/data/myDataset"), Maps.<String, String>newHashMap());
  Collection<Either<JobSpec, URI>> specs = monitor.parseJobSpec(matching);
  Assert.assertEquals(specs.size(), 1);
  Assert.assertEquals(monitor.getRejectedEvents().getCount(), 0);

  // Non-matching name: no job spec, rejected counter increments.
  GobblinTrackingEvent nonMatching =
      createSLAEvent("donotacceptthis", new URI("/data/myDataset"), Maps.<String, String>newHashMap());
  specs = monitor.parseJobSpec(nonMatching);
  Assert.assertEquals(specs.size(), 0);
  Assert.assertEquals(monitor.getRejectedEvents().getCount(), 1);

  monitor.shutdownMetrics();
}
@Test
public void testFilterByDatasetURN() throws Exception {
  // Fix: removed an unused Properties/Config construction (parsed TEMPLATE_KEY and
  // DATASET_URN_FILTER_KEY into a Config that was never passed to the monitor) —
  // the filter pattern is supplied directly to the constructor below.

  // Monitor configured with a dataset-URN filter: only URNs starting with "/accept" pass.
  SLAEventKafkaJobMonitor monitor =
      new SLAEventKafkaJobMonitor("topic", null, new URI("/base/URI"),
          HighLevelConsumerTest.getSimpleConfig(Optional.of(KafkaJobMonitor.KAFKA_JOB_MONITOR_PREFIX)),
          new NoopSchemaVersionWriter(), Optional.of(Pattern.compile("^/accept.*")),
          Optional.<Pattern>absent(), this.templateURI, ImmutableMap.<String, String>of());

  monitor.buildMetricsContextAndMetrics();

  GobblinTrackingEvent event;
  Collection<Either<JobSpec, URI>> jobSpecs;

  // Matching URN: event produces a job spec and is not counted as rejected.
  event = createSLAEvent("event", new URI("/accept/myDataset"), Maps.<String, String>newHashMap());
  jobSpecs = monitor.parseJobSpec(event);
  Assert.assertEquals(jobSpecs.size(), 1);
  Assert.assertEquals(monitor.getRejectedEvents().getCount(), 0);

  // Non-matching URN: no job spec, rejected counter increments.
  event = createSLAEvent("event", new URI("/reject/myDataset"), Maps.<String, String>newHashMap());
  jobSpecs = monitor.parseJobSpec(event);
  Assert.assertEquals(jobSpecs.size(), 0);
  Assert.assertEquals(monitor.getRejectedEvents().getCount(), 1);

  monitor.shutdownMetrics();
}