/**
 * Verifies event-name filtering: an event whose name matches the configured name filter
 * produces exactly one {@link JobSpec}, while a non-matching name is dropped and counted
 * in the rejected-events metric.
 */
@Test
public void testFilterByName() throws Exception {
  SLAEventKafkaJobMonitor monitor = new SLAEventKafkaJobMonitor("topic", null, new URI("/base/URI"),
      HighLevelConsumerTest.getSimpleConfig(Optional.of(KafkaJobMonitor.KAFKA_JOB_MONITOR_PREFIX)),
      new NoopSchemaVersionWriter(), Optional.<Pattern>absent(), Optional.of(Pattern.compile("^accept.*")),
      this.templateURI, ImmutableMap.<String, String>of());

  monitor.buildMetricsContextAndMetrics();

  // Matching name: one spec emitted, nothing rejected.
  GobblinTrackingEvent accepted = createSLAEvent("acceptthis", new URI("/data/myDataset"),
      Maps.<String, String>newHashMap());
  Collection<Either<JobSpec, URI>> specs = monitor.parseJobSpec(accepted);
  Assert.assertEquals(specs.size(), 1);
  Assert.assertEquals(monitor.getRejectedEvents().getCount(), 0);

  // Non-matching name: no spec emitted, rejection counter incremented.
  GobblinTrackingEvent rejected = createSLAEvent("donotacceptthis", new URI("/data/myDataset"),
      Maps.<String, String>newHashMap());
  specs = monitor.parseJobSpec(rejected);
  Assert.assertEquals(specs.size(), 0);
  Assert.assertEquals(monitor.getRejectedEvents().getCount(), 1);

  monitor.shutdownMetrics();
}
/**
 * Parses a {@link GobblinTrackingEvent} into a single {@link JobSpec}.
 *
 * <p>Events failing {@code acceptEvent} are counted in {@code rejectedEvents} and yield an
 * empty collection. Otherwise the job-spec URI is derived by appending the event's dataset
 * URN to {@code baseURI}, and selected metadata entries (per {@code extractKeys}) are copied
 * into the job config under their mapped names.
 *
 * @param event the tracking event to convert
 * @return a singleton collection with the derived {@link JobSpec}, or an empty collection if
 *         the event was rejected by the configured filters
 */
@Override public Collection<Either<JobSpec, URI>> parseJobSpec(GobblinTrackingEvent event) {
  if (!acceptEvent(event)) {
    this.rejectedEvents.inc();
    return Lists.newArrayList();
  }
  // Fail fast with a descriptive message when the dataset URN is absent; previously this
  // surfaced as an anonymous NPE inside Path construction.
  String datasetURN = java.util.Objects.requireNonNull(event.getMetadata().get(SlaEventKeys.DATASET_URN_KEY),
      "Event metadata is missing required key " + SlaEventKeys.DATASET_URN_KEY);
  URI jobSpecURI = PathUtils.mergePaths(new Path(this.baseURI), new Path(datasetURN)).toUri();

  // Copy whitelisted metadata entries into the job config, renaming keys per extractKeys.
  Map<String, String> jobConfigMap = Maps.newHashMap();
  for (Map.Entry<String, String> entry : this.extractKeys.entrySet()) {
    if (event.getMetadata().containsKey(entry.getKey())) {
      jobConfigMap.put(entry.getValue(), event.getMetadata().get(entry.getKey()));
    }
  }

  Config jobConfig = ConfigFactory.parseMap(jobConfigMap);
  JobSpec jobSpec = JobSpec.builder(jobSpecURI).withTemplate(this.template).withConfig(jobConfig).build();
  return Lists.newArrayList(Either.<JobSpec, URI>left(jobSpec));
}
/**
 * Creates this monitor's metrics: the superclass metrics plus a counter tracking events
 * rejected by the configured URN / name filters.
 */
@Override protected void createMetrics() { super.createMetrics(); this.rejectedEvents = getMetricContext().counter(RuntimeMetrics.GOBBLIN_JOB_MONITOR_SLAEVENT_REJECTEDEVENTS); }
/**
 * Verifies that {@link SLAEventKafkaJobMonitor.Factory} wires every supported configuration
 * key (filters, template, extract keys, base URI, topic, schema-version reader) into the
 * constructed monitor.
 */
@Test
public void testFactory() throws Exception {
  Pattern urnFilter = Pattern.compile("filter");
  Pattern nameFilter = Pattern.compile("filtername");

  Map<String, String> rawConfig = ImmutableMap.<String, String>builder()
      .put(SLAEventKafkaJobMonitor.DATASET_URN_FILTER_KEY, urnFilter.pattern())
      .put(SLAEventKafkaJobMonitor.EVENT_NAME_FILTER_KEY, nameFilter.pattern())
      .put(SLAEventKafkaJobMonitor.TEMPLATE_KEY, "template")
      .put(SLAEventKafkaJobMonitor.EXTRACT_KEYS + ".key1", "value1")
      .put(SLAEventKafkaJobMonitor.BASE_URI_KEY, "uri")
      .put(SLAEventKafkaJobMonitor.TOPIC_KEY, "topic")
      .put(SLAEventKafkaJobMonitor.SCHEMA_VERSION_READER_CLASS, FixedSchemaVersionWriter.class.getName())
      .build();
  Config config = ConfigFactory.parseMap(rawConfig)
      .withFallback(HighLevelConsumerTest.getSimpleConfig(Optional.of(KafkaJobMonitor.KAFKA_JOB_MONITOR_PREFIX)));

  SLAEventKafkaJobMonitor monitor =
      (SLAEventKafkaJobMonitor) (new SLAEventKafkaJobMonitor.Factory()).forConfig(config, null);

  // Every configured value must survive the factory round-trip.
  Assert.assertEquals(monitor.getUrnFilter().get().pattern(), urnFilter.pattern());
  Assert.assertEquals(monitor.getNameFilter().get().pattern(), nameFilter.pattern());
  Assert.assertEquals(monitor.getTemplate(), new URI("template"));
  Assert.assertEquals(monitor.getExtractKeys().size(), 1);
  Assert.assertEquals(monitor.getExtractKeys().get("key1"), "value1");
  Assert.assertEquals(monitor.getBaseURI(), new URI("uri"));
  Assert.assertEquals(monitor.getTopic(), "topic");
  Assert.assertEquals(monitor.getVersionWriter().getClass(), FixedSchemaVersionWriter.class);
}
// Assemble the monitor from the values parsed above (enclosing factory method not visible here).
return new SLAEventKafkaJobMonitor(topic, jobCatalog, baseUri, localScopeConfig, versionWriter, urnFilter, nameFilter, template, extractKeys);
// Assemble the monitor from the values parsed above (enclosing factory method not visible here).
return new SLAEventKafkaJobMonitor(topic, jobCatalog, baseUri, localScopeConfig, versionWriter, urnFilter, nameFilter, template, extractKeys);
/**
 * Verifies dataset-URN filtering: an event whose dataset URN matches the configured URN
 * filter produces exactly one {@link JobSpec}, while a non-matching URN is dropped and
 * counted in the rejected-events metric.
 *
 * <p>Dead code removed: the original built a {@code Properties}/{@code Config} pair from
 * {@code superConfig} that was never used — the monitor is constructed directly below.
 */
@Test
public void testFilterByDatasetURN() throws Exception {
  SLAEventKafkaJobMonitor monitor = new SLAEventKafkaJobMonitor("topic", null, new URI("/base/URI"),
      HighLevelConsumerTest.getSimpleConfig(Optional.of(KafkaJobMonitor.KAFKA_JOB_MONITOR_PREFIX)),
      new NoopSchemaVersionWriter(), Optional.of(Pattern.compile("^/accept.*")), Optional.<Pattern>absent(),
      this.templateURI, ImmutableMap.<String, String>of());

  monitor.buildMetricsContextAndMetrics();

  GobblinTrackingEvent event;
  Collection<Either<JobSpec, URI>> jobSpecs;

  // Matching URN: one spec emitted, nothing rejected.
  event = createSLAEvent("event", new URI("/accept/myDataset"), Maps.<String, String>newHashMap());
  jobSpecs = monitor.parseJobSpec(event);
  Assert.assertEquals(jobSpecs.size(), 1);
  Assert.assertEquals(monitor.getRejectedEvents().getCount(), 0);

  // Non-matching URN: no spec emitted, rejection counter incremented.
  event = createSLAEvent("event", new URI("/reject/myDataset"), Maps.<String, String>newHashMap());
  jobSpecs = monitor.parseJobSpec(event);
  Assert.assertEquals(jobSpecs.size(), 0);
  Assert.assertEquals(monitor.getRejectedEvents().getCount(), 1);

  monitor.shutdownMetrics();
}
/**
 * Creates this monitor's metrics: the superclass metrics plus a counter tracking events
 * rejected by the configured URN / name filters.
 */
@Override protected void createMetrics() { super.createMetrics(); this.rejectedEvents = getMetricContext().counter(RuntimeMetrics.GOBBLIN_JOB_MONITOR_SLAEVENT_REJECTEDEVENTS); }
/**
 * Parses a {@link GobblinTrackingEvent} into a single {@link JobSpec}.
 *
 * <p>Events failing {@code acceptEvent} are counted in {@code rejectedEvents} and yield an
 * empty collection. Otherwise the job-spec URI is derived by appending the event's dataset
 * URN to {@code baseURI}, and selected metadata entries (per {@code extractKeys}) are copied
 * into the job config under their mapped names.
 *
 * @param event the tracking event to convert
 * @return a singleton collection with the derived {@link JobSpec}, or an empty collection if
 *         the event was rejected by the configured filters
 */
@Override public Collection<Either<JobSpec, URI>> parseJobSpec(GobblinTrackingEvent event) {
  if (!acceptEvent(event)) {
    this.rejectedEvents.inc();
    return Lists.newArrayList();
  }
  // Fail fast with a descriptive message when the dataset URN is absent; previously this
  // surfaced as an anonymous NPE inside Path construction.
  String datasetURN = java.util.Objects.requireNonNull(event.getMetadata().get(SlaEventKeys.DATASET_URN_KEY),
      "Event metadata is missing required key " + SlaEventKeys.DATASET_URN_KEY);
  URI jobSpecURI = PathUtils.mergePaths(new Path(this.baseURI), new Path(datasetURN)).toUri();

  // Copy whitelisted metadata entries into the job config, renaming keys per extractKeys.
  Map<String, String> jobConfigMap = Maps.newHashMap();
  for (Map.Entry<String, String> entry : this.extractKeys.entrySet()) {
    if (event.getMetadata().containsKey(entry.getKey())) {
      jobConfigMap.put(entry.getValue(), event.getMetadata().get(entry.getKey()));
    }
  }

  Config jobConfig = ConfigFactory.parseMap(jobConfigMap);
  JobSpec jobSpec = JobSpec.builder(jobSpecURI).withTemplate(this.template).withConfig(jobConfig).build();
  return Lists.newArrayList(Either.<JobSpec, URI>left(jobSpec));
}
@Test public void testParseJobSpec() throws Exception { SLAEventKafkaJobMonitor monitor = new SLAEventKafkaJobMonitor("topic", null, new URI("/base/URI"), HighLevelConsumerTest.getSimpleConfig(Optional.of(KafkaJobMonitor.KAFKA_JOB_MONITOR_PREFIX)), new NoopSchemaVersionWriter(), Optional.<Pattern>absent(), Optional.<Pattern>absent(), this.templateURI, ImmutableMap.of("metadataKey1", "key1")); monitor.buildMetricsContextAndMetrics(); GobblinTrackingEvent event = createSLAEvent("DatasetPublish", new URI("/data/myDataset"), ImmutableMap.of("metadataKey1","value1","key1","value2")); Collection<Either<JobSpec, URI>> jobSpecs = monitor.parseJobSpec(event); Assert.assertEquals(jobSpecs.size(), 1); JobSpec jobSpec = (JobSpec) jobSpecs.iterator().next().get(); Assert.assertEquals(jobSpec.getUri(), new URI("/base/URI/data/myDataset")); Assert.assertEquals(jobSpec.getTemplateURI().get(), templateURI); // should insert configuration from metadata Assert.assertEquals(jobSpec.getConfig().getString("key1"), "value1"); monitor.shutdownMetrics(); }