/** Instantiates a fresh {@link NodataMetadataGenerator} before every test case. */
@Before
public void setup() {
    generator = new NodataMetadataGenerator();
}
LOG.info("Nodata alert aggregation stream: {} already exists", NODATA_ALERT_AGGR_STREAM); } else { streamDefinitionsMap.put(NODATA_ALERT_AGGR_STREAM, buildAggregationStream()); LOG.info("Created nodata alert aggregation stream: {}", NODATA_ALERT_AGGR_STREAM); LOG.info("Nodata alert aggregation output stream: {} already exists", NODATA_ALERT_AGGR_OUTPUT_STREAM); } else { streamDefinitionsMap.put(NODATA_ALERT_AGGR_OUTPUT_STREAM, buildAggregationOutputStream()); LOG.info("Created nodata alert aggregation output stream: {}", NODATA_ALERT_AGGR_OUTPUT_STREAM); NODATA_ALERT_AGGR_STREAM, NODATA_ALERT_AGGR_DATASOURCE_NAME); } else { kafkaSources.put(NODATA_ALERT_AGGR_DATASOURCE_NAME, buildAggregationDatasource()); LOG.info("Created nodata alert aggregation datasource {} for stream {}", NODATA_ALERT_AGGR_DATASOURCE_NAME, NODATA_ALERT_AGGR_STREAM); NODATA_ALERT_AGGR_OUTPUT_STREAM, NODATA_ALERT_AGGR_OUTPUT_DATASOURCE_NAME); } else { kafkaSources.put(NODATA_ALERT_AGGR_OUTPUT_DATASOURCE_NAME, buildAggregationOutputDatasource()); LOG.info("Created nodata alert aggregation output datasource {} for stream {}", NODATA_ALERT_AGGR_DATASOURCE_NAME, NODATA_ALERT_AGGR_OUTPUT_STREAM); LOG.info("Stream: {} nodata alert policy: {} already exists", streamName, policyName); } else { policies.put(policyName, buildDynamicNodataPolicy( streamName, policyName, NODATA_ALERT_AGGR_OUTPUT_STREAM, aggrPolicyName);
streamDefinitions = listToMap(client.listStreams()); new NodataMetadataGenerator().execute(config, streamDefinitions, kafkaSources, policies, publishments);
/**
 * Runs the generator against a single stream definition that declares a nodata
 * alert and verifies the generated metadata counts:
 * 2 Kafka datasources, 2 policies, and 4 publishments.
 *
 * @throws Exception if the generator fails to execute
 */
@Test
public void testNormal() throws Exception {
    StreamDefinition sd = createStreamDefinitionWithNodataAlert();
    // Diamond operator: the file already relies on Java 8 lambdas, so the
    // verbose explicit type arguments are unnecessary.
    Map<String, StreamDefinition> streamDefinitionsMap = new HashMap<>();
    streamDefinitionsMap.put(sd.getStreamId(), sd);
    Map<String, Kafka2TupleMetadata> kafkaSources = new HashMap<>();
    Map<String, PolicyDefinition> policies = new HashMap<>();
    Map<String, Publishment> publishments = new HashMap<>();

    generator.execute(config, streamDefinitionsMap, kafkaSources, policies, publishments);

    // NOTE(review): expected counts mirror the generator's behavior — presumably
    // one datasource per aggregation stream (input + output); confirm against
    // NodataMetadataGenerator if these assertions ever drift.
    Assert.assertEquals(2, kafkaSources.size());
    kafkaSources.forEach((key, value) ->
        LOG.info("KafkaSources > {}: {}", key, ToStringBuilder.reflectionToString(value)));

    Assert.assertEquals(2, policies.size());
    policies.forEach((key, value) ->
        LOG.info("Policies > {}: {}", key, ToStringBuilder.reflectionToString(value)));

    Assert.assertEquals(4, publishments.size());
    publishments.forEach((key, value) ->
        LOG.info("Publishments > {}: {}", key, ToStringBuilder.reflectionToString(value)));
}