/**
 * Documents that {@code Tuple2StreamMetadata} uses identity semantics:
 * two instances built with byte-identical field values are still unequal
 * and hash differently, i.e. equals/hashCode are NOT overridden.
 *
 * Fix: the stream-name sets were declared as raw {@code Set}; they are
 * parameterized as {@code Set<String>} to avoid unchecked warnings.
 */
@Test
public void testTuple2StreamMetadata() {
    Tuple2StreamMetadata metadata = new Tuple2StreamMetadata();
    Set<String> activeStreamNames = new HashSet<>();
    activeStreamNames.add("defaultStringStream");
    metadata.setStreamNameSelectorCls("org.apache.eagle.alert.engine.scheme.PlainStringStreamNameSelector");
    metadata.setStreamNameSelectorProp(new Properties());
    metadata.getStreamNameSelectorProp().put("userProvidedStreamName", "defaultStringStream");
    metadata.setActiveStreamNames(activeStreamNames);
    metadata.setTimestampColumn("timestamp");

    // Second instance configured with exactly the same values.
    Tuple2StreamMetadata metadata1 = new Tuple2StreamMetadata();
    Set<String> activeStreamNames1 = new HashSet<>();
    activeStreamNames1.add("defaultStringStream");
    metadata1.setStreamNameSelectorCls("org.apache.eagle.alert.engine.scheme.PlainStringStreamNameSelector");
    metadata1.setStreamNameSelectorProp(new Properties());
    metadata1.getStreamNameSelectorProp().put("userProvidedStreamName", "defaultStringStream");
    metadata1.setActiveStreamNames(activeStreamNames1);
    metadata1.setTimestampColumn("timestamp");

    // Distinct references, and (since equals/hashCode are inherited from
    // Object) neither equal nor hash-equal despite identical contents.
    Assert.assertFalse(metadata == metadata1);
    Assert.assertFalse(metadata.equals(metadata1));
    Assert.assertFalse(metadata.hashCode() == metadata1.hashCode());
}
}
// Fragment of the tuple-to-stream conversion path (truncated at both ends;
// braces do not balance within this view — reconstruct against the full file).
// Extracts the payload map from the tuple, resolves its stream name, and
// derives the event timestamp from the configured timestamp column.
Map<String, Object> m = (Map<String, Object>) tuple.get(1);
String streamName = cachedSelector.getStreamName(m);
// Drop path: the resolved stream is not in the configured active set.
if (!metadata.getActiveStreamNames().contains(streamName)) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("streamName {} is not within activeStreamNames {}", streamName, metadata.getActiveStreamNames());
        // NOTE(review): the timestamp logic below appears nested inside the
        // isDebugEnabled branch in this extraction — almost certainly a
        // garbled paste; verify brace placement against the original file.
        Object timeObject = m.get(metadata.getTimestampColumn());
        long timestamp = 0L;
        // NOTE(review): this condition looks inverted — when timeObject is
        // null, the cast ((Number) timeObject) would throw a
        // NullPointerException. The upstream code presumably tests
        // `timeObject instanceof Number` (or `!= null`) here — confirm and fix.
        if (timeObject == null) {
            timestamp = ((Number) timeObject).longValue();
        } else {
            // Non-numeric timestamp: re-read the column as a String and parse
            // it with the metadata-configured date format.
            String timestampFieldValue = (String) m.get(metadata.getTimestampColumn());
            String dateFormat = metadata.getTimestampFormat();
            if (Strings.isNullOrEmpty(dateFormat)) {
                // NOTE(review): empty debug branch with the parse logic in the
                // else — likely another extraction artifact; in the original
                // the parse presumably runs when a format IS present.
                if (LOG.isDebugEnabled()) {
                } else {
                    try {
                        // SimpleDateFormat is not thread-safe; instance is
                        // method-local here, which is safe.
                        SimpleDateFormat sdf = new SimpleDateFormat(metadata.getTimestampFormat());
                        timestamp = sdf.parse(timestampFieldValue).getTime();
                    } catch (Exception ex) {
                        // Best-effort fallback: log and substitute "now" rather
                        // than dropping the event on an unparseable timestamp.
                        LOG.error("continue with current timestamp because error happens while parsing timestamp column " + metadata.getTimestampColumn() + " with format " + metadata.getTimestampFormat());
                        timestamp = System.currentTimeMillis();
public void validateAndEnsureDefault() { Preconditions.checkNotNull(streamSource); Preconditions.checkNotNull(streamDefinition); if (streamSource.getType() == null) { streamSource.setType("KAFKA"); } String dataSourceName = (getStreamDefinition().getStreamId() + "_CUSTOMIZED").toUpperCase(); getStreamDefinition().setDataSource(dataSourceName); getStreamSource().setName(dataSourceName); Tuple2StreamMetadata codec = new Tuple2StreamMetadata(); codec.setTimestampColumn("timestamp"); codec.setStreamNameSelectorCls(JsonStringStreamNameSelector.class.getName()); Properties streamNameSelectorProp = new Properties(); streamNameSelectorProp.put("userProvidedStreamName", streamSource.getName()); codec.setStreamNameSelectorProp(streamNameSelectorProp); if (StringUtils.isBlank(codec.getStreamNameSelectorCls())) { codec.setStreamNameSelectorCls(JsonStringStreamNameSelector.class.getName()); } if (StringUtils.isBlank(codec.getTimestampFormat())) { codec.setTimestampFormat(null); } this.streamSource.setCodec(codec); } }
public Tuple2StreamConverter(Tuple2StreamMetadata metadata) { this.metadata = metadata; try { cachedSelector = (StreamNameSelector) Class.forName(metadata.getStreamNameSelectorCls()) .getConstructor(Properties.class) .newInstance(metadata.getStreamNameSelectorProp()); } catch (Exception ex) { LOG.error("error initializing StreamNameSelector object", ex); throw new IllegalStateException(ex); } }
/**
 * Assembles the Kafka datasource descriptor for the no-data-alert
 * aggregation input topic, including its JSON-string codec.
 */
private Kafka2TupleMetadata buildAggregationDatasource() {
    // Codec: JSON-string selector emitting the aggregation stream name as-is.
    final Tuple2StreamMetadata streamCodec = new Tuple2StreamMetadata();
    streamCodec.setStreamNameSelectorCls(JSON_STRING_STREAM_NAME_SELECTOR_CLS);
    streamCodec.setTimestampColumn(STREAM_TIMESTAMP_COLUMN_NAME);
    streamCodec.setTimestampFormat(STREAM_TIMESTAMP_FORMAT);
    final Properties selectorProps = new Properties();
    selectorProps.put("userProvidedStreamName", NODATA_ALERT_AGGR_STREAM);
    selectorProps.put("streamNameFormat", "%s");
    streamCodec.setStreamNameSelectorProp(selectorProps);

    final Kafka2TupleMetadata datasource = new Kafka2TupleMetadata();
    datasource.setName(NODATA_ALERT_AGGR_DATASOURCE_NAME);
    datasource.setType(DATASOURCE_TYPE);
    datasource.setSchemeCls(DATASOURCE_SCHEME_CLS);
    datasource.setTopic(NODATA_ALERT_AGGR_TOPIC_NAME);
    datasource.setCodec(streamCodec);
    return datasource;
}
/**
 * Creates a single-entry datasource map for the given Kafka topic,
 * keyed by the datasource name, with a plain-string scheme and an
 * empty default codec.
 */
private Map<String, Kafka2TupleMetadata> createDatasource(final String topicName, final String dataSourceName) {
    final Kafka2TupleMetadata ds = new Kafka2TupleMetadata();
    ds.setName(dataSourceName);
    ds.setTopic(topicName);
    ds.setType("KAFKA");
    ds.setSchemeCls("PlainStringScheme");
    ds.setProperties(new HashMap<>());
    ds.setCodec(new Tuple2StreamMetadata());

    final Map<String, Kafka2TupleMetadata> result = new HashMap<>();
    result.put(ds.getName(), ds);
    return result;
}
// Register this stream id in the datasource's active-stream set —
// presumably so tuples resolved to it pass the active-stream filter in
// Tuple2StreamConverter; verify against the conversion path.
// (Fragment: tupleMetadata and stream are declared outside this view.)
tupleMetadata.getActiveStreamNames().add(stream);
/**
 * Assembles the Kafka datasource descriptor for the no-data-alert
 * aggregation OUTPUT topic; mirrors the input-side builder but targets
 * the output stream/topic constants.
 */
private Kafka2TupleMetadata buildAggregationOutputDatasource() {
    // Codec: JSON-string selector emitting the output stream name as-is.
    final Tuple2StreamMetadata streamCodec = new Tuple2StreamMetadata();
    streamCodec.setStreamNameSelectorCls(JSON_STRING_STREAM_NAME_SELECTOR_CLS);
    streamCodec.setTimestampColumn(STREAM_TIMESTAMP_COLUMN_NAME);
    streamCodec.setTimestampFormat(STREAM_TIMESTAMP_FORMAT);
    final Properties selectorProps = new Properties();
    selectorProps.put("userProvidedStreamName", NODATA_ALERT_AGGR_OUTPUT_STREAM);
    selectorProps.put("streamNameFormat", "%s");
    streamCodec.setStreamNameSelectorProp(selectorProps);

    final Kafka2TupleMetadata datasource = new Kafka2TupleMetadata();
    datasource.setName(NODATA_ALERT_AGGR_OUTPUT_DATASOURCE_NAME);
    datasource.setType(DATASOURCE_TYPE);
    datasource.setSchemeCls(DATASOURCE_SCHEME_CLS);
    datasource.setTopic(NODATA_ALERT_AGGR_OUTPUT_TOPIC_NAME);
    datasource.setCodec(streamCodec);
    return datasource;
}
// Fragment: finishes configuring a Kafka2TupleMetadata (ds, declared outside
// this view) and indexes it by name into a fresh datasource map.
ds.setTopic("name-of-topic1");
ds.setSchemeCls("PlainStringScheme");
// Empty default codec — no selector class or timestamp column configured.
ds.setCodec(new Tuple2StreamMetadata());
Map<String, Kafka2TupleMetadata> dsMap = new HashMap<String, Kafka2TupleMetadata>();
dsMap.put(ds.getName(), ds);
/**
 * End-to-end check of Tuple2StreamConverter with the plain-string selector:
 * a (topic, payloadMap) tuple converts to [topic, streamName, timestamp, payloadMap].
 *
 * Fix: raw {@code Set}/{@code Map} locals are parameterized and the
 * method-wide {@code @SuppressWarnings({"unchecked","rawtypes"})} is dropped.
 */
@Test
public void test() {
    Tuple2StreamMetadata metadata = new Tuple2StreamMetadata();
    Set<String> activeStreamNames = new HashSet<>();
    activeStreamNames.add("defaultStringStream");
    metadata.setStreamNameSelectorCls("org.apache.eagle.alert.engine.scheme.PlainStringStreamNameSelector");
    metadata.setStreamNameSelectorProp(new Properties());
    metadata.getStreamNameSelectorProp().put("userProvidedStreamName", "defaultStringStream");
    metadata.setActiveStreamNames(activeStreamNames);
    metadata.setTimestampColumn("timestamp");
    Tuple2StreamConverter convert = new Tuple2StreamConverter(metadata);

    String topic = "testTopic";
    Map<String, Object> m = new HashMap<>();
    m.put("value", "IAmPlainString");
    long t = System.currentTimeMillis();
    m.put("timestamp", t);

    List<Object> ret = convert.convert(Arrays.asList(topic, m));
    // Converted shape: [topic, resolved stream name, timestamp, original payload].
    Assert.assertEquals(topic, ret.get(0));
    Assert.assertEquals("defaultStringStream", ret.get(1));
    Assert.assertEquals(t, ret.get(2));
    Assert.assertEquals(m, ret.get(3));
}
}
// Fragment: builds a JSON-string codec bound to this stream descriptor's id
// and registers the datasource with the metadata service.
// (streamDesc, datasource, alertMetadataService are declared outside this view.)
Tuple2StreamMetadata tuple2Stream = new Tuple2StreamMetadata();
Properties prop = new Properties();
prop.put(JsonStringStreamNameSelector.USER_PROVIDED_STREAM_NAME_PROPERTY, streamDesc.getStreamId());
tuple2Stream.setStreamNameSelectorProp(prop);
// Event time comes from the "timestamp" field of the JSON payload.
tuple2Stream.setTimestampColumn("timestamp");
tuple2Stream.setStreamNameSelectorCls(JsonStringStreamNameSelector.class.getCanonicalName());
datasource.setCodec(tuple2Stream);
alertMetadataService.addDataSource(datasource);
/**
 * Builds a minimal Kafka2TupleMetadata fixture for TEST_DATASOURCE_1
 * with an empty default codec.
 */
private static Kafka2TupleMetadata createKafka2TupleMetadata() {
    final Kafka2TupleMetadata metadata = new Kafka2TupleMetadata();
    metadata.setName(TEST_DATASOURCE_1);
    metadata.setType("KAFKA");
    metadata.setTopic("tupleTopic");
    metadata.setSchemeCls("SchemeClass");
    metadata.setCodec(new Tuple2StreamMetadata());
    return metadata;
}
// For each plain-string topic, build a Tuple2StreamMetadata whose codec
// resolves the stream name from the topic and register it in the map.
// NOTE(review): this fragment declares `tuple2StreamMetadata` and
// `activeStreamNames` TWICE in the same scope (a compile error as shown) —
// it looks like two distinct loop bodies (PlainString selector vs. Json
// selector) were fused during extraction; reconstruct against the original
// file before editing.
for (String topic : plainStringTopics) {
    String streamId = getStreamNameByTopic(topic);
    Tuple2StreamMetadata tuple2StreamMetadata = new Tuple2StreamMetadata();
    Set<String> activeStreamNames = new HashSet<>();
    activeStreamNames.add(streamId);
    tuple2StreamMetadata.setStreamNameSelectorCls("org.apache.eagle.alert.engine.scheme.PlainStringStreamNameSelector");
    tuple2StreamMetadata.setStreamNameSelectorProp(new Properties());
    tuple2StreamMetadata.getStreamNameSelectorProp().put("userProvidedStreamName", streamId);
    tuple2StreamMetadata.setActiveStreamNames(activeStreamNames);
    tuple2StreamMetadata.setTimestampColumn("timestamp");
    tuple2StreamMetadataMap.put(topic, tuple2StreamMetadata);
    // NOTE(review): duplicate declarations begin here — likely from a second,
    // separate loop over JSON topics in the original source.
    Tuple2StreamMetadata tuple2StreamMetadata = new Tuple2StreamMetadata();
    Set<String> activeStreamNames = new HashSet<>();
    activeStreamNames.add(streamId);
    tuple2StreamMetadata.setStreamNameSelectorCls("org.apache.eagle.alert.engine.scheme.JsonStringStreamNameSelector");
    tuple2StreamMetadata.setStreamNameSelectorProp(new Properties());
    tuple2StreamMetadata.getStreamNameSelectorProp().put("userProvidedStreamName", streamId);
    tuple2StreamMetadata.setActiveStreamNames(activeStreamNames);
    tuple2StreamMetadata.setTimestampColumn("timestamp");
    tuple2StreamMetadataMap.put(topic, tuple2StreamMetadata);
/**
 * Verifies Kafka2TupleMetadata's value semantics: two distinct instances
 * with identical fields are equals()-equal with matching hash codes, and
 * changing one field breaks both equality and the hash match.
 */
@Test
public void testKafka2TupleMetadata() {
    final Kafka2TupleMetadata first = new Kafka2TupleMetadata();
    first.setName("setName");
    first.setType("setType");
    first.setTopic("setTopic");
    first.setSchemeCls("org.apache.eagle.alert.engine.scheme.PlainStringScheme");
    first.setCodec(new Tuple2StreamMetadata());

    final Kafka2TupleMetadata second = new Kafka2TupleMetadata();
    second.setName("setName");
    second.setType("setType");
    second.setTopic("setTopic");
    second.setSchemeCls("org.apache.eagle.alert.engine.scheme.PlainStringScheme");
    second.setCodec(new Tuple2StreamMetadata());

    // Different references, equal values, consistent hash codes.
    Assert.assertFalse(second == first);
    Assert.assertTrue(second.equals(first));
    Assert.assertTrue(second.hashCode() == first.hashCode());

    // Perturb one field: equality and hash agreement must break.
    second.setType("setType1");
    Assert.assertFalse(second.equals(first));
    Assert.assertFalse(second.hashCode() == first.hashCode());
}
}
// Fragment: names a datasource (ds, declared outside this view), points it at
// the test topic with an empty default codec, and registers it on the context.
ds.setName(DS_NAME);
ds.setTopic(TEST_TOPIC);
ds.setCodec(new Tuple2StreamMetadata());
context.addDataSource(ds);