/**
 * Registers the given data source in the local registry, keyed by its name.
 * An existing entry with the same name is replaced.
 */
public void addDataSource(Kafka2TupleMetadata dataSource) {
    String name = dataSource.getName();
    this.datasources.put(name, dataSource);
}
// NOTE(review): fragment — the enclosing method's header and closing braces are outside this view.
// Walks every declared data source and records the ones no stream definition (in sds) references.
for (Kafka2TupleMetadata ds : context.getDataSourceMetadata().values()) {
    // No stream points at this data source by name -> mark it unused.
    if (!sds.stream().anyMatch(t -> t.getDataSource().equals(ds.getName()))) {
        state.appendUnusedDatasource(ds.getName());
        // NOTE(review): both validation messages below are appended unconditionally inside the
        // "unused" branch — presumably they should each be guarded by a type check / codec-null
        // check respectively; confirm intent against the full method before relying on them.
        state.appendDataSourceValidation(ds.getName(), String.format(" unsupported data source type %s !", ds.getType()));
        state.appendDataSourceValidation(ds.getName(), String.format("codec of datasource must *not* be null!"));
        continue;
private void validateStreams() { Collection<Kafka2TupleMetadata> datasources = context.getDataSourceMetadata().values(); Collection<PolicyDefinition> definitions = context.getPolicies().values(); for (StreamDefinition sd : context.getStreamSchemas().values()) { if (!datasources.stream().anyMatch(d -> d.getName().equals(sd.getDataSource()))) { state.appendStreamValidation(sd.getStreamId(), String.format("stream %s reference unknown data source %s !", sd.getStreamId(), sd.getDataSource())); } if (!definitions.stream().anyMatch(p -> p.getInputStreams().contains(sd.getStreamId()))) { state.appendUnusedStreams(sd.getStreamId()); } // more on columns if (sd.getColumns() == null || sd.getColumns().size() == 0) { state.appendStreamValidation(sd.getStreamId(), String.format("stream %s have empty columns!", sd.getStreamId())); } } }
/**
 * Extracts the identity key for any supported metadata entity type.
 * The instanceof chain order is preserved from the original implementation.
 *
 * @param t entity instance (Topology, PolicyAssignment, Kafka2TupleMetadata,
 *          PolicyDefinition, Publishment, StreamDefinition, or MonitoredStream)
 * @return the entity's natural key, cast to the caller's expected key type
 * @throws IllegalArgumentException if the entity type is not supported
 *         (IllegalArgumentException is a RuntimeException, so existing callers
 *         catching RuntimeException are unaffected)
 */
@SuppressWarnings("unchecked")
private static <T, K> K getKey(T t) {
    if (t instanceof Topology) {
        return (K) ((Topology) t).getName();
    } else if (t instanceof PolicyAssignment) {
        return (K) ((PolicyAssignment) t).getPolicyName();
    } else if (t instanceof Kafka2TupleMetadata) {
        return (K) ((Kafka2TupleMetadata) t).getName();
    } else if (t instanceof PolicyDefinition) {
        return (K) ((PolicyDefinition) t).getName();
    } else if (t instanceof Publishment) {
        return (K) ((Publishment) t).getName();
    } else if (t instanceof StreamDefinition) {
        return (K) ((StreamDefinition) t).getStreamId();
    } else if (t instanceof MonitoredStream) {
        return (K) ((MonitoredStream) t).getStreamGroup();
    }
    // Bad argument, not an internal failure: use the standard exception type.
    throw new IllegalArgumentException("unexpected key class " + t.getClass());
}
@Path("/streams/create") @POST public OpResult createStream(StreamDefinitionWrapper stream) { Preconditions.checkNotNull(stream.getStreamDefinition(),"Stream definition is null"); Preconditions.checkNotNull(stream.getStreamSource(),"Stream source is null"); stream.validateAndEnsureDefault(); OpResult createStreamResult = dao.createStream(stream.getStreamDefinition()); OpResult createDataSourceResult = dao.addDataSource(stream.getStreamSource()); // TODO: Check kafka topic exist or not. if (createStreamResult.code == OpResult.SUCCESS && createDataSourceResult.code == OpResult.SUCCESS) { return OpResult.success("Successfully create stream " + stream.getStreamDefinition().getStreamId() + ", and datasource " + stream.getStreamSource().getName()); } else { return OpResult.fail("Error: " + StringUtils.join(new String[]{createDataSourceResult.message, createDataSourceResult.message},",")); } }
/**
 * Validates the wrapped stream definition and source, then fills in defaults:
 * source type KAFKA when unset, a derived "&lt;STREAMID&gt;_CUSTOMIZED" data source
 * name linking definition and source, and a JSON-based codec keyed on the
 * "timestamp" column.
 */
public void validateAndEnsureDefault() {
    Preconditions.checkNotNull(streamSource);
    Preconditions.checkNotNull(streamDefinition);
    if (streamSource.getType() == null) {
        streamSource.setType("KAFKA");
    }
    // Derive a deterministic data source name from the stream id and attach it to both sides.
    String dataSourceName = (getStreamDefinition().getStreamId() + "_CUSTOMIZED").toUpperCase();
    getStreamDefinition().setDataSource(dataSourceName);
    getStreamSource().setName(dataSourceName);
    // Build a default codec; note this overwrites any codec already present on the source.
    Tuple2StreamMetadata codec = new Tuple2StreamMetadata();
    codec.setTimestampColumn("timestamp");
    codec.setStreamNameSelectorCls(JsonStringStreamNameSelector.class.getName());
    Properties streamNameSelectorProp = new Properties();
    streamNameSelectorProp.put("userProvidedStreamName", streamSource.getName());
    codec.setStreamNameSelectorProp(streamNameSelectorProp);
    // (Removed a dead re-check of streamNameSelectorCls: it was assigned a non-blank
    // class-name constant just above, so its isBlank branch could never fire.)
    if (StringUtils.isBlank(codec.getTimestampFormat())) {
        // Normalize a blank timestamp format to null so downstream treats it as unset.
        codec.setTimestampFormat(null);
    }
    this.streamSource.setCodec(codec);
}
}
/**
 * Builds a single KAFKA-backed data source for the given topic and wraps it
 * in a mutable one-entry map keyed by the data source name.
 */
private Map<String, Kafka2TupleMetadata> createDatasource(final String topicName, final String dataSourceName) {
    final Kafka2TupleMetadata metadata = new Kafka2TupleMetadata();
    metadata.setName(dataSourceName);
    metadata.setType("KAFKA");
    metadata.setProperties(new HashMap<String, String>());
    metadata.setTopic(topicName);
    metadata.setSchemeCls("PlainStringScheme");
    metadata.setCodec(new Tuple2StreamMetadata());
    final Map<String, Kafka2TupleMetadata> result = new HashMap<String, Kafka2TupleMetadata>();
    result.put(metadata.getName(), metadata);
    return result;
}