Refine search
/**
 * Minimal pipeline: a generator whose tuples are discarded by a DevNull sink.
 * Useful as a throughput/benchmark topology.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  Gen generator = dag.addOperator("gen", Gen.class);
  DevNull sink = dag.addOperator("devNull", DevNull.class);
  dag.addStream("data", generator.out, sink.data);
}
}
/**
 * Create the DAG: an input receiver feeds a dimension generator, whose
 * dimensional output is wired into the average-calculation subgraph built
 * by {@code addAverageCalculation}.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  InputReceiver receiver = dag.addOperator("Receiver", InputReceiver.class);
  DimensionGenerator dimGen = dag.addOperator("DimensionsGenerator", DimensionGenerator.class);
  dag.addStream("Events", receiver.outputInline, dimGen.inputPort);

  // Attach the averaging pipeline and feed it the generated dimensional data.
  MachineInfoAveragingPrerequisitesOperator aggregator = addAverageCalculation(dag, conf);
  dag.addStream("DimensionalData", dimGen.outputInline, aggregator.inputPort);
}
@Override public void populateDAG(DAG dag, Configuration conf) { MessageReceiver receiver = dag.addOperator("Message Receiver", MessageReceiver.class); MessageResponder responder = dag.addOperator("Message Responder", MessageResponder.class); // Locality has to be container so that the operators use the same socket dag.addStream("messages", receiver.messageOutput, responder.messageInput).setLocality(DAG.Locality.CONTAINER_LOCAL); } }
/**
 * File-to-file transformation pipeline: read records from the filesystem,
 * parse them as CSV into POJOs, re-format the POJOs back to CSV lines, and
 * write the lines out as strings.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  FSRecordReaderModule reader = dag.addModule("recordReader", FSRecordReaderModule.class);
  CsvParser parser = dag.addOperator("csvParser", CsvParser.class);
  CsvFormatter csvFormatter = dag.addOperator("formatter", new CsvFormatter());
  StringFileOutputOperator writer = dag.addOperator("fileOutput", new StringFileOutputOperator());

  dag.addStream("record", reader.records, parser.in);
  dag.addStream("pojo", parser.out, csvFormatter.in);
  dag.addStream("string", csvFormatter.out, writer.input);
}
}
/**
 * Iterative (looping) topology: the Fibonacci operator's output is routed
 * both to the console and back to its own input through a
 * DefaultDelayOperator, which breaks the cycle across streaming windows.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  RandomEventGenerator randomGen = dag.addOperator("rand", new RandomEventGenerator());
  FibonacciOperator fibonacci = dag.addOperator("FIB", FibonacciOperator.class);
  DefaultDelayOperator delay = dag.addOperator("opDelay", DefaultDelayOperator.class);

  // The console operator is configured before registration so the extra
  // output file name (an enclosing-class field) is applied.
  StdoutOperator console = new StdoutOperator();
  console.setExtraOutputFileName(extraOutputFileName);
  dag.addOperator("console", console);

  dag.addStream("dummy_to_operator", randomGen.integer_data, fibonacci.dummyInputPort);
  dag.addStream("operator_to_delay", fibonacci.output, delay.input, console.input);
  // Feedback edge: closes the loop via the delay operator.
  dag.addStream("delay_to_operator", delay.output, fibonacci.input);
}
/**
 * Sets up the averaging stage of the DAG: an aggregator feeds an average
 * calculator, whose results are persisted to Redis and whose alerts are
 * sent out via SMTP.
 *
 * @param dag  the DAG instance being populated
 * @param conf the configuration instance
 * @return the aggregator operator, so the caller can attach its input port
 */
private MachineInfoAveragingPrerequisitesOperator addAverageCalculation(DAG dag, Configuration conf)
{
  MachineInfoAveragingPrerequisitesOperator aggregator =
      dag.addOperator("Aggregator", MachineInfoAveragingPrerequisitesOperator.class);
  MachineInfoAveragingOperator averager =
      dag.addOperator("AverageCalculator", MachineInfoAveragingOperator.class);
  RedisKeyValPairOutputOperator<MachineKey, Map<String, String>> redisWriter =
      dag.addOperator("Persister", new RedisKeyValPairOutputOperator<MachineKey, Map<String, String>>());
  SmtpOutputOperator mailer = dag.addOperator("Alerter", new SmtpOutputOperator());

  dag.addStream("Aggregates", aggregator.outputPort, averager.inputPort);
  dag.addStream("Average", averager.outputPort, redisWriter.input);
  dag.addStream("Alerts", averager.smtpAlert, mailer.input);
  return aggregator;
}
/**
 * Word-count pipeline: words from the input operator are tallied by a
 * UniqueCounter and the per-window counts are printed to the console.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  WordCountInputOperator wordSource = dag.addOperator("wordinput", new WordCountInputOperator());
  UniqueCounter<String> counter = dag.addOperator("count", new UniqueCounter<String>());
  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

  dag.addStream("wordinput-count", wordSource.outputPort, counter.data);
  dag.addStream("count-console", counter.count, console.input);
}
}
/**
 * JSON-to-CSV pipeline: generated JSON is parsed into POJOs, formatted as
 * CSV (schema loaded from a jar resource named by the enclosing-class
 * field {@code filename}), and written to HDFS.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  JsonGenerator jsonGen = dag.addOperator("JsonGenerator", JsonGenerator.class);
  JsonParser parser = dag.addOperator("jsonParser", JsonParser.class);

  CsvFormatter csvFormatter = dag.addOperator("formatter", CsvFormatter.class);
  csvFormatter.setSchema(SchemaUtils.jarResourceFileToString(filename));
  // The formatter needs to know the concrete POJO class arriving on its port.
  dag.setInputPortAttribute(csvFormatter.in, PortContext.TUPLE_CLASS, PojoEvent.class);

  HDFSOutputOperator<String> hdfsWriter = dag.addOperator("HDFSOutputOperator", HDFSOutputOperator.class);
  hdfsWriter.setLineDelimiter("");

  dag.addStream("parserStream", jsonGen.out, parser.in);
  dag.addStream("formatterStream", parser.out, csvFormatter.in);
  dag.addStream("outputStream", csvFormatter.out, hdfsWriter.input);
}
}
/**
 * File-to-Kafka pipeline: lines read from files are published to Kafka.
 * The single stream is CONTAINER_LOCAL so reader and writer share a container.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  LineByLineFileInputOperator reader = dag.addOperator("lines", LineByLineFileInputOperator.class);
  KafkaSinglePortOutputOperator<String, String> kafkaWriter =
      dag.addOperator("kafkaOutput", new KafkaSinglePortOutputOperator<String, String>());
  dag.addStream("data", reader.output, kafkaWriter.inputPort).setLocality(Locality.CONTAINER_LOCAL);
}
}
@Override public void populateDAG(DAG dag, Configuration conf) { RandomNumberGenerator random = dag.addOperator("randomInt", RandomNumberGenerator.class); TestPartition testPartition = dag.addOperator("testPartition", TestPartition.class); Codec3 codec = new Codec3(); dag.setInputPortAttribute(testPartition.in, PortContext.STREAM_CODEC, codec); //Add locality if needed, e.g.: .setLocality(Locality.CONTAINER_LOCAL); dag.addStream("randomData", random.out, testPartition.in); } }
/**
 * Pi estimation pipeline: random integers feed the pi calculator, whose
 * results are printed to the console. Both streams use the locality held
 * in the enclosing-class field {@code locality}.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  RandomEventGenerator randomSource = dag.addOperator("rand", new RandomEventGenerator());
  PiCalculateOperator piCalc = dag.addOperator("picalc", new PiCalculateOperator());
  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

  dag.addStream("rand_calc", randomSource.integer_data, piCalc.input).setLocality(locality);
  // NOTE(review): this stream actually connects calc -> console; the name
  // "rand_console" looks mislabeled, but stream names are referenced by
  // external configuration, so it is preserved as-is — confirm before renaming.
  dag.addStream("rand_console", piCalc.output, console.input).setLocality(locality);
}
@Override public void populateDAG(DAG dag, Configuration configuration) { // create operators FileReader fileReader = dag.addOperator("FileReader", FileReader.class); CustomParser customParser = dag.addOperator("CustomParser", CustomParser.class); JdbcPOJOInsertOutputOperator jdbcOutputOperator = dag.addOperator("JdbcOutput", JdbcPOJOInsertOutputOperator.class); // configure operators jdbcOutputOperator.setFieldInfos(addFieldInfos()); JdbcTransactionalStore outputStore = new JdbcTransactionalStore(); jdbcOutputOperator.setStore(outputStore); // add stream dag.addStream("Data", fileReader.output, customParser.input); dag.addStream("POJOs", customParser.output, jdbcOutputOperator.input); }
@Override public void populateDAG(DAG dag, Configuration conf) { //dag.setAttribute(DAGContext.APPLICATION_NAME, "TweetsDump"); TwitterSampleInput twitterStream = dag.addOperator("TweetSampler", new TwitterSampleInput()); Status2Hbase hBaseWriter = dag.addOperator("DatabaseWriter", new Status2Hbase()); dag.addStream("Statuses", twitterStream.status, hBaseWriter.input).setLocality(Locality.CONTAINER_LOCAL); }
/**
 * Kafka word-count pipeline with idempotent replay: the Kafka input uses an
 * FSWindowDataManager so windows replay identically after recovery; counts
 * go both to a file writer and to the console.
 */
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  KafkaSinglePortStringInputOperator kafkaSource =
      dag.addOperator("kafkaInput", new KafkaSinglePortStringInputOperator());
  // Persist per-window offsets so replays after failure are exactly the same.
  kafkaSource.setWindowDataManager(new FSWindowDataManager());

  ExactlyOnceJdbcOutputApp.UniqueCounterFlat counter =
      dag.addOperator("count", new ExactlyOnceJdbcOutputApp.UniqueCounterFlat());
  FileWriter writer = dag.addOperator("fileWriter", new FileWriter());
  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

  dag.addStream("words", kafkaSource.outputPort, counter.data);
  dag.addStream("counts", counter.counts, writer.input, console.input);
}
/**
 * Create the DAG: generated Old Faithful readings are fed to an R script
 * operator (eruption model), whose string output is printed to the console.
 * All streams use the locality from the enclosing-class field {@code locality}.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  InputGenerator inputGen = dag.addOperator("rand", new InputGenerator());
  FaithfulRScript rScript = dag.addOperator("rScriptOp",
      new FaithfulRScript("com/datatorrent/examples/r/oldfaithful/eruptionModel.R",
          "eruptionModel", "retVal"));
  ConsoleOutputOperator console = dag.addOperator("consoles", new ConsoleOutputOperator());

  // Declare the R type of each argument passed into the script.
  Map<String, FaithfulRScript.REXP_TYPE> argTypes = new HashMap<String, FaithfulRScript.REXP_TYPE>();
  argTypes.put("ELAPSEDTIME", FaithfulRScript.REXP_TYPE.REXP_INT);
  argTypes.put("ERUPTIONS", FaithfulRScript.REXP_TYPE.REXP_ARRAY_DOUBLE);
  argTypes.put("WAITING", FaithfulRScript.REXP_TYPE.REXP_ARRAY_INT);
  rScript.setArgTypeMap(argTypes);

  dag.addStream("ingen_faithfulRscript", inputGen.outputPort, rScript.faithfulInput)
      .setLocality(locality);
  dag.addStream("ingen_faithfulRscript_eT", inputGen.elapsedTime, rScript.inputElapsedTime)
      .setLocality(locality);
  dag.addStream("faithfulRscript_console_s", rScript.strOutput, console.input)
      .setLocality(locality);
}
}
/**
 * Twitter top-words pipeline: sampled tweets are split into words, counted
 * per window, aggregated into a 120-window sliding top-N, and published via
 * the shared console/snapshot output helper.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  TwitterSampleInput tweetFeed = dag.addOperator("TweetSampler", new TwitterSampleInput());
  TwitterStatusWordExtractor extractor =
      dag.addOperator("WordExtractor", TwitterStatusWordExtractor.class);
  UniqueCounter<String> wordCounter =
      dag.addOperator("UniqueWordCounter", new UniqueCounter<String>());

  WindowedTopCounter<String> topWords = dag.addOperator("TopCounter", new WindowedTopCounter<String>());
  // 120 application windows of history, sliding one window at a time.
  topWords.setSlidingWindowWidth(120);
  topWords.setDagWindowWidth(1);

  dag.addStream("TweetStream", tweetFeed.text, extractor.input);
  dag.addStream("TwittedWords", extractor.output, wordCounter.data);
  dag.addStream("UniqueWordCounts", wordCounter.count, topWords.input)
      .setLocality(Locality.CONTAINER_LOCAL);

  TwitterTopCounterApplication.consoleOutput(dag, "topWords", topWords.output, SNAPSHOT_SCHEMA, "word");
}
}
/**
 * Fan-out pipeline: tweets consumed from Kafka are processed by a stream
 * operator and written to both HDFS and HBase. Sink locations come from the
 * DiP configuration (HDFS file name, HBase table, ZooKeeper quorum/port).
 */
@Override
public void populateDAG(DAG dag, Configuration config)
{
  KafkaSinglePortInputOperator kafkaSource = dag.addOperator("kafkaIn", new KafkaSinglePortInputOperator());
  TwitterStreamOperator tweetProcessor = dag.addOperator("tweeterStream", TwitterStreamOperator.class);

  HdfsSinkOperator hdfsSink = dag.addOperator("hdfsSinkOperator", HdfsSinkOperator.class);
  hdfsSink.setFileName(config.get(DiPConfiguration.HDFS_FILE_NAME));

  HBaseSinkOperator hbaseSink = dag.addOperator("hBaseSinkOperator", new HBaseSinkOperator(config));
  hbaseSink.getStore().setTableName(config.get(DiPConfiguration.HBASE_TABLE_NAME));
  hbaseSink.getStore().setZookeeperQuorum(config.get(DiPConfiguration.ZK_HOST));
  // Default ZooKeeper client port is 2181 when not configured.
  hbaseSink.getStore().setZookeeperClientPort(config.getInt(DiPConfiguration.ZK_PORT, 2181));

  dag.addStream("kafkaStream", kafkaSource.outputPort, tweetProcessor.inputPort)
      .setLocality(Locality.CONTAINER_LOCAL);
  dag.addStream("hdfsStream", tweetProcessor.hdfsOutputPort, hdfsSink.input);
  dag.addStream("hbaseStream", tweetProcessor.hBaseOutputPort, hbaseSink.input);
}
@Override public void populateDAG(DAG dag, Configuration conf) { pojoDataGenerator = dag.addOperator("Input", new POJOGenerator()); TransformOperator transform = dag.addOperator("Process", new TransformOperator()); // Set expression map Map<String, String> expMap = new HashMap<>(); expMap.put("name", "{$.firstName}.concat(\" \").concat({$.lastName})"); expMap.put("age", "(new java.util.Date()).getYear() - {$.dateOfBirth}.getYear()"); expMap.put("address", "{$.address}.toLowerCase()"); transform.setExpressionMap(expMap); FunctionOperator.MapFunctionOperator<Object, ?> output = dag.addOperator("out", new FunctionOperator.MapFunctionOperator<>(outputFn)); dag.addStream("InputToTransform", pojoDataGenerator.output, transform.input); dag.addStream("TransformToOutput", transform.output, output.input); dag.setInputPortAttribute(transform.input, Context.PortContext.TUPLE_CLASS, CustomerEvent.class); dag.setOutputPortAttribute(transform.output, Context.PortContext.TUPLE_CLASS, CustomerInfo.class); setPartitioner(dag,conf,transform); }
/**
 * Kafka-to-file pipeline: messages consumed from Kafka (starting at the
 * earliest available offset) are written out line by line.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  KafkaSinglePortInputOperator kafkaSource = dag.addOperator("kafkaIn", new KafkaSinglePortInputOperator());
  // Start consumption from the beginning of the topic.
  kafkaSource.setInitialOffset(AbstractKafkaInputOperator.InitialOffset.EARLIEST.name());

  LineOutputOperator fileSink = dag.addOperator("fileOut", new LineOutputOperator());
  dag.addStream("data", kafkaSource.outputPort, fileSink.input);
}
}
/**
 * Exactly-once word-count pipeline: Kafka input with an FSWindowDataManager
 * (for idempotent window replay) feeds a flat unique counter; counts are
 * persisted through a transactional JDBC store and echoed to the console.
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  KafkaSinglePortStringInputOperator kafkaSource =
      dag.addOperator("kafkaInput", new KafkaSinglePortStringInputOperator());
  // Persist per-window offsets so replays after failure are exactly the same.
  kafkaSource.setWindowDataManager(new FSWindowDataManager());

  UniqueCounterFlat counter = dag.addOperator("count", new UniqueCounterFlat());

  CountStoreOperator jdbcStore = dag.addOperator("store", new CountStoreOperator());
  jdbcStore.setStore(new JdbcTransactionalStore());

  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

  dag.addStream("words", kafkaSource.outputPort, counter.data);
  dag.addStream("counts", counter.counts, jdbcStore.input, console.input);
}