/** {@inheritDoc} */
@Override
public void put(T tuple)
{
  count++;
  process(tuple);
}
@Override
public Sink getSink()
{
  return input.getSink();
}
@Override
public StreamCodec<byte[]> getStreamCodec()
{
  if (ValidationToFile.this.streamCodec == null) {
    return super.getStreamCodec();
  } else {
    return streamCodec;
  }
}
};
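// The override above is a recurring Malhar pattern: the port advertises a custom
// StreamCodec only when the enclosing operator has one configured, and otherwise
// falls back to the engine default. A minimal sketch of a codec a user might plug
// in; KryoSerializableStreamCodec is real Malhar API, but KeyedCodec and the
// setter in the usage note are illustrative assumptions:
import java.util.Arrays;

import com.datatorrent.lib.codec.KryoSerializableStreamCodec;

public class KeyedCodec extends KryoSerializableStreamCodec<byte[]>
{
  @Override
  public int getPartition(byte[] tuple)
  {
    // route byte-identical tuples to the same partition
    return Arrays.hashCode(tuple);
  }
}

// usage (assumes the operator exposes a setter for its streamCodec field):
// validationToFile.setStreamCodec(new KeyedCodec());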
@Test
public void testPopulateFieldInfo()
{
  TestOutputOperator outputOperator = setupForOutputOperatorTest();
  outputOperator.setup(context);
  outputOperator.input.setup(tpc);
  outputOperator.activate(context);

  List<TestPojo> events = Lists.newArrayList();
  for (int i = 0; i < 3; i++) {
    Set<Integer> set = new HashSet<Integer>();
    set.add(i);
    List<Integer> list = new ArrayList<Integer>();
    list.add(i);
    Map<String, Integer> map = new HashMap<String, Integer>();
    map.put("key" + i, i);
    events.add(new TestPojo(UUID.randomUUID(), i, "abclast", true, i, 2.0, set, list, map,
        new Date(System.currentTimeMillis())));
  }

  outputOperator.beginWindow(0);
  for (TestPojo event : events) {
    outputOperator.input.process(event);
  }
  outputOperator.endWindow();

  Assert.assertEquals("rows in db", 3, outputOperator.getNumOfEventsInStore());
  outputOperator.getEventsInStore();
}
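// The test above implies a TestPojo with a UUID key plus primitive, collection,
// and date fields. A minimal sketch of the shape that constructor call implies;
// only the argument types are given by the test, the field names are assumptions:
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

public class TestPojo
{
  private UUID id;
  private int age;
  private String lastname;
  private boolean test;
  private int intValue;
  private double doubleValue;
  private Set<Integer> set;
  private List<Integer> list;
  private Map<String, Integer> map;
  private Date lastVisited;

  public TestPojo(UUID id, int age, String lastname, boolean test, int intValue,
      double doubleValue, Set<Integer> set, List<Integer> list,
      Map<String, Integer> map, Date lastVisited)
  {
    this.id = id;
    this.age = age;
    this.lastname = lastname;
    this.test = test;
    this.intValue = intValue;
    this.doubleValue = doubleValue;
    this.set = set;
    this.list = list;
    this.map = map;
    this.lastVisited = lastVisited;
  }

  // getters/setters omitted; POJO-based Malhar operators typically read fields
  // through getters resolved at setup time
}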
@Override
public void setup(Context.PortContext context)
{
  input.setup(context);
}
@Override
public void setConnected(boolean connected)
{
  input.setConnected(connected);
}
outputOperator.beginWindow(wid);
innerObj.setIntVal(wid + 1);
outputOperator.input.put(innerObj);
@Test
public void testWriting() throws Exception
{
  List<SimpleOrder> orderList = new ArrayList<>();
  orderList.add(new SimpleOrder(1, 11, 100.25, "customerOne"));
  orderList.add(new SimpleOrder(2, 22, 200.25, "customerTwo"));
  orderList.add(new SimpleOrder(3, 33, 300.25, "customerThree"));

  avroWriter.setSchemaString(AVRO_SCHEMA);
  avroWriter.data.setup(testMeta.portContext);
  avroWriter.setup(testMeta.context);

  avroWriter.beginWindow(0);
  ListIterator<SimpleOrder> itr = orderList.listIterator();
  while (itr.hasNext()) {
    avroWriter.data.process(itr.next());
  }
  avroWriter.endWindow();

  Assert.assertEquals("Number of tuples", 3, outputSink.collectedTuples.size());
  avroWriter.teardown();
}
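// testWriting assumes an AVRO_SCHEMA constant matching SimpleOrder's four
// constructor arguments (int, int, double, String). A plausible sketch; the
// field names and namespace in the test's actual constant may differ:
static final String AVRO_SCHEMA = "{\"namespace\":\"com.example.avro\","
    + "\"type\":\"record\",\"name\":\"SimpleOrder\",\"fields\":["
    + "{\"name\":\"orderId\",\"type\":\"int\"},"
    + "{\"name\":\"customerId\",\"type\":\"int\"},"
    + "{\"name\":\"amount\",\"type\":\"double\"},"
    + "{\"name\":\"customerName\",\"type\":\"string\"}]}";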
@Before
public void setupApexContexts() throws Exception
{
  Attribute.AttributeMap.DefaultAttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
  attributeMap.put(DAG.APPLICATION_ID, APP_ID);
  contextForUserUpsertOperator = mockOperatorContext(OPERATOR_ID_FOR_USER_UPSERTS, attributeMap);
  userUpsertOperator = new UserUpsertOperator();

  Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
  portAttributes.put(Context.PortContext.TUPLE_CLASS, User.class);
  testPortContextForUserUpserts = new TestPortContext(portAttributes);

  userUpsertOperator.setup(contextForUserUpsertOperator);
  userUpsertOperator.activate(contextForUserUpsertOperator);
  userUpsertOperator.input.setup(testPortContextForUserUpserts);
}
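// With the operator and port contexts wired as above, a test case can feed POJOs
// straight into the input port between window boundaries. A minimal sketch of
// such a driver; the User setter is an assumption about the tuple class:
userUpsertOperator.beginWindow(0L);
User user = new User();
user.setUserid("user1");  // hypothetical setter
userUpsertOperator.input.process(user);
userUpsertOperator.endWindow();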
@Override
public void process(Object t)
{
  AbstractJdbcPOJOOutputOperator.super.input.process(t);
}
@Test
public void testWriteFailure() throws Exception
{
  // Order does not carry the fields the Avro schema expects, so every record
  // should fail to serialize.
  List<Order> orderList = new ArrayList<>();
  orderList.add(new Order(11));
  orderList.add(new Order(22));
  orderList.add(new Order(33));

  avroWriter.setSchemaString(AVRO_SCHEMA);
  avroWriter.setup(testMeta.context);
  avroWriter.data.setup(testMeta.portContext);

  avroWriter.beginWindow(0);
  ListIterator<Order> itr = orderList.listIterator();
  while (itr.hasNext()) {
    avroWriter.data.process(itr.next());
  }

  // 12 field failures across 3 records: each record fails on every schema
  // field (which implies a 4-field schema), and all 3 records are dropped.
  Assert.assertEquals("Field write failures", 12, avroWriter.fieldErrorCount);
  Assert.assertEquals("Record write failures", 3, avroWriter.errorCount);

  avroWriter.endWindow();
  Assert.assertEquals("Number of tuples", 0, outputSink.collectedTuples.size());
  avroWriter.teardown();
}
// Tuples 0-2 go into the window opened earlier in the test.
inputPort.getSink().put(toKafka.get(0));
inputPort.getSink().put(toKafka.get(1));
inputPort.getSink().put(toKafka.get(2));
kafkaOutput.endWindow();

kafkaOutput.beginWindow(2);
inputPort.getSink().put(toKafka.get(3));
inputPort.getSink().put(toKafka.get(4));
inputPort.getSink().put(toKafka.get(5));
kafkaOutput.endWindow();

// Window 3, first attempt: new tuples 6 and 7, plus tuples 3-5 sent again.
kafkaOutput.beginWindow(3);
inputPort.getSink().put(toKafka.get(6));
inputPort.getSink().put(toKafka.get(7));
inputPort.getSink().put(toKafka.get(3));
inputPort.getSink().put(toKafka.get(4));
inputPort.getSink().put(toKafka.get(5));
kafkaOutput.endWindow();

// Window 3 is opened a second time, i.e. replayed (as after an operator
// failure and redeployment); an exactly-once output operator must not write
// the already-delivered tuples twice.
kafkaOutput.beginWindow(3);
inputPort.getSink().put(toKafka.get(6));
inputPort.getSink().put(toKafka.get(7));
inputPort.getSink().put(toKafka.get(8));
inputPort.getSink().put(toKafka.get(9));
kafkaOutput.endWindow();

kafkaOutput.beginWindow(4);
inputPort.getSink().put(toKafka.get(10));
inputPort.getSink().put(toKafka.get(11));
kafkaOutput.endWindow();
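// A simplified, self-contained sketch of per-window deduplication, one building
// block of exactly-once output: windows at or below the last committed id are
// treated as replays and skipped. This alone would not handle the partially
// different replayed window above (tuples 8 and 9 are new), which is why the
// real Kafka exactly-once operator also compares against what was already
// written to the topic. All names below are illustrative:
import com.datatorrent.api.DefaultInputPort;
import com.datatorrent.common.util.BaseOperator;

public class DedupingWriter<T> extends BaseOperator
{
  private long committedWindowId = -1;  // non-transient, so restored from checkpoint after failure
  private transient long currentWindowId;
  private transient boolean replayedWindow;

  public final transient DefaultInputPort<T> input = new DefaultInputPort<T>()
  {
    @Override
    public void process(T tuple)
    {
      if (!replayedWindow) {
        write(tuple);
      }
    }
  };

  @Override
  public void beginWindow(long windowId)
  {
    currentWindowId = windowId;
    // a window at or below the committed id was fully written before the failure
    replayedWindow = windowId <= committedWindowId;
  }

  @Override
  public void endWindow()
  {
    if (!replayedWindow) {
      committedWindowId = currentWindowId;
    }
  }

  private void write(T tuple)
  {
    // write to the external system here
  }
}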
@Before
public void setupApexContexts() throws Exception
{
  Attribute.AttributeMap.DefaultAttributeMap attributeMapForCounters = new Attribute.AttributeMap.DefaultAttributeMap();
  attributeMapForCounters.put(DAG.APPLICATION_ID, APP_ID);
  contextForCountersOperator = mockOperatorContext(OPERATOR_ID_FOR_COUNTER_COLUMNS, attributeMapForCounters);

  Attribute.AttributeMap.DefaultAttributeMap portAttributesForCounters = new Attribute.AttributeMap.DefaultAttributeMap();
  portAttributesForCounters.put(Context.PortContext.TUPLE_CLASS, CounterColumnTableEntry.class);
  testPortContextForCounters = new TestPortContext(portAttributesForCounters);

  counterUpdatesOperator = new CounterColumnUpdatesOperator();
  counterUpdatesOperator.setup(contextForCountersOperator);
  counterUpdatesOperator.activate(contextForCountersOperator);
  counterUpdatesOperator.input.setup(testPortContextForCounters);
}
@Override
public StreamCodec<INPUT> getStreamCodec()
{
  if (AbstractFileOutputOperator.this.streamCodec == null) {
    return super.getStreamCodec();
  } else {
    return streamCodec;
  }
}
};
@Override
public void process(Object tuple)
{
  CassandraPOJOOutputOperator.super.input.process(tuple);
}
CollectorTestSink indexSink = new CollectorTestSink();
Sink inSink = oper.data.getSink();
oper.index.setSink(indexSink);
@Before
public void setupApexContexts() throws Exception
{
  Attribute.AttributeMap.DefaultAttributeMap attributeMapForCompositePrimaryKey = new Attribute.AttributeMap.DefaultAttributeMap();
  attributeMapForCompositePrimaryKey.put(DAG.APPLICATION_ID, APP_ID);
  contextForCompositePrimaryKeysOperator = mockOperatorContext(OPERATOR_ID_FOR_COMPOSITE_PRIMARY_KEYS, attributeMapForCompositePrimaryKey);

  Attribute.AttributeMap.DefaultAttributeMap portAttributesForCompositePrimaryKeys = new Attribute.AttributeMap.DefaultAttributeMap();
  portAttributesForCompositePrimaryKeys.put(Context.PortContext.TUPLE_CLASS, CompositePrimaryKeyRow.class);
  testPortContextForCompositePrimaryKeys = new TestPortContext(portAttributesForCompositePrimaryKeys);

  compositePrimaryKeysOperator = new CompositePrimaryKeyUpdateOperator();
  compositePrimaryKeysOperator.setup(contextForCompositePrimaryKeysOperator);
  compositePrimaryKeysOperator.activate(contextForCompositePrimaryKeysOperator);
  compositePrimaryKeysOperator.input.setup(testPortContextForCompositePrimaryKeys);
}
@Override
public StreamCodec<Map<K, V>> getStreamCodec()
{
  StreamCodec<Map<K, V>> streamCodec = AbstractBaseNOperatorMap.this.getStreamCodec();
  if (streamCodec == null) {
    return super.getStreamCodec();
  } else {
    return streamCodec;
  }
}