/**
 * Sends {@code n} generated records to the given bolt as DATA_TUPLEs, in batches, and returns
 * every record that was sent (in order). Each record carries "field" -> its running index.
 *
 * Fix: the final batch is clamped to {@code n - i} so that exactly n records are produced.
 * The original always allocated a full batchSize array, so when n was not a multiple of
 * batchSize it sent (and returned) extra records beyond n.
 *
 * @param bolt      the bolt to drive
 * @param id        the query id placed on each tuple
 * @param n         total number of records to send
 * @param batchSize maximum number of records per DATA_TUPLE
 * @return the records that were sent
 */
private static List<BulletRecord> sendRawRecordTuplesTo(IRichBolt bolt, String id, int n, int batchSize) {
    List<BulletRecord> sent = new ArrayList<>();
    for (int i = 0; i < n; i += batchSize) {
        // Clamp the last batch so we never generate more than n records in total.
        int size = Math.min(batchSize, n - i);
        BulletRecord[] batch = new BulletRecord[size];
        for (int j = 0; j < size; ++j) {
            batch[j] = RecordBox.get().add("field", String.valueOf(i + j)).getRecord();
        }
        Tuple tuple = TupleUtils.makeIDTuple(TupleClassifier.Type.DATA_TUPLE, id, getListBytes(batch));
        bolt.execute(tuple);
        sent.addAll(asList(batch));
    }
    return sent;
}
@Test
public void testProjection() {
    // Register a projection query renaming field -> id and map_field.id -> mid.
    Tuple queryTuple = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42",
                                   makeProjectionQuery(Pair.of("field", "id"), Pair.of("map_field.id", "mid")),
                                   METADATA);
    bolt.execute(queryTuple);

    BulletRecord input = RecordBox.get().add("field", "b235gf23b")
                                        .add("timestamp", 92L)
                                        .addMap("map_field", Pair.of("id", "123"), Pair.of("bar", "foo"))
                                        .getRecord();
    bolt.execute(makeRecordTuple(input));

    // Only the projected (renamed) fields survive in the emitted record.
    BulletRecord projected = RecordBox.get().add("id", "b235gf23b").add("mid", "123").getRecord();
    Tuple emitted = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", projected);
    Assert.assertTrue(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, 1, emitted));
}
/**
 * Builds and serializes a GroupData with a single COUNT operation (named countField)
 * after feeding it {@code count} empty records.
 */
private static byte[] getGroupDataWithCount(String countField, int count) {
    GroupOperation countOperation = new GroupOperation(COUNT, null, countField);
    GroupData groupData = new GroupData(new HashSet<>(singletonList(countOperation)));
    for (int i = 0; i < count; ++i) {
        groupData.consume(RecordBox.get().getRecord());
    }
    return SerializerDeserializer.toBytes(groupData);
}
@Test
public void testProjectionAndFiltering() {
    // Filter on map_field.id == "123" while projecting field -> id and map_field.id -> mid.
    Tuple queryTuple = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42",
                                   makeProjectionFilterQuery("map_field.id", singletonList("123"), EQUALS,
                                                             Pair.of("field", "id"), Pair.of("map_field.id", "mid")),
                                   METADATA);
    bolt.execute(queryTuple);

    BulletRecord input = RecordBox.get().add("field", "b235gf23b")
                                        .add("timestamp", 92L)
                                        .addMap("map_field", Pair.of("id", "123"), Pair.of("bar", "foo"))
                                        .getRecord();
    bolt.execute(makeRecordTuple(input));

    // The matching record is emitted once, with only the projected fields.
    BulletRecord projected = RecordBox.get().add("id", "b235gf23b").add("mid", "123").getRecord();
    Tuple emitted = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", projected);
    Assert.assertTrue(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, 1, emitted));
}
/**
 * Sends {@code n} generated records to the bolt as one sliding-window DATA_TUPLE
 * (a serialized SlidingRecord.Data carrying the record count and the serialized list)
 * and returns the records that were sent.
 */
private static List<BulletRecord> sendSlidingWindowWithRawRecordTuplesTo(IRichBolt bolt, String id, int n) {
    BulletRecord[] records = new BulletRecord[n];
    for (int i = 0; i < n; ++i) {
        records[i] = RecordBox.get().add("field", String.valueOf(i)).getRecord();
    }
    // Wrap the serialized record list in a SlidingRecord.Data that also carries the count.
    byte[] serializedList = getListBytes(records);
    byte[] payload = SerializerDeserializer.toBytes(new SlidingRecord.Data(records.length, serializedList));
    bolt.execute(TupleUtils.makeIDTuple(TupleClassifier.Type.DATA_TUPLE, id, payload));
    return asList(records);
}
@Test
public void testFilteringUsingProjectedName() {
    // The filter references "mid", which only exists after projection; the raw record has no
    // such field, so the record should not match and nothing should be emitted.
    Tuple queryTuple = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42",
                                   makeProjectionFilterQuery("mid", singletonList("123"), EQUALS,
                                                             Pair.of("field", "id"), Pair.of("map_field.id", "mid")),
                                   METADATA);
    bolt.execute(queryTuple);

    BulletRecord input = RecordBox.get().add("field", "b235gf23b")
                                        .add("timestamp", 92L)
                                        .addMap("map_field", Pair.of("id", "123"), Pair.of("bar", "foo"))
                                        .getRecord();
    bolt.execute(makeRecordTuple(input));

    BulletRecord projected = RecordBox.get().add("id", "b235gf23b").add("mid", "123").getRecord();
    Tuple notEmitted = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", projected);
    Assert.assertFalse(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, notEmitted));
}
@Test
public void testQueryErrorsAreSilentlyIgnored() {
    // A query whose aggregation type is null fails to initialize; the bolt should drop it
    // without emitting anything for subsequent data.
    Tuple badQuery = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", "{'aggregation': { 'type': null }}");
    bolt.execute(badQuery);

    Tuple dataTuple = makeRecordTuple(RecordBox.get().add("field", "b235gf23b").getRecord());
    bolt.execute(dataTuple);
    bolt.execute(dataTuple);

    Assert.assertEquals(collector.getEmittedCount(), 0);
}
@Test
public void testProjectionNotLosingFilterColumn() {
    // The filter uses "timestamp", which is NOT among the projected fields; the record must
    // still match (projection must not hide the filter column from the filter itself).
    Tuple queryTuple = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42",
                                   makeProjectionFilterQuery("timestamp", singletonList("92"), EQUALS,
                                                             Pair.of("field", "id"), Pair.of("map_field.id", "mid")),
                                   METADATA);
    bolt.execute(queryTuple);

    BulletRecord input = RecordBox.get().add("field", "b235gf23b")
                                        .add("timestamp", 92L)
                                        .addMap("map_field", Pair.of("id", "123"), Pair.of("bar", "foo"))
                                        .getRecord();
    bolt.execute(makeRecordTuple(input));

    BulletRecord projected = RecordBox.get().add("id", "b235gf23b").add("mid", "123").getRecord();
    Tuple emitted = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", projected);
    Assert.assertTrue(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, emitted));
}
// NOTE(review): mid-method fragment — `query` and `records` are defined outside this view.
bolt.execute(query);
// Feed 8 records with distinct A values 0..7, then 6 more with A=0 and 2 more with A=3,
// so A=0 is seen 7 times and A=3 is seen 3 times in total.
IntStream.range(0, 8).mapToObj(i -> RecordBox.get().add("A", i).getRecord())
                     .map(FilterBoltTest::makeRecordTuple)
                     .forEach(bolt::execute);
IntStream.range(0, 6).mapToObj(i -> RecordBox.get().add("A", 0).getRecord())
                     .map(FilterBoltTest::makeRecordTuple)
                     .forEach(bolt::execute);
IntStream.range(0, 2).mapToObj(i -> RecordBox.get().add("A", 3).getRecord())
                     .map(FilterBoltTest::makeRecordTuple)
                     .forEach(bolt::execute);
// Presumably a top-2 style aggregation: two result records expected — confirm against the
// query built earlier in this test.
Assert.assertEquals(records.size(), 2);
BulletRecord expectedA = RecordBox.get().add("A", "0").add("foo", "null").add("cnt", 7L).getRecord();
BulletRecord expectedB = RecordBox.get().add("A", "3").add("foo", "null").add("cnt", 3L).getRecord();
// NOTE(review): mid-method fragment — `matching` is defined outside this view, and recordA/recordB
// are presumably sent to the bolt by code not shown here.
BulletRecord recordA = RecordBox.get().add("field", "abc")
                                      .add("experience", "tv")
                                      .add("mid", 11)
                                      .getRecord();
BulletRecord recordB = RecordBox.get().addMap("demographic_map", Pair.of("age", "67")).getRecord();
bolt.execute(matching);
// Expected output mirrors recordA's flat fields; recordB's map-only record is expected NOT to appear.
BulletRecord expectedRecord = RecordBox.get().add("field", "abc").add("experience", "tv")
                                             .add("mid", 11).getRecord();
BulletRecord notExpectedRecord = RecordBox.get().addMap("demographic_map", Pair.of("age", "67")).getRecord();
@Test
public void testBadJson() {
    // Malformed query JSON: the bolt should ack both the query and the data tuple,
    // but never emit anything.
    Tuple badQuery = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", "'filters' : [], ", METADATA);
    bolt.execute(badQuery);

    BulletRecord input = RecordBox.get().add("field", "b235gf23b").getRecord();
    Tuple dataTuple = makeRecordTuple(input);
    bolt.execute(dataTuple);

    Tuple wouldBeEmitted = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", input);
    Assert.assertTrue(collector.wasAcked(badQuery));
    Assert.assertTrue(collector.wasAcked(dataTuple));
    Assert.assertFalse(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, wouldBeEmitted));
}
// NOTE(review): mid-method fragment — `query`, `records`, and the RANGE/COUNT/PROBABILITY
// constants come from code outside this view.
bolt.execute(query);
// 101 records with field = 0..100 drive a distribution-style aggregation.
IntStream.range(0, 101).mapToObj(i -> RecordBox.get().add("field", i).getRecord())
                       .map(FilterBoltTest::makeRecordTuple)
                       .forEach(bolt::execute);
// Expected histogram buckets: (-inf, 0), [0, 50), [50, 100), [100, +inf) with
// counts 0, 50, 50, 1 out of 101 total.
BulletRecord expectedA = RecordBox.get().add(RANGE_FIELD, NEGATIVE_INFINITY_START + SEPARATOR + 0.0 + END_EXCLUSIVE)
                                        .add(COUNT_FIELD, 0.0)
                                        .add(PROBABILITY_FIELD, 0.0).getRecord();
BulletRecord expectedB = RecordBox.get().add(RANGE_FIELD, START_INCLUSIVE + 0.0 + SEPARATOR + 50.0 + END_EXCLUSIVE)
                                        .add(COUNT_FIELD, 50.0)
                                        .add(PROBABILITY_FIELD, 50.0 / 101).getRecord();
BulletRecord expectedC = RecordBox.get().add(RANGE_FIELD, START_INCLUSIVE + 50.0 + SEPARATOR + 100.0 + END_EXCLUSIVE)
                                        .add(COUNT_FIELD, 50.0)
                                        .add(PROBABILITY_FIELD, 50.0 / 101).getRecord();
BulletRecord expectedD = RecordBox.get().add(RANGE_FIELD, START_INCLUSIVE + 100.0 + SEPARATOR + POSITIVE_INFINITY_END)
                                        .add(COUNT_FIELD, 1.0)
                                        .add(PROBABILITY_FIELD, 1.0 / 101).getRecord();
// Only the first two buckets are checked here; expectedC/expectedD are presumably asserted
// in lines beyond this view.
Assert.assertEquals(records.get(0), expectedA);
Assert.assertEquals(records.get(1), expectedB);
@Test
public void testFiltering() {
    Tuple queryTuple = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", makeFieldFilterQuery("b235gf23b"), METADATA);
    bolt.execute(queryTuple);

    // One record carries the filter value, the other does not.
    BulletRecord match = RecordBox.get().add("field", "b235gf23b").getRecord();
    BulletRecord miss = RecordBox.get().add("field", "wontmatch").getRecord();
    bolt.execute(makeRecordTuple(match));
    bolt.execute(makeRecordTuple(miss));

    // The matching record is emitted exactly once; the other never.
    Tuple emitted = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", match);
    Assert.assertTrue(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, 1, emitted));
    Tuple notEmitted = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", miss);
    Assert.assertFalse(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, notEmitted));
}
"field", 10, null); IntStream.range(0, 50).mapToObj(i -> RecordBox.get().add("field", i).getRecord()) .forEach(distribution::consume); "field", 10, null); IntStream.range(50, 101).mapToObj(i -> RecordBox.get().add("field", i).getRecord()) .forEach(distribution::consume); BulletRecord expectedA = RecordBox.get().add(RANGE_FIELD, NEGATIVE_INFINITY_START + SEPARATOR + 0.0 + END_EXCLUSIVE) .add(COUNT_FIELD, 0.0) .add(PROBABILITY_FIELD, 0.0).getRecord(); BulletRecord expectedB = RecordBox.get().add(RANGE_FIELD, START_INCLUSIVE + 0.0 + SEPARATOR + 50.0 + END_EXCLUSIVE) .add(COUNT_FIELD, 50.0) .add(PROBABILITY_FIELD, 50.0 / 101).getRecord(); BulletRecord expectedC = RecordBox.get().add(RANGE_FIELD, START_INCLUSIVE + 50.0 + SEPARATOR + 100.0 + END_EXCLUSIVE) .add(COUNT_FIELD, 50.0) .add(PROBABILITY_FIELD, 50.0 / 101).getRecord(); BulletRecord expectedD = RecordBox.get().add(RANGE_FIELD, START_INCLUSIVE + 100.0 + SEPARATOR + POSITIVE_INFINITY_END) .add(COUNT_FIELD, 1.0) .add(PROBABILITY_FIELD, 1.0 / 101).getRecord();
@Test
public void testDifferentQueryMatchingDifferentTuple() {
    // Two concurrent queries: 42 filters on field, 43 filters on timestamp NOT in {1, 2, 3, 45}.
    Tuple fieldQuery = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", makeFieldFilterQuery("b235gf23b"), METADATA);
    Tuple timestampQuery = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "43",
                                       makeFilterQuery("timestamp", asList("1", "2", "3", "45"), NOT_EQUALS),
                                       METADATA);
    bolt.execute(fieldQuery);
    bolt.execute(timestampQuery);

    BulletRecord recordA = RecordBox.get().add("field", "b235gf23b").add("timestamp", 45L).getRecord();
    BulletRecord recordB = RecordBox.get().add("field", "b235gf23b").add("timestamp", 42L).getRecord();
    bolt.execute(makeRecordTuple(recordA));
    bolt.execute(makeRecordTuple(recordB));

    // Query 42 emitted recordA exactly once and never recordB; query 43 emitted recordB exactly once.
    Tuple emittedFor42 = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", recordA);
    Tuple notEmittedFor42 = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", recordB);
    Tuple emittedFor43 = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "43", recordB);
    Assert.assertTrue(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, 1, emittedFor42));
    Assert.assertFalse(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, notEmittedFor42));
    Assert.assertTrue(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, 1, emittedFor43));
}
@Test
public void testQueryNotDoneAndThenDone() {
    // DonableFilterBolt presumably finishes the query after tick tuples — confirm against its definition.
    bolt = ComponentUtils.prepare(new DonableFilterBolt(), collector);
    Tuple queryTuple = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", makeFieldFilterQuery("b235gf23b"), METADATA);
    bolt.execute(queryTuple);

    // A matching record seen before the query is done is emitted exactly once.
    BulletRecord beforeDone = RecordBox.get().add("field", "b235gf23b").getRecord();
    bolt.execute(makeRecordTuple(beforeDone));

    Tuple tick = TupleUtils.makeTuple(TupleClassifier.Type.TICK_TUPLE);
    bolt.execute(tick);
    bolt.execute(tick);

    Tuple emitted = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", beforeDone);
    Assert.assertTrue(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, 1, emitted));

    // A record that was never sent must not show up on the data stream.
    BulletRecord neverSent = RecordBox.get().add("field", "b235gf23b").add("mid", "2342").getRecord();
    Tuple notEmitted = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", neverSent);
    Assert.assertFalse(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, notEmitted));
}
@Test
public void testFailQueryInitialization() {
    // NoQueryFilterBolt presumably fails to create the query object; a matching record must
    // then produce no output.
    bolt = ComponentUtils.prepare(new NoQueryFilterBolt(), collector);
    Tuple queryTuple = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", makeFieldFilterQuery("b235gf23b"), METADATA);
    bolt.execute(queryTuple);

    BulletRecord input = RecordBox.get().add("field", "b235gf23b").getRecord();
    bolt.execute(makeRecordTuple(input));

    Tuple notEmitted = makeDataTuple(TupleClassifier.Type.DATA_TUPLE, "42", input);
    Assert.assertFalse(wasRawRecordEmittedTo(TopologyConstants.DATA_STREAM, notEmitted));
}
// NOTE(review): mid-method fragment — bulletConfig, fields, and the assertions on
// expectedA/expectedB live outside this view.
TopK topK = TopKTest.makeTopK(bulletConfig, makeAttributes(null, 5L), fields, 2, null);
// 32 records cycling A over 0..7 (4 each), then 8 more cycling A over {0, 1} (4 each),
// so A=0 and A=1 end with 8 occurrences apiece — the expected top-2.
IntStream.range(0, 32).mapToObj(i -> RecordBox.get().add("A", i % 8).getRecord()).forEach(topK::consume);
IntStream.range(0, 8).mapToObj(i -> RecordBox.get().add("A", i % 2).getRecord()).forEach(topK::consume);
BulletRecord expectedA = RecordBox.get().add("A", "0").add("foo", "null").add("cnt", 8L).getRecord();
BulletRecord expectedB = RecordBox.get().add("A", "1").add("foo", "null").add("cnt", 8L).getRecord();
@Test public void testFilteringLatency() { config = new BulletStormConfig(); // Don't use the overridden aggregation default size but turn on built in metrics config.set(BulletStormConfig.TOPOLOGY_METRICS_BUILT_IN_ENABLE, true); collector = new CustomCollector(); CustomTopologyContext context = new CustomTopologyContext(); bolt = new FilterBolt(TopologyConstants.RECORD_COMPONENT, config); ComponentUtils.prepare(new HashMap<>(), bolt, context, collector); Tuple query = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", makeFieldFilterQuery("bar"), METADATA); bolt.execute(query); BulletRecord record = RecordBox.get().add("field", "foo").getRecord(); long start = System.currentTimeMillis(); IntStream.range(0, 10).mapToObj(i -> makeRecordTuple(record, System.currentTimeMillis())) .forEach(bolt::execute); long end = System.currentTimeMillis(); double actualLatecy = context.getDoubleMetric(TopologyConstants.LATENCY_METRIC); Assert.assertTrue(actualLatecy <= end - start); }