/**
 * Updates the indexing configuration for a sensor from raw JSON bytes.
 *
 * @param sensorType the sensor whose indexing config is being replaced
 * @param data the serialized JSON configuration
 * @throws IOException if the configuration cannot be read or parsed
 */
public void updateSensorIndexingConfig(String sensorType, byte[] data) throws IOException {
  // Delegate to the stream overload so all parsing lives in one place.
  ByteArrayInputStream wrapped = new ByteArrayInputStream(data);
  updateSensorIndexingConfig(sensorType, wrapped);
}
/**
 * Applies a sensor indexing configuration update to the underlying configurations object.
 *
 * @param name the sensor type being updated
 * @param data the serialized JSON configuration
 * @throws IOException if the configuration cannot be read or parsed
 */
@Override
public void update(String name, byte[] data) throws IOException {
  getConfigurations().updateSensorIndexingConfig(name, data);
}
/**
 * Updates the indexing configuration for a sensor from a JSON input stream.
 * Note: the caller retains ownership of the stream and is responsible for closing it.
 *
 * @param sensorType the sensor whose indexing config is being replaced
 * @param io stream containing the JSON configuration
 * @throws IOException if the stream cannot be read or the JSON cannot be parsed
 */
public void updateSensorIndexingConfig(String sensorType, InputStream io) throws IOException {
  // Parse the stream into a generic map, then hand off to the map-based overload.
  Map<String, Object> parsedConfig = JSONUtils.INSTANCE.load(io, JSONUtils.MAP_SUPPLIER);
  updateSensorIndexingConfig(sensorType, parsedConfig);
}
/**
 * Refreshes all sensor indexing configurations from ZooKeeper.
 * For each sensor type found under the INDEXING config node, reads its bytes
 * from ZooKeeper and applies them to the supplied configurations object.
 *
 * @param configurations the configurations object to populate
 * @param client the Curator client connected to ZooKeeper
 * @throws Exception if ZooKeeper access or config parsing fails
 */
public static void updateSensorIndexingConfigsFromZookeeper(IndexingConfigurations configurations, CuratorFramework client) throws Exception {
  updateConfigsFromZookeeper(
      configurations,
      INDEXING,
      sensor -> configurations.updateSensorIndexingConfig(
          sensor, readSensorIndexingConfigBytesFromZookeeper(sensor, client)),
      client);
}
/**
 * Builds a {@link IndexingWriterConfiguration} seeded with a single sensor's indexing config.
 *
 * @param writer the writer name (e.g. "hdfs", "elasticsearch")
 * @param sensor the sensor type the config applies to
 * @param json the sensor's indexing configuration as a JSON string
 * @return a writer configuration backed by the parsed sensor config
 * @throws Exception if the JSON cannot be parsed
 */
private WriterConfiguration createConfig(String writer, String sensor, String json) throws Exception {
  IndexingConfigurations indexingConfig = new IndexingConfigurations();
  // Encode explicitly as UTF-8: the no-arg getBytes() uses the platform default
  // charset, which makes the test environment-dependent.
  indexingConfig.updateSensorIndexingConfig(sensor, json.getBytes(java.nio.charset.StandardCharsets.UTF_8));
  return new IndexingWriterConfiguration(writer, indexingConfig);
}
/**
 * Loads the sample global and per-sensor indexing configurations from the
 * test fixture directory into a fresh {@link IndexingConfigurations}.
 *
 * @return a configurations object populated from {@code TestConstants.SAMPLE_CONFIG_PATH}
 * @throws IOException if any fixture file cannot be read or parsed
 */
public static IndexingConfigurations getSampleIndexingConfigs() throws IOException {
  IndexingConfigurations configurations = new IndexingConfigurations();
  configurations.updateGlobalConfig(
      ConfigurationsUtils.readGlobalConfigFromFile(TestConstants.SAMPLE_CONFIG_PATH));
  Map<String, byte[]> sensorIndexingConfigs =
      ConfigurationsUtils.readSensorIndexingConfigsFromFile(TestConstants.SAMPLE_CONFIG_PATH);
  // Iterate entries directly instead of keySet()+get() to avoid a second lookup per sensor.
  for (Map.Entry<String, byte[]> entry : sensorIndexingConfigs.entrySet()) {
    configurations.updateSensorIndexingConfig(entry.getKey(), entry.getValue());
  }
  return configurations;
} }
// Wire the bolt to its (mocked) ZooKeeper collaborators.
bulkMessageWriterBolt.setCuratorFramework(client);
bulkMessageWriterBolt.setZKCache(cache);
// try-with-resources closes the config stream; the original leaked the FileInputStream.
try (FileInputStream configStream = new FileInputStream(sampleSensorIndexingConfigPath)) {
  bulkMessageWriterBolt.getConfigurations().updateSensorIndexingConfig(sensorType, configStream);
}
bulkMessageWriterBolt.declareOutputFields(declarer);
// Wire the bolt to its (mocked) ZooKeeper collaborators.
bulkMessageWriterBolt.setCuratorFramework(client);
bulkMessageWriterBolt.setZKCache(cache);
// try-with-resources closes the config stream; the original leaked the FileInputStream.
try (FileInputStream configStream = new FileInputStream(sampleSensorIndexingConfigPath)) {
  bulkMessageWriterBolt.getConfigurations().updateSensorIndexingConfig(sensorType, configStream);
}
bulkMessageWriterBolt.declareOutputFields(declarer);
@Test public void testSourceTypeMissing() throws Exception { // setup the bolt BulkMessageWriterBolt<IndexingConfigurations> bulkMessageWriterBolt = new BulkMessageWriterBolt<IndexingConfigurations>( "zookeeperUrl", "INDEXING") .withBulkMessageWriter(bulkMessageWriter) .withMessageGetter(MessageGetters.JSON_FROM_FIELD.name()) .withMessageGetterField("message"); bulkMessageWriterBolt.setCuratorFramework(client); bulkMessageWriterBolt.setZKCache(cache); bulkMessageWriterBolt.getConfigurations().updateSensorIndexingConfig(sensorType, new FileInputStream(sampleSensorIndexingConfigPath)); // initialize the bolt bulkMessageWriterBolt.declareOutputFields(declarer); Map stormConf = new HashMap(); bulkMessageWriterBolt.prepare(stormConf, topologyContext, outputCollector); // create a message with no source type JSONObject message = (JSONObject) new JSONParser().parse(sampleMessageString); message.remove("source.type"); when(tuple.getValueByField("message")).thenReturn(message); // the tuple should be handled as an error and ack'd bulkMessageWriterBolt.execute(tuple); verify(outputCollector, times(1)).emit(eq(Constants.ERROR_STREAM), any()); verify(outputCollector, times(1)).ack(tuple); }
// Wire the bolt to its (mocked) ZooKeeper collaborators.
bulkMessageWriterBolt.setCuratorFramework(client);
bulkMessageWriterBolt.setZKCache(cache);
// try-with-resources closes the config stream; the original leaked the FileInputStream.
try (FileInputStream configStream = new FileInputStream(sampleSensorIndexingConfigPath)) {
  bulkMessageWriterBolt.getConfigurations().updateSensorIndexingConfig(sensorType, configStream);
}
bulkMessageWriterBolt.declareOutputFields(declarer);
/** * If an invalid message is sent to indexing, the message should be handled as an error * and the topology should continue processing. */ @Test public void testMessageInvalid() throws Exception { FakeClock clock = new FakeClock(); // setup the bolt BulkMessageWriterBolt<IndexingConfigurations> bolt = new BulkMessageWriterBolt<IndexingConfigurations>( "zookeeperUrl", "INDEXING") .withBulkMessageWriter(bulkMessageWriter) .withMessageGetter(MessageGetters.JSON_FROM_POSITION.name()) .withMessageGetterField("message"); bolt.setCuratorFramework(client); bolt.setZKCache(cache); bolt.getConfigurations().updateSensorIndexingConfig(sensorType, new FileInputStream(sampleSensorIndexingConfigPath)); // initialize the bolt bolt.declareOutputFields(declarer); Map stormConf = new HashMap(); bolt.prepare(stormConf, topologyContext, outputCollector, clock); // execute a tuple that contains an invalid message byte[] invalidJSON = "this is not valid JSON".getBytes(); when(tuple.getBinary(0)).thenReturn(invalidJSON); bolt.execute(tuple); // the tuple should be handled as an error and ack'd verify(outputCollector, times(1)).emit(eq(Constants.ERROR_STREAM), any()); verify(outputCollector, times(1)).ack(tuple); }
/**
 * Verifies batch-timeout lookup: an empty configurations object yields no
 * timeouts, while a sensor config with a configured timeout yields exactly
 * that one value (7) for the matching writer.
 */
@Test
public void testGetAllConfiguredTimeouts() throws FileNotFoundException, IOException {
  // default: no sensor configs => no configured timeouts
  IndexingWriterConfiguration config =
      new IndexingWriterConfiguration("hdfs", new IndexingConfigurations());
  Assert.assertEquals(0, config.getAllConfiguredTimeouts().size());

  // non-default: load a sensor config that declares a timeout of 7
  IndexingConfigurations iconfigs = new IndexingConfigurations();
  // try-with-resources closes the config stream; the original leaked the FileInputStream.
  try (FileInputStream configStream = new FileInputStream(sampleSensorIndexingConfigPath)) {
    iconfigs.updateSensorIndexingConfig(sensorType, configStream);
  }
  config = new IndexingWriterConfiguration("elasticsearch", iconfigs);
  Assert.assertEquals(1, config.getAllConfiguredTimeouts().size());
  Assert.assertEquals(7, (long) config.getAllConfiguredTimeouts().get(0));
}

@Test