/**
 * Looks up the normalized resource request for a single spout component.
 *
 * @param topology     the topology containing the spout
 * @param topologyConf the topology configuration supplying resource defaults
 * @param componentId  the id of the spout component to look up
 * @return the spout's resource request, or {@code null} when the topology has
 *     no spouts or no spout with the given id exists
 */
public static NormalizedResourceRequest getSpoutResources(StormTopology topology, Map<String, Object> topologyConf, String componentId) {
    if (topology.get_spouts() != null) {
        SpoutSpec spout = topology.get_spouts().get(componentId);
        // Guard against an unknown component id: without this check the
        // original code dereferenced a null SpoutSpec and threw an NPE.
        if (spout != null) {
            return new NormalizedResourceRequest(spout.get_common(), topologyConf, componentId);
        }
    }
    return null;
}
/**
 * Wiring a spout straight into a bolt must produce exactly one spout and one
 * bolt, with the bolt shuffle-grouped on the spout's default stream.
 */
@Test
public void testSpoutToBolt() throws Exception {
    Stream<Tuple> tuples = streamBuilder.newStream(newSpout(Utils.DEFAULT_STREAM_ID));
    tuples.to(newBolt());
    StormTopology topology = streamBuilder.build();

    assertEquals(1, topology.get_spouts_size());
    assertEquals(1, topology.get_bolts_size());

    String spoutComponentId = topology.get_spouts().keySet().iterator().next();
    Map<GlobalStreamId, Grouping> expectedInputs = new HashMap<>();
    expectedInputs.put(new GlobalStreamId(spoutComponentId, "default"), Grouping.shuffle(new NullStruct()));
    assertEquals(expectedInputs, topology.get_bolts().values().iterator().next().get_common().get_inputs());
}
/**
 * Computes lag information for every spout in the topology.
 *
 * <p>Each spout is probed independently; a failure for one spout is logged
 * and skipped so the remaining spouts are still reported.
 *
 * @param stormTopology the topology whose spouts are inspected
 * @param topologyConf  the topology configuration (currently unused here but
 *     kept for interface compatibility)
 * @return a map from spout id to that spout's lag result; spouts that failed
 *     are absent from the map
 */
public static Map<String, Map<String, Object>> lag(StormTopology stormTopology, Map<String, Object> topologyConf) {
    Map<String, Map<String, Object>> result = new HashMap<>();
    Map<String, SpoutSpec> spouts = stormTopology.get_spouts();
    for (Map.Entry<String, SpoutSpec> spout : spouts.entrySet()) {
        try {
            SpoutSpec spoutSpec = spout.getValue();
            addLagResultForKafkaSpout(result, spout.getKey(), spoutSpec);
        } catch (Exception e) {
            // Parameterized logging instead of string concatenation, and a
            // single call so the message and the stack trace stay in one entry.
            logger.warn("Exception thrown while getting lag for spout id: {}", spout.getKey(), e);
        }
    }
    return result;
}
/**
 * A single always-true branch predicate should yield one spout plus one bolt,
 * with a BranchProcessor node sitting between the spout and the branch stream.
 */
@Test
public void testBranch() throws Exception {
    Stream<Tuple> source = streamBuilder.newStream(newSpout(Utils.DEFAULT_STREAM_ID));
    Stream<Tuple>[] branches = source.branch(x -> true);
    StormTopology topology = streamBuilder.build();

    assertEquals(1, topology.get_spouts_size());
    assertEquals(1, topology.get_bolts_size());

    String spoutId = topology.get_spouts().keySet().iterator().next();
    Map<GlobalStreamId, Grouping> expectedInputs = new HashMap<>();
    expectedInputs.put(new GlobalStreamId(spoutId, "default"), Grouping.shuffle(new NullStruct()));
    assertEquals(expectedInputs, topology.get_bolts().values().iterator().next().get_common().get_inputs());

    assertEquals(1, branches.length);
    Node branchOutput = branches[0].node;
    assertEquals(1, branchOutput.getOutputStreams().size());

    // The branch output stream is the processor's stream with a "-branch" suffix.
    String branchStream = branchOutput.getOutputStreams().iterator().next() + "-branch";
    assertEquals(1, branchOutput.getParents(branchStream).size());
    Node branchParent = branchOutput.getParents(branchStream).iterator().next();
    assertTrue(branchParent instanceof ProcessorNode);
    assertTrue(((ProcessorNode) branchParent).getProcessor() instanceof BranchProcessor);
    assertTrue(branchParent.getParents("default").iterator().next() instanceof SpoutNode);
}
/**
 * Collects the ids of every component in the topology: bolts, spouts and
 * state spouts.
 *
 * @param topology the topology to inspect
 * @return the set of all component ids
 */
public static Set<String> getComponentIds(StormTopology topology) {
    Set<String> componentIds = new HashSet<>(topology.get_bolts().keySet());
    componentIds.addAll(topology.get_spouts().keySet());
    componentIds.addAll(topology.get_state_spouts().keySet());
    return componentIds;
}
/**
 * Parallelism should follow the most recent repartition: bolt1 runs before any
 * repartition, bolt2 after repartition(3), and bolt3/bolt4 after repartition(2).
 */
@Test
public void testRepartition() throws Exception {
    Stream<String> stream = streamBuilder.newStream(newSpout(Utils.DEFAULT_STREAM_ID), new ValueMapper<>(0));
    stream.repartition(3).filter(x -> true).repartition(2).filter(x -> true).aggregate(new Count<>());
    StormTopology topology = streamBuilder.build();

    assertEquals(1, topology.get_spouts_size());
    assertEquals(4, topology.get_bolts_size());
    assertEquals(1, topology.get_spouts().get("spout1").get_common().get_parallelism_hint());

    // Expected parallelism hints for bolt1..bolt4, in order.
    int[] expectedHints = {1, 3, 2, 2};
    for (int i = 0; i < expectedHints.length; i++) {
        Bolt bolt = topology.get_bolts().get("bolt" + (i + 1));
        assertEquals(expectedHints[i], bolt.get_common().get_parallelism_hint());
    }
}
/**
 * Builds a single id-to-component map covering bolts, spouts and state spouts.
 *
 * @param topology the topology to flatten
 * @return a new mutable map from component id to its Thrift component object
 */
public static Map<String, Object> allComponents(StormTopology topology) {
    Map<String, Object> components = new HashMap<>();
    components.putAll(topology.get_bolts());
    components.putAll(topology.get_spouts());
    components.putAll(topology.get_state_spouts());
    return components;
}
/**
 * Builds the input groupings for the event logger: every bolt and spout feeds
 * it on the event logger stream, fields-grouped by component id so all events
 * from one component reach the same event logger task.
 *
 * @param topology the topology whose components feed the event logger
 * @return the event logger's input map
 */
public static Map<GlobalStreamId, Grouping> eventLoggerInputs(StormTopology topology) {
    Map<GlobalStreamId, Grouping> inputs = new HashMap<>();
    Set<String> componentIds = new HashSet<>(topology.get_bolts().keySet());
    componentIds.addAll(topology.get_spouts().keySet());
    for (String componentId : componentIds) {
        inputs.put(Utils.getGlobalStreamId(componentId, EVENTLOGGER_STREAM_ID),
                   Thrift.prepareFieldsGrouping(Arrays.asList("component-id")));
    }
    return inputs;
}
/**
 * Classifies a component as a spout or a bolt.
 *
 * @param workerTopologyContext context giving access to the raw topology
 * @param componentId           the component to classify
 * @return {@code ClientStatsUtil.SPOUT} or {@code ClientStatsUtil.BOLT}
 * @throws RuntimeException if the id matches neither a spout nor a bolt
 */
private static String getExecutorType(WorkerTopologyContext workerTopologyContext, String componentId) {
    StormTopology topology = workerTopologyContext.getRawTopology();
    if (topology.get_spouts().containsKey(componentId)) {
        return ClientStatsUtil.SPOUT;
    }
    if (topology.get_bolts().containsKey(componentId)) {
        return ClientStatsUtil.BOLT;
    }
    throw new RuntimeException("Could not find " + componentId + " in " + topology);
}
/**
 * Computes the normalized resource request for every spout in the topology.
 *
 * @param topology     the topology whose spouts are examined
 * @param topologyConf the topology configuration supplying resource defaults
 * @return a map from spout id to its resource request; empty when the
 *     topology has no spouts
 */
public static Map<String, NormalizedResourceRequest> getSpoutsResources(StormTopology topology, Map<String, Object> topologyConf) {
    Map<String, NormalizedResourceRequest> resourcesBySpout = new HashMap<>();
    Map<String, SpoutSpec> spouts = topology.get_spouts();
    if (spouts != null) {
        for (Map.Entry<String, SpoutSpec> entry : spouts.entrySet()) {
            NormalizedResourceRequest request =
                new NormalizedResourceRequest(entry.getValue().get_common(), topologyConf, entry.getKey());
            // Guarded so the json conf is only fetched when tracing is active.
            if (LOG.isTraceEnabled()) {
                LOG.trace("Turned {} into {}", entry.getValue().get_common().get_json_conf(), request);
            }
            resourcesBySpout.put(entry.getKey(), request);
        }
    }
    return resourcesBySpout;
}
/**
 * Parses the per-spout resource settings out of each spout's JSON conf.
 *
 * @param topology     the topology whose spouts are examined
 * @param topologyConf the topology configuration supplying resource defaults
 * @return a map from spout id to its parsed resource map; empty when the
 *     topology has no spouts
 */
static Map<String, Map<String, Double>> getSpoutsResources(StormTopology topology, Map<String, Object> topologyConf) {
    Map<String, Map<String, Double>> spoutResources = new HashMap<>();
    if (topology.get_spouts() != null) {
        for (Map.Entry<String, SpoutSpec> spout : topology.get_spouts().entrySet()) {
            Map<String, Double> topologyResources = parseResources(spout.getValue().get_common().get_json_conf());
            // Pass the component id (the map key) rather than the SpoutSpec's
            // toString(): the id is what identifies the component in messages,
            // matching how the bolt/spout resource helpers report components.
            checkInitialization(topologyResources, spout.getKey(), topologyConf);
            spoutResources.put(spout.getKey(), topologyResources);
        }
    }
    return spoutResources;
}
/**
 * Builds per-component dictionaries mapping stream names to numeric ids and
 * back, covering every spout, bolt and state spout in the topology.
 *
 * @param topology the topology whose declared output streams are indexed
 */
public IdDictionary(StormTopology topology) {
    // Stream ids are scoped per component, so gather every component name first.
    List<String> componentNames = new ArrayList<>(topology.get_spouts().keySet());
    componentNames.addAll(topology.get_bolts().keySet());
    componentNames.addAll(topology.get_state_spouts().keySet());
    for (String name : componentNames) {
        ComponentCommon common = Utils.getComponentCommon(topology, name);
        List<String> streams = new ArrayList<>(common.get_streams().keySet());
        // Forward mapping (name -> id) via idify, then derive the reverse
        // mapping (id -> name) from it so the two always stay consistent.
        streamNametoId.put(name, idify(streams));
        streamIdToName.put(name, simpleReverseMap(streamNametoId.get(name)));
    }
}
/**
 * Finds the {@code ComponentCommon} for a component id, checking spouts, then
 * bolts, then state spouts.
 *
 * @param topology the topology to search
 * @param id       the component id to look up
 * @return the component's common metadata
 * @throws IllegalArgumentException if no component has the given id
 */
public static ComponentCommon getComponentCommon(StormTopology topology, String id) {
    ComponentCommon common;
    if (topology.get_spouts().containsKey(id)) {
        common = topology.get_spouts().get(id).get_common();
    } else if (topology.get_bolts().containsKey(id)) {
        common = topology.get_bolts().get(id).get_common();
    } else if (topology.get_state_spouts().containsKey(id)) {
        common = topology.get_state_spouts().get(id).get_common();
    } else {
        throw new IllegalArgumentException("Could not find component with id " + id);
    }
    return common;
}
/**
 * Validates component ids across bolts, spouts and state spouts and rejects
 * topologies where the same id is used more than once.
 *
 * @param topology the topology to validate
 * @throws InvalidTopologyException if any component id is duplicated
 */
private static void validateIds(StormTopology topology) throws InvalidTopologyException {
    List<String> componentIds = new ArrayList<>(validateIds(topology.get_bolts()));
    componentIds.addAll(validateIds(topology.get_spouts()));
    componentIds.addAll(validateIds(topology.get_state_spouts()));
    List<String> duplicates = Utils.getRepeat(componentIds);
    if (!duplicates.isEmpty()) {
        throw new WrappedInvalidTopologyException("Duplicate component ids: " + duplicates);
    }
}
/**
 * Builds the acker's input groupings: spouts feed the init stream, and every
 * bolt feeds the ack, fail and reset-timeout streams. All streams are
 * fields-grouped on the tuple "id" so one acker task owns each tuple tree.
 *
 * @param topology the topology whose components feed the acker
 * @return the acker bolt's input map
 */
public static Map<GlobalStreamId, Grouping> ackerInputs(StormTopology topology) {
    Map<GlobalStreamId, Grouping> inputs = new HashMap<>();
    for (String spoutId : topology.get_spouts().keySet()) {
        inputs.put(Utils.getGlobalStreamId(spoutId, Acker.ACKER_INIT_STREAM_ID),
                   Thrift.prepareFieldsGrouping(Arrays.asList("id")));
    }
    for (String boltId : topology.get_bolts().keySet()) {
        for (String ackerStream : Arrays.asList(Acker.ACKER_ACK_STREAM_ID,
                                                Acker.ACKER_FAIL_STREAM_ID,
                                                Acker.ACKER_RESET_TIMEOUT_STREAM_ID)) {
            inputs.put(Utils.getGlobalStreamId(boltId, ackerStream),
                       Thrift.prepareFieldsGrouping(Arrays.asList("id")));
        }
    }
    return inputs;
}
/**
 * Finds the {@code ComponentCommon} for a component id, checking bolts, then
 * spouts, then state spouts.
 *
 * @param topology    the topology to search
 * @param componentId the component id to look up
 * @return the component's common metadata
 * @throws IllegalArgumentException if no component has the given id
 */
public static ComponentCommon getComponentCommon(StormTopology topology, String componentId) {
    Bolt bolt = topology.get_bolts().get(componentId);
    if (bolt != null) {
        return bolt.get_common();
    }
    SpoutSpec spout = topology.get_spouts().get(componentId);
    if (spout != null) {
        return spout.get_common();
    }
    StateSpoutSpec stateSpout = topology.get_state_spouts().get(componentId);
    if (stateSpout != null) {
        return stateSpout.get_common();
    }
    throw new IllegalArgumentException("Could not find component common for " + componentId);
}
}
/**
 * Returns the largest message timeout configured anywhere in the topology:
 * the topology-level setting, raised by any per-spout override found in a
 * spout's JSON conf.
 *
 * @return the effective maximum message timeout in seconds
 * @throws RuntimeException if a spout's JSON conf cannot be parsed
 */
public int maxTopologyMessageTimeout() {
    Integer maxTimeout = ObjectReader.getInt(_topoConf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS));
    for (String spoutId : getRawTopology().get_spouts().keySet()) {
        String jsonConf = getComponentCommon(spoutId).get_json_conf();
        if (jsonConf == null) {
            continue;
        }
        try {
            Map<String, Object> componentConf = (Map) JSONValue.parseWithException(jsonConf);
            Object override = componentConf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS);
            // A missing override falls back to the current maximum, so the
            // result only ever grows.
            maxTimeout = Math.max(ObjectReader.getInt(override, maxTimeout), maxTimeout);
        } catch (ParseException e) {
            throw new RuntimeException(e);
        }
    }
    return maxTimeout;
}
/**
 * Generates executor-to-component assignments for a topology, allocating
 * {@code spoutParallelism} executors per spout and {@code boltParallelism}
 * per bolt, with consecutive task ids.
 *
 * @param topology         the topology whose components are assigned
 * @param spoutParallelism executors to create for each spout
 * @param boltParallelism  executors to create for each bolt
 * @return a map from executor (task range) to its component id
 */
public static Map<ExecutorDetails, String> genExecsAndComps(StormTopology topology, int spoutParallelism, int boltParallelism) {
    Map<ExecutorDetails, String> execToComp = new HashMap<>();
    // Each executor k covers the task range [k, k + 1].
    int task = 0;
    for (Map.Entry<String, SpoutSpec> spout : topology.get_spouts().entrySet()) {
        for (int i = 0; i < spoutParallelism; i++, task++) {
            execToComp.put(new ExecutorDetails(task, task + 1), spout.getKey());
        }
    }
    for (Map.Entry<String, Bolt> bolt : topology.get_bolts().entrySet()) {
        for (int i = 0; i < boltParallelism; i++, task++) {
            execToComp.put(new ExecutorDetails(task, task + 1), bolt.getKey());
        }
    }
    return execToComp;
}
/**
 * Create a new topology to be tracked.
 * @param origTopo the original topology.
 * @param cluster a cluster that should have been launched with tracking enabled.
 */
public TrackedTopology(StormTopology origTopo, ILocalCluster cluster) {
    LOG.warn("CLUSTER {} - {}", cluster, cluster.getTrackedId());
    this.cluster = cluster;
    lastSpoutCommit = new AtomicInteger(0);
    String id = cluster.getTrackedId();
    // Deep-copy first so the tracker wrapping below never mutates the caller's topology.
    topology = origTopo.deepCopy();
    // Re-serialize every bolt with a BoltTracker wrapper so its activity is
    // reported under the cluster's tracked id.
    for (Bolt bolt : topology.get_bolts().values()) {
        IRichBolt obj = (IRichBolt) Thrift.deserializeComponentObject(bolt.get_bolt_object());
        bolt.set_bolt_object(Thrift.serializeComponentObject(new BoltTracker(obj, id)));
    }
    // Same wrapping for spouts, via SpoutTracker.
    for (SpoutSpec spout : topology.get_spouts().values()) {
        IRichSpout obj = (IRichSpout) Thrift.deserializeComponentObject(spout.get_spout_object());
        spout.set_spout_object(Thrift.serializeComponentObject(new SpoutTracker(obj, id)));
    }
}
/**
 * A global aggregate over a parallelism-2 source should build two bolts: a
 * partial-aggregation bolt shuffle-grouped on the spout, and a merge bolt
 * fields-grouped (empty fields = global) on the partial bolt plus an all-
 * grouped punctuation stream.
 */
@Test
public void testGlobalAggregate() throws Exception {
    Stream<String> stream = streamBuilder.newStream(newSpout(Utils.DEFAULT_STREAM_ID), new ValueMapper<>(0), 2);
    stream.aggregate(new Count<>());
    StormTopology topology = streamBuilder.build();

    assertEquals(2, topology.get_bolts_size());
    String spoutId = topology.get_spouts().keySet().iterator().next();

    Map<GlobalStreamId, Grouping> partialBoltInputs = new HashMap<>();
    partialBoltInputs.put(new GlobalStreamId(spoutId, "default"), Grouping.shuffle(new NullStruct()));
    assertEquals(partialBoltInputs, topology.get_bolts().get("bolt1").get_common().get_inputs());

    Map<GlobalStreamId, Grouping> mergeBoltInputs = new HashMap<>();
    mergeBoltInputs.put(new GlobalStreamId("bolt1", "s1"), Grouping.fields(Collections.emptyList()));
    mergeBoltInputs.put(new GlobalStreamId("bolt1", "s1__punctuation"), Grouping.all(new NullStruct()));
    assertEquals(mergeBoltInputs, topology.get_bolts().get("bolt2").get_common().get_inputs());
}