// NOTE(review): truncated fragment — the enclosing method signature and the tail of both loops are
// outside this view. What is visible: for each output stream of `componentId` it looks up that
// stream's declared fields and, per downstream component, asks GrouperFactory for a
// LoadAwareCustomStreamGrouping targeting that component's tasks; it also pre-seeds `ret` with a
// null entry for every declared stream (presumably so streams with no consumers still appear —
// TODO confirm against the full method).
Map<String, Map<String, LoadAwareCustomStreamGrouping>> ret = new HashMap<>(); Map<String, Map<String, Grouping>> outputGroupings = workerTopologyContext.getTargets(componentId); for (Map.Entry<String, Map<String, Grouping>> entry : outputGroupings.entrySet()) { String streamId = entry.getKey(); Map<String, Grouping> componentGrouping = entry.getValue(); Fields outFields = workerTopologyContext.getComponentOutputFields(componentId, streamId); Map<String, LoadAwareCustomStreamGrouping> componentGrouper = new HashMap<String, LoadAwareCustomStreamGrouping>(); for (Map.Entry<String, Grouping> cg : componentGrouping.entrySet()) { String component = cg.getKey(); Grouping grouping = cg.getValue(); List<Integer> outTasks = workerTopologyContext.getComponentTasks(component); LoadAwareCustomStreamGrouping grouper = GrouperFactory.mkGrouper( workerTopologyContext, componentId, streamId, outFields, grouping, outTasks, topoConf); for (String stream : workerTopologyContext.getComponentCommon(componentId).get_streams().keySet()) { if (!ret.containsKey(stream)) { ret.put(stream, null);
// Initializes the load/locality-aware shuffle state for this worker:
// - records this worker's NodeInfo (host + port) and the cluster-wide task->node/port map;
// - sizes the precomputed choice ring: 1 slot for a single target, otherwise
//   max(1000, 5 * #targets) — mirroring ShuffleGrouping so the ring is never empty;
// - loads the DNS-to-switch mapping plugin and the locality higher/lower bounds from conf;
// - starts in Scope.WORKER_LOCAL and pre-builds one singleton target list per task in `rets`,
//   with `orig` tracking each target's index/weight;
// - allocates both the active `choices` array and the spare `prepareChoices` array that
//   updateRing() fills and swaps in (double-buffering; the initial updateRing(null) call
//   populates the first ring before any load feedback exists).
// NOTE(review): the unchecked List<Integer>[] cast is the standard generic-array workaround.
@Override public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) { random = new Random(); sourceNodeInfo = new NodeInfo(context.getThisWorkerHost(), Sets.newHashSet((long) context.getThisWorkerPort())); taskToNodePort = context.getTaskToNodePort(); this.targetTasks = targetTasks; capacity = targetTasks.size() == 1 ? 1 : Math.max(1000, targetTasks.size() * 5); conf = context.getConf(); dnsToSwitchMapping = ReflectionUtils.newInstance((String) conf.get(Config.STORM_NETWORK_TOPOGRAPHY_PLUGIN)); localityGroup = new HashMap<>(); currentScope = Scope.WORKER_LOCAL; higherBound = ObjectReader.getDouble(conf.get(Config.TOPOLOGY_LOCALITYAWARE_HIGHER_BOUND)); lowerBound = ObjectReader.getDouble(conf.get(Config.TOPOLOGY_LOCALITYAWARE_LOWER_BOUND)); rets = (List<Integer>[]) new List<?>[targetTasks.size()]; int i = 0; for (int target : targetTasks) { rets[i] = Arrays.asList(target); orig.put(target, new IndexAndWeights(i)); i++; } // can't leave choices to be empty, so initiate it similar as ShuffleGrouping choices = new int[capacity]; current = new AtomicInteger(0); // allocate another array to be switched prepareChoices = new int[capacity]; updateRing(null); }
/**
 * Captures the identifying coordinates (topology id, component, task and worker port)
 * under which this task's metrics will be registered.
 *
 * @param context     worker-level topology context supplying storm id and worker port
 * @param componentId id of the component this task executes
 * @param taskid      id of the task within the topology
 */
public TaskMetrics(WorkerTopologyContext context, String componentId, Integer taskid) {
    this.componentId = componentId;
    this.taskId = taskid;
    this.topologyId = context.getStormId();
    this.workerPort = context.getThisWorkerPort();
}
/**
 * Remembers the candidate target tasks and, when grouping fields were configured,
 * resolves the declared output fields of the source stream for later lookups.
 */
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    this.targetTasks = targetTasks;
    boolean groupingByFields = this.fields != null;
    if (groupingByFields) {
        this.outFields = context.getComponentOutputFields(stream);
    }
}
// Logs the error and forwards it to cluster state, rate-limited per interval:
// once `errorIntervalSecs` have elapsed since the interval started, the error counter and
// interval start are reset; the error is only persisted while the interval's count stays
// at or below `maxPerInterval` (errors beyond that are logged locally but not reported).
// The reported error is tagged with this worker's hostname and port.
// NOTE(review): intervalErrors/intervalStartTime appear to be atomics shared across threads;
// the reset-then-increment sequence is not a single atomic step — presumably acceptable for
// best-effort rate limiting. UnknownHostException from hostname lookup is rethrown unchecked.
@Override public void report(Throwable error) { LOG.error("Error", error); if (Time.deltaSecs(intervalStartTime.get()) > errorIntervalSecs) { intervalErrors.set(0); intervalStartTime.set(Time.currentTimeSecs()); } if (intervalErrors.incrementAndGet() <= maxPerInterval) { try { stormClusterState.reportError(stormId, componentId, Utils.hostname(), workerTopologyContext.getThisWorkerPort().longValue(), error); } catch (UnknownHostException e) { throw Utils.wrapInRuntime(e); } } } }
/**
 * Pairs each source task with exactly one target task by sorting both id lists and
 * matching them positionally, so an identity grouping always routes task i to task i.
 *
 * @throws RuntimeException if the source and target components have different task counts
 */
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> tasks) {
    List<Integer> sourceTasks = new ArrayList<>(context.getComponentTasks(stream.get_componentId()));
    Collections.sort(sourceTasks);
    if (sourceTasks.size() != tasks.size()) {
        throw new RuntimeException("Can only do an identity grouping when source and target have same number of tasks");
    }
    List<Integer> sortedTargets = new ArrayList<>(tasks);
    Collections.sort(sortedTargets);
    int position = 0;
    for (int sourceTask : sourceTasks) {
        _precomputed.put(sourceTask, Arrays.asList(sortedTargets.get(position)));
        position++;
    }
}
/**
 * {@inheritDoc}
 *
 * <p>Resolves each configured partition key name to its index within the source
 * stream's declared output fields, so key extraction at emit time is index-based.
 */
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    this.targetTasks = targetTasks;
    this.partitionKeyIndexes = new ArrayList<>();
    Fields declaredFields = context.getComponentOutputFields(stream);
    for (String keyName : partitionKeyNames) {
        this.partitionKeyIndexes.add(declaredFields.fieldIndex(keyName));
    }
}
// NOTE(review): truncated fragment — these arguments (hostname, worker port, component/task ids,
// current time, interval) appear to build an IMetricsConsumer.TaskInfo for a metrics tick, and the
// list collects the data points to send; the constructor call and surrounding method are outside
// this view — confirm against the full file.
hostname, workerTopologyContext.getThisWorkerPort(), componentId, taskId, Time.currentTimeSecs(), interval); List<IMetricsConsumer.DataPoint> dataPoints = new ArrayList<>();
/**
 * Builds the one-to-one source-to-target routing table for an identity grouping:
 * both task id lists are sorted and matched index-by-index.
 *
 * @throws RuntimeException if the two components do not have the same number of tasks
 */
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> tasks) {
    List<Integer> sources = new ArrayList<>(context.getComponentTasks(stream.get_componentId()));
    Collections.sort(sources);
    if (sources.size() != tasks.size()) {
        throw new RuntimeException("Can only do an identity grouping when source and target have same number of tasks");
    }
    List<Integer> targets = new ArrayList<>(tasks);
    Collections.sort(targets);
    for (int pos = 0; pos < sources.size(); pos++) {
        _precomputed.put(sources.get(pos), Arrays.asList(targets.get(pos)));
    }
}
/**
 * Builds a Mockito-backed {@code WorkerTopologyContext} with canned configuration,
 * task-to-node mapping, host and port, so grouping tests can run without a real worker.
 * Every task id in {@code availableTaskIds} is mapped to the same node/port as this worker.
 */
private WorkerTopologyContext mockContext(List<Integer> availableTaskIds) {
    Map<String, Object> conf = new HashMap<>();
    conf.put(Config.STORM_NETWORK_TOPOGRAPHY_PLUGIN, "org.apache.storm.networktopography.DefaultRackDNSToSwitchMapping");
    conf.put(Config.TOPOLOGY_LOCALITYAWARE_HIGHER_BOUND, 0.8);
    conf.put(Config.TOPOLOGY_LOCALITYAWARE_LOWER_BOUND, 0.2);

    NodeInfo sharedNode = new NodeInfo("node-id", Sets.newHashSet(6700L));
    Map<Integer, NodeInfo> taskNodeToPort = new HashMap<>();
    for (Integer taskId : availableTaskIds) {
        taskNodeToPort.put(taskId, sharedNode);
    }

    WorkerTopologyContext context = mock(WorkerTopologyContext.class);
    when(context.getConf()).thenReturn(conf);
    when(context.getTaskToNodePort()).thenReturn(new AtomicReference<>(taskNodeToPort));
    when(context.getThisWorkerHost()).thenReturn("node-id");
    when(context.getThisWorkerPort()).thenReturn(6700);
    return context;
}
/**
 * Fetches (registering on first use) the Meter identified by this task's
 * fully-qualified metric name.
 */
public static Meter meter(String name, WorkerTopologyContext context, String componentId, Integer taskId, String streamId) {
    return REGISTRY.meter(
        metricName(name, context.getStormId(), componentId, streamId, taskId, context.getThisWorkerPort()));
}
/**
 * A field-based partial-key grouping must alternate the same key between exactly
 * two candidate tasks: consecutive choices differ, and the third repeats the first.
 */
@Test
public void testChooseTasksFields() {
    PartialKeyGrouping pkg = new PartialKeyGrouping(new Fields("test"));
    WorkerTopologyContext context = mock(WorkerTopologyContext.class);
    when(context.getComponentOutputFields(any(GlobalStreamId.class))).thenReturn(new Fields("test"));
    pkg.prepare(context, mock(GlobalStreamId.class), Lists.newArrayList(0, 1, 2, 3, 4, 5));

    Values message = new Values("key1");
    List<Integer> first = pkg.chooseTasks(0, message);
    List<Integer> second = pkg.chooseTasks(0, message);
    List<Integer> third = pkg.chooseTasks(0, message);

    assertThat(first.size(), is(1));
    assertThat(second, is(not(first)));
    assertThat(third, is(not(second)));
    assertThat(third, is(first));
}
}
// NOTE(review): isolated Mockito stub — makes the mocked context report `port` as this worker's
// port; the surrounding test method and the declaration of `port` are outside this view.
when(context.getThisWorkerPort()).thenReturn(port.intValue());
/**
 * Fetches (registering on first use) the Counter identified by this task's
 * fully-qualified metric name.
 */
public static Counter counter(String name, WorkerTopologyContext context, String componentId, Integer taskId, String streamId) {
    return REGISTRY.counter(
        metricName(name, context.getStormId(), componentId, streamId, taskId, context.getThisWorkerPort()));
}
/**
 * Stores the target task list, allocates one emit counter per target, and — when
 * grouping fields were configured — resolves the source stream's output fields.
 */
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    this.targetTasks = targetTasks;
    targetTaskStats = new long[targetTasks.size()];
    if (this.fields != null) {
        this.outFields = context.getComponentOutputFields(stream);
    }
}
/**
 * Creates the metric holder for one task, keyed by topology id, component id,
 * task id and the port of the worker executing it.
 */
public TaskMetrics(WorkerTopologyContext context, String componentId, Integer taskid) {
    this.topologyId = context.getStormId();
    this.workerPort = context.getThisWorkerPort();
    this.componentId = componentId;
    this.taskId = taskid;
}
/**
 * {@inheritDoc}
 *
 * <p>Precomputes the position of every partition key within the stream's declared
 * output fields so the key tuple can be assembled by plain index lookups.
 */
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    this.targetTasks = targetTasks;
    this.partitionKeyIndexes = new ArrayList<>();
    Fields declared = context.getComponentOutputFields(stream);
    partitionKeyNames.forEach(keyName -> partitionKeyIndexes.add(declared.fieldIndex(keyName)));
}
/** Looks up (or lazily registers) the per-task Counter under its canonical metric name. */
public static Counter counter(String name, WorkerTopologyContext context, String componentId, Integer taskId, String streamId) {
    String fullName = metricName(name, context.getStormId(), componentId, streamId, taskId, context.getThisWorkerPort());
    return REGISTRY.counter(fullName);
}
/** Looks up (or lazily registers) the per-task Meter under its canonical metric name. */
public static Meter meter(String name, WorkerTopologyContext context, String componentId, Integer taskId, String streamId) {
    String fullName = metricName(name, context.getStormId(), componentId, streamId, taskId, context.getThisWorkerPort());
    return REGISTRY.meter(fullName);
}