/**
 * Initializes the table by resolving its backing key-value store from the task context.
 *
 * @param context the {@link Context} for this task; must have been set on this instance
 * @throws NullPointerException if the context or the backing store is missing
 */
@Override
public void init(Context context) {
  super.init(context);
  Preconditions.checkNotNull(this.context, "Must specify context for local tables.");

  // Look up the backing store that SamzaContainer registered under this table's id.
  kvStore = this.context.getTaskContext().getStore(tableId);
  // Use Guava's lazy %s-template overload instead of an eagerly evaluated
  // String.format: the message is only rendered when the check actually fails.
  Preconditions.checkNotNull(kvStore,
      "Backing store for table %s was not injected by SamzaContainer", tableId);

  logger.info("Initialized backing store for table " + tableId);
}
StreamTableJoinOperatorImpl(StreamTableJoinOperatorSpec<K, M, R, JM> joinOpSpec, Context context) { this.joinOpSpec = joinOpSpec; this.table = context.getTaskContext().getTable(joinOpSpec.getTableId()); }
/**
 * Builds one rate limiter per tag, dividing each tag's target rate evenly
 * across all tasks in the job.
 *
 * @param context the {@link Context} for this task
 */
@Override
public void init(Context context) {
  // The job model, task count and task name are independent of the tag being
  // processed, so compute them once instead of once per map entry.
  JobModel jobModel = ((TaskContextImpl) context.getTaskContext()).getJobModel();
  int numTasks = jobModel.getContainers().values().stream()
      .mapToInt(cm -> cm.getTasks().size())
      .sum();
  TaskName taskName = context.getTaskContext().getTaskModel().getTaskName();

  this.tagToRateLimiterMap = Collections.unmodifiableMap(tagToTargetRateMap.entrySet().stream()
      .map(e -> {
        String tag = e.getKey();
        // NOTE(review): integer division can yield 0 when the target rate is
        // below the task count, and RateLimiter.create(0) throws
        // IllegalArgumentException — confirm upstream validation guarantees
        // targetRate >= numTasks.
        int effectiveRate = e.getValue() / numTasks;
        LOGGER.info(String.format("Effective rate limit for task %s and tag %s is %d",
            taskName, tag, effectiveRate));
        return new ImmutablePair<>(tag,
            com.google.common.util.concurrent.RateLimiter.create(effectiveRate));
      })
      .collect(Collectors.toMap(ImmutablePair::getKey, ImmutablePair::getValue)));
  initialized = true;
}
/** Wires up the mock context, task model, metrics registries and an in-memory window store. */
@Before
public void setup() {
  // Minimal job configuration required by the operator graph under test.
  Map<String, String> configs = new HashMap<>();
  configs.put("job.default.system", "kafka");
  configs.put("job.name", "jobName");
  configs.put("job.id", "jobId");
  this.config = new MapConfig(configs);

  this.context = new MockContext();
  when(this.context.getJobContext().getConfig()).thenReturn(this.config);

  // Task model with a single input partition and a fixed task name.
  TaskModel mockTaskModel = mock(TaskModel.class);
  when(mockTaskModel.getSystemStreamPartitions()).thenReturn(ImmutableSet
      .of(new SystemStreamPartition("kafka", "integTestExecutionPlannerers", new Partition(0))));
  when(mockTaskModel.getTaskName()).thenReturn(new TaskName("task 1"));
  when(this.context.getTaskContext().getTaskModel()).thenReturn(mockTaskModel);

  when(this.context.getTaskContext().getTaskMetricsRegistry()).thenReturn(new MetricsRegistryMap());
  when(this.context.getContainerContext().getContainerMetricsRegistry()).thenReturn(new MetricsRegistryMap());

  // Back the window operator's store with an in-memory implementation.
  Serde windowKeySerde = new TimeSeriesKeySerde(new IntegerSerde());
  Serde windowValSerde = KVSerde.of(new IntegerSerde(), new IntegerSerde());
  when(this.context.getTaskContext().getStore("jobName-jobId-window-w1"))
      .thenReturn(new TestInMemoryStore<>(windowKeySerde, windowValSerde));
}
// Stub the task model so the operator graph can resolve the current task name.
TaskModel taskModel = mock(TaskModel.class);
when(taskModel.getTaskName()).thenReturn(mockTaskName);
when(this.context.getTaskContext().getTaskModel()).thenReturn(taskModel);
// Back the two sides of join "j1" with mock key-value stores; store names
// follow the "<jobName>-<jobId>-join-<joinId>-<L|R>" convention.
when(this.context.getTaskContext().getStore(eq("jobName-jobId-join-j1-L"))).thenReturn(mockLeftStore);
KeyValueStore mockRightStore = mock(KeyValueStore.class);
when(this.context.getTaskContext().getStore(eq("jobName-jobId-join-j1-R"))).thenReturn(mockRightStore);
// Build the operator impl graph under test from the application's spec graph.
OperatorImplGraph opImplGraph = new OperatorImplGraph(graphSpec.getOperatorSpecGraph(), this.context, mock(Clock.class));
/** Creates a mock context exposing metrics registries and a stub task model. */
@Before
public void setup() {
  this.context = new MockContext();
  // Metrics registries for container and task scope; stubbing order is irrelevant.
  when(this.context.getContainerContext().getContainerMetricsRegistry()).thenReturn(new MetricsRegistryMap());
  when(this.context.getTaskContext().getTaskMetricsRegistry()).thenReturn(new MetricsRegistryMap());
  when(this.context.getTaskContext().getTaskModel()).thenReturn(mock(TaskModel.class));
}
/**
 * Constructs a utility for registering metrics of a table.
 *
 * <p>NOTE(review): the original javadoc said "based on container context",
 * but the code reads the task-scoped metrics registry from the context.
 *
 * @param context {@link Context} for this task
 * @param table underlying table
 * @param tableId table Id
 * @throws NullPointerException if any argument is null
 */
public TableMetricsUtil(Context context, Table table, String tableId) {
  Preconditions.checkNotNull(context);
  Preconditions.checkNotNull(table);
  Preconditions.checkNotNull(tableId);

  // Metrics are grouped by the concrete table class and keyed by tableId.
  this.metricsRegistry = context.getTaskContext().getTaskMetricsRegistry();
  this.groupName = table.getClass().getSimpleName();
  this.tableId = tableId;
}
private StreamOperatorTask createStreamOperatorTask(Clock clock, StreamApplicationDescriptorImpl graphSpec) throws Exception { Map<String, String> mapConfig = new HashMap<>(); mapConfig.put("job.name", "jobName"); mapConfig.put("job.id", "jobId"); StreamTestUtils.addStreamConfigs(mapConfig, "inStream", "insystem", "instream"); StreamTestUtils.addStreamConfigs(mapConfig, "inStream2", "insystem", "instream2"); Context context = new MockContext(new MapConfig(mapConfig)); TaskModel taskModel = mock(TaskModel.class); when(taskModel.getSystemStreamPartitions()).thenReturn(ImmutableSet .of(new SystemStreamPartition("insystem", "instream", new Partition(0)), new SystemStreamPartition("insystem", "instream2", new Partition(0)))); when(context.getTaskContext().getTaskModel()).thenReturn(taskModel); when(context.getTaskContext().getTaskMetricsRegistry()).thenReturn(new MetricsRegistryMap()); when(context.getContainerContext().getContainerMetricsRegistry()).thenReturn(new MetricsRegistryMap()); // need to return different stores for left and right side IntegerSerde integerSerde = new IntegerSerde(); TimestampedValueSerde timestampedValueSerde = new TimestampedValueSerde(new KVSerde(integerSerde, integerSerde)); when(context.getTaskContext().getStore(eq("jobName-jobId-join-j1-L"))) .thenReturn(new TestInMemoryStore(integerSerde, timestampedValueSerde)); when(context.getTaskContext().getStore(eq("jobName-jobId-join-j1-R"))) .thenReturn(new TestInMemoryStore(integerSerde, timestampedValueSerde)); StreamOperatorTask sot = new StreamOperatorTask(graphSpec.getOperatorSpecGraph(), clock); sot.init(context); return sot; }
@Before public void setup() { this.context = new MockContext(); // individual tests can override this config if necessary when(this.context.getJobContext().getConfig()).thenReturn(mock(Config.class)); TaskModel taskModel = mock(TaskModel.class); when(taskModel.getTaskName()).thenReturn(new TaskName("task 0")); when(this.context.getTaskContext().getTaskModel()).thenReturn(taskModel); when(this.context.getTaskContext().getTaskMetricsRegistry()).thenReturn(new MetricsRegistryMap()); when(this.context.getContainerContext().getContainerMetricsRegistry()).thenReturn(new MetricsRegistryMap()); }
/**
 * Constructs a utility for registering metrics of a table.
 *
 * <p>NOTE(review): the original javadoc said "based on container context",
 * but the code reads the task-scoped metrics registry from the context.
 *
 * @param context {@link Context} for this task
 * @param table underlying table
 * @param tableId table Id
 * @throws NullPointerException if any argument is null
 */
public TableMetricsUtil(Context context, Table table, String tableId) {
  Preconditions.checkNotNull(context);
  Preconditions.checkNotNull(table);
  Preconditions.checkNotNull(tableId);

  // Metrics are grouped by the concrete table class and keyed by tableId.
  this.metricsRegistry = context.getTaskContext().getTaskMetricsRegistry();
  this.groupName = table.getClass().getSimpleName();
  this.tableId = tableId;
}
@Override public void init(Context context) { String rightStoreName = joinOpSpec.getRightOpId(); rightStreamState = (KeyValueStore<Object, TimestampedValue<Object>>) context.getTaskContext().getStore(rightStoreName); // user-defined joinFn should only be initialized once, // so we do it only in left partial join function and not here again. }
/**
 * Builds one rate limiter per tag, dividing each tag's target rate evenly
 * across all tasks in the job.
 *
 * @param context the {@link Context} for this task
 */
@Override
public void init(Context context) {
  // The job model, task count and task name are independent of the tag being
  // processed, so compute them once instead of once per map entry.
  JobModel jobModel = ((TaskContextImpl) context.getTaskContext()).getJobModel();
  int numTasks = jobModel.getContainers().values().stream()
      .mapToInt(cm -> cm.getTasks().size())
      .sum();
  TaskName taskName = context.getTaskContext().getTaskModel().getTaskName();

  this.tagToRateLimiterMap = Collections.unmodifiableMap(tagToTargetRateMap.entrySet().stream()
      .map(e -> {
        String tag = e.getKey();
        // NOTE(review): integer division can yield 0 when the target rate is
        // below the task count, and RateLimiter.create(0) throws
        // IllegalArgumentException — confirm upstream validation guarantees
        // targetRate >= numTasks.
        int effectiveRate = e.getValue() / numTasks;
        LOGGER.info(String.format("Effective rate limit for task %s and tag %s is %d",
            taskName, tag, effectiveRate));
        return new ImmutablePair<>(tag,
            com.google.common.util.concurrent.RateLimiter.create(effectiveRate));
      })
      .collect(Collectors.toMap(ImmutablePair::getKey, ImmutablePair::getValue)));
  initialized = true;
}
SendToTableOperatorImpl(SendToTableOperatorSpec<K, V> sendToTableOpSpec, Context context) { this.sendToTableOpSpec = sendToTableOpSpec; this.table = context.getTaskContext().getTable(sendToTableOpSpec.getTableId()); }
/**
 * Constructs a utility for registering metrics of a table.
 *
 * <p>NOTE(review): the original javadoc said "based on container context",
 * but the code reads the task-scoped metrics registry from the context.
 *
 * @param context {@link Context} for this task
 * @param table underlying table
 * @param tableId table Id
 * @throws NullPointerException if any argument is null
 */
public TableMetricsUtil(Context context, Table table, String tableId) {
  Preconditions.checkNotNull(context);
  Preconditions.checkNotNull(table);
  Preconditions.checkNotNull(tableId);

  // Metrics are grouped by the concrete table class and keyed by tableId.
  this.metricsRegistry = context.getTaskContext().getTaskMetricsRegistry();
  this.groupName = table.getClass().getSimpleName();
  this.tableId = tableId;
}
@Override public void init(Context context) { String rightStoreName = joinOpSpec.getRightOpId(); rightStreamState = (KeyValueStore<Object, TimestampedValue<Object>>) context.getTaskContext().getStore(rightStoreName); // user-defined joinFn should only be initialized once, // so we do it only in left partial join function and not here again. }
/**
 * Builds one rate limiter per tag, dividing each tag's target rate evenly
 * across all tasks in the job.
 *
 * @param context the {@link Context} for this task
 */
@Override
public void init(Context context) {
  // The job model, task count and task name are independent of the tag being
  // processed, so compute them once instead of once per map entry.
  JobModel jobModel = ((TaskContextImpl) context.getTaskContext()).getJobModel();
  int numTasks = jobModel.getContainers().values().stream()
      .mapToInt(cm -> cm.getTasks().size())
      .sum();
  TaskName taskName = context.getTaskContext().getTaskModel().getTaskName();

  this.tagToRateLimiterMap = Collections.unmodifiableMap(tagToTargetRateMap.entrySet().stream()
      .map(e -> {
        String tag = e.getKey();
        // NOTE(review): integer division can yield 0 when the target rate is
        // below the task count, and RateLimiter.create(0) throws
        // IllegalArgumentException — confirm upstream validation guarantees
        // targetRate >= numTasks.
        int effectiveRate = e.getValue() / numTasks;
        LOGGER.info(String.format("Effective rate limit for task %s and tag %s is %d",
            taskName, tag, effectiveRate));
        return new ImmutablePair<>(tag,
            com.google.common.util.concurrent.RateLimiter.create(effectiveRate));
      })
      .collect(Collectors.toMap(ImmutablePair::getKey, ImmutablePair::getValue)));
  initialized = true;
}
SendToTableOperatorImpl(SendToTableOperatorSpec<K, V> sendToTableOpSpec, Context context) { this.sendToTableOpSpec = sendToTableOpSpec; this.table = (ReadWriteTable) context.getTaskContext().getTable(sendToTableOpSpec.getTableSpec().getId()); }
/**
 * Constructs a utility for registering metrics of a table.
 *
 * <p>NOTE(review): the original javadoc said "based on container context",
 * but the code reads the task-scoped metrics registry from the context.
 *
 * @param context {@link Context} for this task
 * @param table underlying table
 * @param tableId table Id
 * @throws NullPointerException if any argument is null
 */
public TableMetricsUtil(Context context, Table table, String tableId) {
  Preconditions.checkNotNull(context);
  Preconditions.checkNotNull(table);
  Preconditions.checkNotNull(tableId);

  // Metrics are grouped by the concrete table class and keyed by tableId.
  this.metricsRegistry = context.getTaskContext().getTaskMetricsRegistry();
  this.groupName = table.getClass().getSimpleName();
  this.tableId = tableId;
}
@Override public void init(Context context) { String rightStoreName = joinOpSpec.getRightOpId(); rightStreamState = (KeyValueStore<Object, TimestampedValue<Object>>) context.getTaskContext().getStore(rightStoreName); // user-defined joinFn should only be initialized once, // so we do it only in left partial join function and not here again. }
/**
 * Builds one rate limiter per tag, dividing each tag's target rate evenly
 * across all tasks in the job.
 *
 * @param context the {@link Context} for this task
 */
@Override
public void init(Context context) {
  // The job model, task count and task name are independent of the tag being
  // processed, so compute them once instead of once per map entry.
  JobModel jobModel = ((TaskContextImpl) context.getTaskContext()).getJobModel();
  int numTasks = jobModel.getContainers().values().stream()
      .mapToInt(cm -> cm.getTasks().size())
      .sum();
  TaskName taskName = context.getTaskContext().getTaskModel().getTaskName();

  this.tagToRateLimiterMap = Collections.unmodifiableMap(tagToTargetRateMap.entrySet().stream()
      .map(e -> {
        String tag = e.getKey();
        // NOTE(review): integer division can yield 0 when the target rate is
        // below the task count, and RateLimiter.create(0) throws
        // IllegalArgumentException — confirm upstream validation guarantees
        // targetRate >= numTasks.
        int effectiveRate = e.getValue() / numTasks;
        LOGGER.info(String.format("Effective rate limit for task %s and tag %s is %d",
            taskName, tag, effectiveRate));
        return new ImmutablePair<>(tag,
            com.google.common.util.concurrent.RateLimiter.create(effectiveRate));
      })
      .collect(Collectors.toMap(ImmutablePair::getKey, ImmutablePair::getValue)));
  initialized = true;
}