// No-op override: delegates straight to the parent's metric creation and registers nothing
// extra. NOTE(review): this override adds no behavior and could likely be removed — confirm
// it is not kept deliberately as an extension point before deleting.
@Override protected void createMetrics() { super.createMetrics(); }
/**
 * Returns the current count of the job monitor's removed-specs counter.
 *
 * @return the counter value, or 0 when the monitor has not created the counter yet
 */
private long getRemovedSpecs() {
  if (StreamingKafkaSpecConsumer.this._jobMonitor.getRemovedSpecs() == null) {
    return 0;
  }
  return StreamingKafkaSpecConsumer.this._jobMonitor.getRemovedSpecs().getCount();
}
/**
 * Creates the parent's metrics, then registers the meter tracking Kafka messages
 * that failed to parse into job specs.
 */
@Override
protected void createMetrics() {
  super.createMetrics();
  this.messageParseFailures =
      getMetricContext().meter(RuntimeMetrics.GOBBLIN_JOB_MONITOR_KAFKA_MESSAGE_PARSE_FAILURES);
}
// Exercise the monitor's add / remove / bulk-update paths through the mocked Kafka stream.
MockedKafkaJobMonitor monitor = MockedKafkaJobMonitor.create(config);
monitor.startAsync();

URI job1Uri = new URI("job1");
URI job2Uri = new URI("job2");

// Adding "job1" at version 1 yields exactly one spec.
monitor.getMockKafkaStream().pushToStream("job1:1");
monitor.awaitExactlyNSpecs(1);
Assert.assertTrue(monitor.getJobSpecs().containsKey(job1Uri));
Assert.assertEquals(monitor.getJobSpecs().get(job1Uri).getVersion(), "1");

// Adding "job2" brings the catalog to two specs.
monitor.getMockKafkaStream().pushToStream("job2:1");
monitor.awaitExactlyNSpecs(2);
Assert.assertTrue(monitor.getJobSpecs().containsKey(job2Uri));
Assert.assertEquals(monitor.getJobSpecs().get(job2Uri).getVersion(), "1");

// Removing "job1" leaves only "job2" behind.
monitor.getMockKafkaStream().pushToStream(MockedKafkaJobMonitor.REMOVE + ":job1");
monitor.awaitExactlyNSpecs(1);
Assert.assertFalse(monitor.getJobSpecs().containsKey(job1Uri));
Assert.assertTrue(monitor.getJobSpecs().containsKey(job2Uri));

// A single message can carry multiple specs; both jobs should land at version 2.
monitor.getMockKafkaStream().pushToStream("job2:2,job1:2");
monitor.awaitExactlyNSpecs(2);
Assert.assertTrue(monitor.getJobSpecs().containsKey(job1Uri));
Assert.assertEquals(monitor.getJobSpecs().get(job1Uri).getVersion(), "2");
Assert.assertTrue(monitor.getJobSpecs().containsKey(job2Uri));
Assert.assertEquals(monitor.getJobSpecs().get(job2Uri).getVersion(), "2");

monitor.shutDown();
/**
 * Creates the parent's metrics, then registers the counter for SLA events
 * rejected by this monitor.
 */
@Override
protected void createMetrics() {
  super.createMetrics();
  this.rejectedEvents =
      getMetricContext().counter(RuntimeMetrics.GOBBLIN_JOB_MONITOR_SLAEVENT_REJECTEDEVENTS);
}
/**
 * Builds a job spec monitor for the given catalog, configured from the driver's
 * system config scoped to {@code CONFIG_PREFIX} with {@code DEFAULTS} as fallback.
 *
 * @throws IOException if the underlying monitor cannot be created
 */
@Override
public JobSpecMonitor forJobCatalog(GobblinInstanceDriver instanceDriver, MutableJobCatalog jobCatalog)
    throws IOException {
  Config sysConfig = instanceDriver.getSysConfig().getConfig();
  Config monitorConfig = sysConfig.getConfig(CONFIG_PREFIX).withFallback(DEFAULTS);
  return forConfig(monitorConfig, jobCatalog);
}
/**
 * Builds a job spec monitor for the given catalog. The monitor's config is the
 * {@code CONFIG_PREFIX} subtree of the driver's system config, backed by {@code DEFAULTS}.
 *
 * @throws IOException if the underlying monitor cannot be created
 */
@Override
public JobSpecMonitor forJobCatalog(GobblinInstanceDriver instanceDriver, MutableJobCatalog jobCatalog)
    throws IOException {
  Config scopedConfig =
      instanceDriver.getSysConfig().getConfig().getConfig(CONFIG_PREFIX).withFallback(DEFAULTS);
  return forConfig(scopedConfig, jobCatalog);
}
/**
 * Returns the current count of the job monitor's new-specs counter.
 *
 * @return the counter value, or 0 when the monitor has not created the counter yet
 */
private long getNewSpecs() {
  if (StreamingKafkaSpecConsumer.this._jobMonitor.getNewSpecs() == null) {
    return 0;
  }
  return StreamingKafkaSpecConsumer.this._jobMonitor.getNewSpecs().getCount();
}
/**
 * Returns the current count of the job monitor's message-parse-failure meter.
 *
 * @return the meter value, or 0 when the monitor has not created the meter yet
 */
private long getMessageParseFailures() {
  if (StreamingKafkaSpecConsumer.this._jobMonitor.getMessageParseFailures() == null) {
    return 0;
  }
  return StreamingKafkaSpecConsumer.this._jobMonitor.getMessageParseFailures().getCount();
}
}
/**
 * Private constructor: wires the monitor to a mock catalog backed by {@code jobSpecs}
 * and a single mocked Kafka stream. Use {@link #create} to instantiate.
 */
private MockedKafkaJobMonitor(Config config, Map<URI, JobSpec> jobSpecs) {
  super("topic", createMockCatalog(jobSpecs), config);
  // One in-memory stream is enough for the tests; messages are pushed to it directly.
  this.mockKafkaStream = new MockKafkaStream(1);
  this.jobSpecs = jobSpecs;
}
// Stops the underlying Kafka job monitor and blocks until it has fully terminated.
@Override protected void shutDown() { _jobMonitor.stopAsync().awaitTerminated(); }
/**
 * Creates the parent's metrics, then registers counters for job specs newly added
 * and removed via Kafka messages.
 */
@Override
protected void createMetrics() {
  super.createMetrics();
  this.newSpecs =
      getMetricContext().counter(RuntimeMetrics.GOBBLIN_JOB_MONITOR_KAFKA_NEW_SPECS);
  this.removedSpecs =
      getMetricContext().counter(RuntimeMetrics.GOBBLIN_JOB_MONITOR_KAFKA_REMOVED_SPECS);
}
/**
 * Extends the parent's metric tags with the name of the schema this monitor decodes.
 */
@Override
protected List<Tag<?>> getTagsForMetrics() {
  List<Tag<?>> metricTags = super.getTagsForMetrics();
  metricTags.add(new Tag<>(RuntimeMetrics.SCHEMA, this.schema.getName()));
  return metricTags;
}
@Override protected void startUp() { // listener will add job specs to a blocking queue to send to callers of changedSpecs() // IMPORTANT: This addListener should be invoked after job catalog has been initialized. This is guaranteed because // StreamingKafkaSpecConsumer is boot after jobCatalog in GobblinClusterManager::startAppLauncherAndServices() _jobCatalog.addListener(new JobSpecListener()); _jobMonitor.startAsync().awaitRunning(); }
// Supplies the in-memory mock streams in place of real Kafka consumer streams.
@Override protected List<KafkaStream<byte[], byte[]>> createStreams() { return this.mockKafkaStream.getMockStreams(); }
/**
 * Factory method: builds a mocked monitor with an empty, concurrent job-spec map.
 */
public static MockedKafkaJobMonitor create(Config config) {
  Map<URI, JobSpec> specs = Maps.<URI, JobSpec>newConcurrentMap();
  return new MockedKafkaJobMonitor(config, specs);
}
// Shuts the mock stream down first (unblocking any consumers waiting on its queues),
// then runs the regular monitor shutdown.
@Override public void shutDown() { this.mockKafkaStream.shutdown(); super.shutDown(); } }
/**
 * Builds {@code numStreams} mock Kafka streams, each backed by its own blocking queue
 * of fetched data chunks and an offset counter starting at 0.
 */
public MockKafkaStream(int numStreams) {
  this.queues = Lists.newArrayList();
  this.mockStreams = Lists.newArrayList();
  this.offsets = Lists.newArrayList();
  // -1 so the first increment selects stream 0.
  this.nextStream = new AtomicLong(-1);
  for (int streamIdx = 0; streamIdx < numStreams; streamIdx++) {
    BlockingQueue<FetchedDataChunk> chunkQueue = Queues.newLinkedBlockingQueue();
    this.queues.add(chunkQueue);
    this.mockStreams.add(createMockStream(chunkQueue));
    this.offsets.add(new AtomicLong(0));
  }
}
// Supplies the in-memory mock streams in place of real Kafka consumer streams.
@Override protected List<KafkaStream<byte[], byte[]>> createStreams() { return this.mockKafkaStream.getMockStreams(); }