/**
 * Builds the configured {@link GobblinTrackingEvent}.
 *
 * <p>The timestamp is passed as a {@code 0L} placeholder; it is filled in by the
 * metric context when the event is submitted.
 */
public GobblinTrackingEvent build() {
  GobblinTrackingEvent event = new GobblinTrackingEvent(0L, this.namespace, this.name, this.metadata);
  return event;
}
/**
/**
 * Builds the failure event as a {@link GobblinTrackingEvent}.
 *
 * <p>When a root cause has been recorded, its full stack trace is first added to
 * the event metadata under {@code ROOT_CAUSE}.
 */
public GobblinTrackingEvent build() {
  if (this.rootCause != null) {
    this.metadata.put(ROOT_CAUSE, ExceptionUtils.getStackTrace(this.rootCause));
  }
  return new GobblinTrackingEvent(0L, this.namespace, this.name, this.metadata);
}
/** * Submits the {@link org.apache.gobblin.metrics.GobblinTrackingEvent} to the {@link org.apache.gobblin.metrics.MetricContext}. * @param name Name of the event. * @param additionalMetadata Additional metadata to be added to the event. */ public void submit(String name, Map<String, String> additionalMetadata) { if(this.metricContext.isPresent()) { Map<String, String> finalMetadata = Maps.newHashMap(this.metadata); if(!additionalMetadata.isEmpty()) { finalMetadata.putAll(additionalMetadata); } // Timestamp is set by metric context. this.metricContext.get().submitEvent(new GobblinTrackingEvent(0l, this.namespace, name, finalMetadata)); } }
/**
 * Builds the event, attaching JSON-serialized source and destination
 * descriptors to a copy of the builder's metadata.
 */
@Override
public GobblinTrackingEvent build() {
  // Work on a copy so the builder's own metadata map is left untouched.
  Map<String, String> eventData = Maps.newHashMap(this.metadata);
  eventData.put(SOURCE, Descriptor.toJson(this.source));
  eventData.put(DESTINATION, Descriptor.toJson(this.destination));
  return new GobblinTrackingEvent(0L, this.namespace, this.name, eventData);
}
/**
 * Creates an SLA {@link GobblinTrackingEvent} for the given dataset URN.
 * NOTE(review): the namespace is the literal string "namespace" — presumably a
 * test/helper fixture; confirm against callers.
 */
private GobblinTrackingEvent createSLAEvent(String name, URI urn, Map<String, String> additionalMetadata) {
  Map<String, String> eventMetadata = Maps.newHashMap();
  eventMetadata.put(SlaEventKeys.DATASET_URN_KEY, urn.toString());
  eventMetadata.putAll(additionalMetadata);
  return new GobblinTrackingEvent(0L, "namespace", name, eventMetadata);
}
/**
 * Extract the next metric from the Kafka iterator.
 * Assumes existence of the metric has already been checked.
 *
 * @param it Kafka ConsumerIterator
 * @return next metric in the stream
 * @throws IOException if the payload cannot be deserialized from JSON
 */
protected GobblinTrackingEvent nextEvent(Iterator<byte[]> it) throws IOException {
  Assert.assertTrue(it.hasNext());
  byte[] payload = it.next();
  return EventUtils.deserializeReportFromJson(new GobblinTrackingEvent(), payload);
}
}
/**
 * Extracts the next key/value event pair from the iterator.
 *
 * @param it iterator over (key, Avro-serialized event) pairs
 * @return the key together with the deserialized {@link GobblinTrackingEvent}
 * @throws IOException if deserialization fails
 */
private Pair<String, GobblinTrackingEvent> nextKVEvent(Iterator<Pair<String, byte[]>> it) throws IOException {
  Assert.assertTrue(it.hasNext());
  Pair<String, byte[]> next = it.next();
  GobblinTrackingEvent deserialized =
      EventUtils.deserializeReportFromAvroSerialization(new GobblinTrackingEvent(), next.getValue());
  return Pair.of(next.getKey(), deserialized);
}
/**
 * Extracts the next Avro-serialized event from the iterator.
 * Assumes existence of the event has already been checked.
 */
@Override
@SuppressWarnings("unchecked")
protected GobblinTrackingEvent nextEvent(Iterator<byte[]> it) throws IOException {
  Assert.assertTrue(it.hasNext());
  byte[] serialized = it.next();
  return EventUtils.deserializeReportFromAvroSerialization(new GobblinTrackingEvent(), serialized);
}
}
/**
 * Converts an R2 {@link RestResponse} into an {@link R2ResponseStatus}.
 *
 * <p>On success the response entity and content type are captured. On an
 * unsuccessful status, a tracking event carrying the status code and the
 * request is emitted: a {@link FailureEventBuilder} event for non-CONTINUE
 * statuses, or a plain {@link GobblinTrackingEvent} for CONTINUE.
 */
@Override
public R2ResponseStatus handleResponse(Request<RestRequest> request, RestResponse response) {
  R2ResponseStatus status = new R2ResponseStatus(StatusType.OK);
  int statusCode = response.getStatus();
  status.setStatusCode(statusCode);
  HttpUtils.updateStatusType(status, statusCode, errorCodeWhitelist);

  if (status.getType() == StatusType.OK) {
    status.setContent(response.getEntity());
    status.setContentType(response.getHeader(CONTENT_TYPE_HEADER));
  } else {
    // SLF4J parameterized logging instead of eager string concatenation.
    log.info("Receive an unsuccessful response with status code: {}", statusCode);
    Map<String, String> metadata = Maps.newHashMap();
    metadata.put(HttpConstants.STATUS_CODE, String.valueOf(statusCode));
    metadata.put(HttpConstants.REQUEST, request.toString());
    if (status.getType() != StatusType.CONTINUE) {
      // Hard failure: report through the failure event builder.
      FailureEventBuilder failureEvent = new FailureEventBuilder(R2_FAILED_REQUEST_EVENT);
      failureEvent.addAdditionalMetadata(metadata);
      failureEvent.submit(metricsContext);
    } else {
      // CONTINUE-type status: emit a plain tracking event instead.
      GobblinTrackingEvent event =
          new GobblinTrackingEvent(0L, R2_RESPONSE_EVENT_NAMESPACE, R2_FAILED_REQUEST_EVENT, metadata);
      metricsContext.submitEvent(event);
    }
  }
  return status;
}
}
final String eventNamespace = "testNamespace"; GobblinTrackingEvent event = new GobblinTrackingEvent(0L, eventNamespace, eventName, Maps.newHashMap());
@Test
public void testSimple() throws Exception {
  TestKafkaAvroJobMonitor monitor =
      new TestKafkaAvroJobMonitor(GobblinTrackingEvent.SCHEMA$, new NoopSchemaVersionWriter());
  monitor.buildMetricsContextAndMetrics();

  AvroSerializer<GobblinTrackingEvent> serializer =
      new AvroBinarySerializer<>(GobblinTrackingEvent.SCHEMA$, new NoopSchemaVersionWriter());
  GobblinTrackingEvent event =
      new GobblinTrackingEvent(0L, "namespace", "event", Maps.<String, String>newHashMap());

  // Serializer and monitor use the same (no-op) version writer, so parsing succeeds.
  Collection<Either<JobSpec, URI>> parsed = monitor.parseJobSpec(serializer.serializeRecord(event));

  Assert.assertEquals(parsed.size(), 1);
  Assert.assertEquals(monitor.events.size(), 1);
  Assert.assertEquals(monitor.events.get(0), event);

  monitor.shutdownMetrics();
}
@Test
public void testUsingSchemaVersion() throws Exception {
  TestKafkaAvroJobMonitor monitor =
      new TestKafkaAvroJobMonitor(GobblinTrackingEvent.SCHEMA$, new FixedSchemaVersionWriter());
  monitor.buildMetricsContextAndMetrics();

  AvroSerializer<GobblinTrackingEvent> serializer =
      new AvroBinarySerializer<>(GobblinTrackingEvent.SCHEMA$, new FixedSchemaVersionWriter());
  GobblinTrackingEvent event =
      new GobblinTrackingEvent(0L, "namespace", "event", Maps.<String, String>newHashMap());

  // Serializer and monitor use the same fixed version writer, so parsing succeeds.
  Collection<Either<JobSpec, URI>> parsed = monitor.parseJobSpec(serializer.serializeRecord(event));

  Assert.assertEquals(parsed.size(), 1);
  Assert.assertEquals(monitor.events.size(), 1);
  Assert.assertEquals(monitor.events.get(0), event);

  monitor.shutdownMetrics();
}
.withSchemaRegistry(registry).build("localhost:0000", "topic"); GobblinTrackingEvent event = new GobblinTrackingEvent(0l, "namespace", "name", Maps.<String, String>newHashMap());
@Test
public void testWrongSchemaVersionWriter() throws Exception {
  TestKafkaAvroJobMonitor monitor =
      new TestKafkaAvroJobMonitor(GobblinTrackingEvent.SCHEMA$, new NoopSchemaVersionWriter());
  monitor.buildMetricsContextAndMetrics();

  // Serializer writes a fixed schema version the monitor's no-op writer cannot read.
  AvroSerializer<GobblinTrackingEvent> serializer =
      new AvroBinarySerializer<>(GobblinTrackingEvent.SCHEMA$, new FixedSchemaVersionWriter());
  GobblinTrackingEvent event =
      new GobblinTrackingEvent(0L, "namespace", "event", Maps.<String, String>newHashMap());

  Collection<Either<JobSpec, URI>> parsed = monitor.parseJobSpec(serializer.serializeRecord(event));

  // Mismatched version writers: nothing parses, and the failure counter increments.
  Assert.assertEquals(parsed.size(), 0);
  Assert.assertEquals(monitor.events.size(), 0);
  Assert.assertEquals(monitor.getMessageParseFailures().getCount(), 1);

  monitor.shutdownMetrics();
}
// NOTE: Avro-generated builder code — do not hand-edit; regenerate from the schema instead.
@Override
public GobblinTrackingEvent build() {
  try {
    GobblinTrackingEvent record = new GobblinTrackingEvent();
    // For each field: use the explicitly set value when its flag is set,
    // otherwise fall back to the schema-declared default.
    record.timestamp = fieldSetFlags()[0] ? this.timestamp : (java.lang.Long) defaultValue(fields()[0]);
    record.namespace = fieldSetFlags()[1] ? this.namespace : (java.lang.String) defaultValue(fields()[1]);
    record.name = fieldSetFlags()[2] ? this.name : (java.lang.String) defaultValue(fields()[2]);
    record.metadata = fieldSetFlags()[3] ? this.metadata : (java.util.Map<java.lang.String,java.lang.String>) defaultValue(fields()[3]);
    return record;
  } catch (Exception e) {
    // Avro builder convention: wrap any failure in AvroRuntimeException.
    throw new org.apache.avro.AvroRuntimeException(e);
  }
}
}
String eventName = "testEvent"; GobblinTrackingEvent event = new GobblinTrackingEvent(); event.setName(eventName); event.setNamespace(namespace);
String eventName = "testEvent"; GobblinTrackingEvent event = new GobblinTrackingEvent(); event.setName(eventName); event.setNamespace(namespace); Assert.assertNull(retrievedEvent.getKey()); event = new GobblinTrackingEvent(); event.setName(eventName); event.setNamespace(namespace);
String eventName = "testEvent"; GobblinTrackingEvent event = new GobblinTrackingEvent(); event.setName(eventName); event.setNamespace(namespace);
/**
 * Builds the configured {@link GobblinTrackingEvent}.
 *
 * <p>The timestamp is passed as a {@code 0L} placeholder; it is filled in by the
 * metric context when the event is submitted.
 */
public GobblinTrackingEvent build() {
  GobblinTrackingEvent event = new GobblinTrackingEvent(0L, this.namespace, this.name, this.metadata);
  return event;
}
/**
/**
 * Builds the failure event as a {@link GobblinTrackingEvent}.
 *
 * <p>When a root cause has been recorded, its full stack trace is first added to
 * the event metadata under {@code ROOT_CAUSE}.
 */
public GobblinTrackingEvent build() {
  if (this.rootCause != null) {
    this.metadata.put(ROOT_CAUSE, ExceptionUtils.getStackTrace(this.rootCause));
  }
  return new GobblinTrackingEvent(0L, this.namespace, this.name, this.metadata);
}