public void flush() { logger.info("crawled {} running app metrics", appMetricEntities.size()); HadoopQueueMessageId messageId = new HadoopQueueMessageId(DataType.METRIC, DataSource.RUNNING_APPS, System.currentTimeMillis()); List<GenericMetricEntity> metrics = new ArrayList<>(appMetricEntities.values()); collector.emit(new ValuesArray(DataSource.RUNNING_APPS, DataType.METRIC, metrics), messageId); logger.info("crawled {} accepted apps", acceptedApps.size()); messageId = new HadoopQueueMessageId(DataType.ENTITY, DataSource.RUNNING_APPS, System.currentTimeMillis()); List<YarnAppAPIEntity> entities = new ArrayList<>(acceptedApps); collector.emit(new ValuesArray(DataSource.RUNNING_APPS, DataType.ENTITY, entities), messageId); acceptedApps.clear(); appMetricEntities.clear(); }
// Varargs constructor: pre-sizes the backing collection and copies each value in order.
public ValuesArray(Object... vals) {
    super(vals.length);
    for (Object o : vals) {
        add(o);
    }
}
}
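// A minimal, self-contained sketch of the same pattern, for illustration only:
// it assumes ValuesArray extends ArrayList<Object> (as the super(vals.length)
// and add(o) calls above suggest); the class and variable names below are
// hypothetical and not part of the original code.
import java.util.ArrayList;
import java.util.List;

class ValuesArraySketch extends ArrayList<Object> {
    ValuesArraySketch(Object... vals) {
        super(vals.length);
        for (Object o : vals) {
            add(o);
        }
    }

    public static void main(String[] args) {
        // Pack heterogeneous fields into one tuple-like value, the way the
        // flush() methods in this section do before calling collector.emit(...).
        List<Object> tuple = new ValuesArraySketch("SCHEDULER", "METRIC", new ArrayList<>());
        System.out.println(tuple.size()); // prints 3
    }
}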
// Emit the buffered scheduler (RunningQueue) metrics and entities downstream, then reset the buffers.
public void flush() {
    LOG.info("Flushing {} RunningQueue metrics in memory", metricEntities.size());
    HadoopQueueMessageId messageId =
            new HadoopQueueMessageId(DataType.METRIC, DataSource.SCHEDULER, System.currentTimeMillis());
    List<GenericMetricEntity> metrics = new ArrayList<>(metricEntities);
    collector.emit(new ValuesArray(DataSource.SCHEDULER, DataType.METRIC, metrics), messageId);

    LOG.info("Flushing {} RunningQueueEntities in memory", runningQueueAPIEntities.size());
    messageId = new HadoopQueueMessageId(DataType.ENTITY, DataSource.SCHEDULER, System.currentTimeMillis());
    List<TaggedLogAPIEntity> entities = new ArrayList<>(runningQueueAPIEntities);
    collector.emit(new ValuesArray(DataSource.SCHEDULER, DataType.ENTITY, entities), messageId);

    runningQueueAPIEntities.clear();
    metricEntities.clear();
}
// Emit the buffered cluster metrics downstream, then reset the internal state.
public void flush() {
    HadoopQueueMessageId messageId =
            new HadoopQueueMessageId(DataType.METRIC, DataSource.CLUSTER_METRIC, System.currentTimeMillis());
    List<GenericMetricEntity> metrics = new ArrayList<>(clusterMetricEntities.values());
    this.collector.emit(new ValuesArray(DataSource.CLUSTER_METRIC, DataType.METRIC, metrics), messageId);
    reset();
}
// Flatten a task attempt entity into a field map keyed by the task attempt id and emit it.
@Override
public void flush(TaskAttemptExecutionAPIEntity entity) {
    Map<String, Object> fields = new HashMap<>(entity.getTags());
    fields.put("startTime", entity.getStartTime());
    fields.put("endTime", entity.getEndTime());
    fields.put("taskStatus", entity.getTaskStatus());
    // Default the error category to an empty string when the tag is absent.
    if (!fields.containsKey(MRJobTagName.ERROR_CATEGORY.toString())) {
        fields.put("errorCategory", "");
    }
    collector.collect(stormStreamId, new ValuesArray(fields.get(MRJobTagName.TASK_ATTEMPT_ID.toString()), fields));
}
}
// Flatten a job execution entity into a field map keyed by the job id and emit it.
@Override
public void flush(JobExecutionAPIEntity entity) {
    Map<String, Object> fields = new HashMap<>(entity.getTags());
    fields.put("submissionTime", entity.getSubmissionTime());
    fields.put("startTime", entity.getStartTime());
    fields.put("endTime", entity.getEndTime());
    fields.put("currentState", entity.getCurrentState());
    fields.put("trackingUrl", entity.getTrackingUrl());
    collector.collect(stormStreamId, new ValuesArray(fields.get(MRJobTagName.JOB_ID.toString()), fields));
}
}
// Emit the job configuration (including any Hive query log) anchored to the MapReduce job id.
collector.emit(new ValuesArray(appInfo.getUser(), mrJob.getId(),
        Constants.ResourceType.JOB_CONFIGURATION, hiveQueryLog), mrJob.getId());
// Flatten an RPC analysis entity into a field map keyed by the job id and emit it.
@Override
public void flush(JobRpcAnalysisAPIEntity entity) {
    Map<String, Object> fields = new HashMap<>(entity.getTags());
    fields.put("trackingUrl", entity.getTrackingUrl());
    fields.put("totalOpsPerSecond", entity.getTotalOpsPerSecond());
    fields.put("mapOpsPerSecond", entity.getMapOpsPerSecond());
    fields.put("reduceOpsPerSecond", entity.getReduceOpsPerSecond());
    fields.put("avgOpsPerTask", entity.getAvgOpsPerTask());
    fields.put("avgOpsPerMap", entity.getAvgOpsPerMap());
    fields.put("avgOpsPerReduce", entity.getAvgOpsPerReduce());
    fields.put("currentState", entity.getCurrentState());
    fields.put("numTotalMaps", entity.getNumTotalMaps());
    fields.put("numTotalReduces", entity.getNumTotalReduces());
    fields.put("duration", entity.getDuration());
    fields.put("avgMapTime", entity.getAvgMapTime());
    fields.put("avgReduceTime", entity.getAvgReduceTime());
    collector.collect(stormStreamId, new ValuesArray(fields.get(MRJobTagName.JOB_ID.toString()), fields));
}