@Override
public String toString() {
  return this.dataset.datasetURN() + "@" + this.name;
}
public DatasetVerificationException(Dataset dataset, Throwable cause) {
  // Chain the cause through super() so getCause() and stack traces work,
  // instead of storing it in a field that shadows Throwable's own cause.
  super("Dataset: " + dataset.datasetURN(), cause);
  this.dataset = dataset;
}
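// Usage sketch (hedged): verifyDataset() is a hypothetical helper; the point
// is that callers wrap low-level failures so the offending dataset travels
// with the exception.
try {
  verifyDataset(dataset);
} catch (IOException ioe) {
  throw new DatasetVerificationException(dataset, ioe);
}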
@Override
public void run() {
  log.error("Compaction job for " + dataset.datasetURN() + " failed. Please take a look.");
  this.workingState = WorkUnitState.WorkingState.FAILED;
}
@Override
public void onSuccess(Void arg0) {
  // Release the latch so the cleaner knows this dataset is done.
  DatasetCleaner.this.finishCleanSignal.get().countDown();
  LOG.info("Successfully cleaned: " + dataset.datasetURN());
  Instrumented.markMeter(DatasetCleaner.this.datasetsCleanSuccessMeter);
}
/**
 * Returns the tier of the requested dataset: the key of the first tier whose
 * pattern matches the dataset URN, or Integer.MAX_VALUE if none match.
 */
private int findTier(Requestor<SimpleDatasetRequest> requestor) {
  Dataset dataset = ((SimpleDatasetRequestor) requestor).getDataset();
  for (Map.Entry<Integer, Pattern> tier : tiersMap.entrySet()) {
    Pattern pattern = tier.getValue();
    if (pattern.matcher(dataset.datasetURN()).find()) {
      return tier.getKey();
    }
  }
  return Integer.MAX_VALUE;
}
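// A minimal sketch of how tiersMap might be populated; the "tier.<n>" property
// naming is an assumption, not an actual config key. A TreeMap keeps iteration
// in ascending tier order, which findTier's first-match semantics rely on.
private final Map<Integer, Pattern> tiersMap = new TreeMap<>();

private void loadTiers(Properties props) {
  for (String key : props.stringPropertyNames()) {
    if (key.startsWith("tier.")) {
      int tier = Integer.parseInt(key.substring("tier.".length()));
      this.tiersMap.put(tier, Pattern.compile(props.getProperty(key)));
    }
  }
}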
private void submitEvent(String eventName) {
  Map<String, String> eventMetadataMap =
      ImmutableMap.of(CompactionSlaEventHelper.DATASET_URN, this.dataset.datasetURN());
  this.eventSubmitter.submit(eventName, eventMetadataMap);
}
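// Sketch of the surrounding wiring, assuming Gobblin's EventSubmitter.Builder
// and Instrumented.getMetricContext(state, class); the "gobblin.compaction"
// namespace string is illustrative only.
this.eventSubmitter = new EventSubmitter.Builder(
    Instrumented.getMetricContext(state, getClass()), "gobblin.compaction").build();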
@Override
public void onFailure(Throwable throwable) {
  // Always release the latch, even on failure, so the cleaner does not hang.
  DatasetCleaner.this.finishCleanSignal.get().countDown();
  LOG.warn("Exception caught when cleaning " + dataset.datasetURN() + ".", throwable);
  DatasetCleaner.this.throwables.add(throwable);
  Instrumented.markMeter(DatasetCleaner.this.datasetsCleanFailureMeter);
  DatasetCleaner.this.eventSubmitter.submit(RetentionEvents.CleanFailed.EVENT_NAME,
      ImmutableMap.of(
          RetentionEvents.CleanFailed.FAILURE_CONTEXT_METADATA_KEY,
          ExceptionUtils.getFullStackTrace(throwable),
          RetentionEvents.DATASET_URN_METADATA_KEY, dataset.datasetURN()));
}
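// Sketch of how the onSuccess/onFailure callbacks above can be wired up with
// Guava (com.google.common.util.concurrent). The CleanableDataset cast,
// executor shape, and the cleanCallback variable are assumptions.
ListeningExecutorService service =
    MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(4));
ListenableFuture<Void> cleanFuture = service.submit(() -> {
  ((CleanableDataset) dataset).clean();
  return null;
});
Futures.addCallback(cleanFuture, cleanCallback, MoreExecutors.directExecutor());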
@Override
public int compare(FileSet<CopyEntity> p1, FileSet<CopyEntity> p2) {
  // Order by the user-provided comparator first; break ties by dataset URN,
  // then by file-set name, so the overall ordering is total and deterministic.
  int userProvidedCompare = this.userProvidedComparator.compare(p1, p2);
  if (userProvidedCompare != 0) {
    return userProvidedCompare;
  }
  int datasetCompare = p1.getDataset().datasetURN().compareTo(p2.getDataset().datasetURN());
  if (datasetCompare != 0) {
    return datasetCompare;
  }
  return p1.getName().compareTo(p2.getName());
}
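// Usage sketch: FileSetComparator is a hypothetical name for the class holding
// compare(...) above; since ties are fully broken, the sort is deterministic.
List<FileSet<CopyEntity>> fileSets = new ArrayList<>(workUnitsMap.keySet());
fileSets.sort(new FileSetComparator(userProvidedComparator));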
if (!vd.verifiedResult.allVerificationPassed) {
  if (vd.verifiedResult.shouldRetry) {
    log.error("Dataset {} verification failed but will be retried", vd.dataset.datasetURN());
    failedDatasets.add(vd.dataset);
  } else {
    log.error("Dataset {} verification failed and will not be retried", vd.dataset.datasetURN());
    log.info("{} timed out; giving up verification and adding a failed task", dataset.datasetURN());
  }
}
/**
 * Three steps are performed for a compaction task:
 *   1. Run verifications before the map-reduce job is launched.
 *   2. Start the map-reduce job and wait until it finishes.
 *   3. Run post-actions after the map-reduce job finishes.
 */
@Override
public void run() {
  List<CompactionVerifier> verifiers = this.suite.getMapReduceVerifiers();
  for (CompactionVerifier verifier : verifiers) {
    if (!verifier.verify(dataset)) {
      log.error("Verification {} for {} did not pass.", verifier.getName(), dataset.datasetURN());
      this.onMRTaskComplete(false, new IOException("Compaction verification for MR failed"));
      return;
    }
  }
  super.run();
}
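// A minimal verifier sketch matching the getName()/verify(dataset) usage in
// run() above; the non-empty-URN check itself is a hypothetical example rule.
public class NonEmptyDatasetVerifier implements CompactionVerifier {
  @Override
  public String getName() {
    return "NonEmptyDatasetVerifier";
  }

  @Override
  public boolean verify(Dataset dataset) {
    // Fail verification for datasets with a missing or empty URN.
    return dataset.datasetURN() != null && !dataset.datasetURN().isEmpty();
  }
}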
log.info("Simulate mode enabled. Will not execute the copy."); for (Map.Entry<FileSet<CopyEntity>, Collection<WorkUnit>> entry : workUnitsMap.asMap().entrySet()) { log.info(String.format("Actions for dataset %s file set %s.", entry.getKey().getDataset().datasetURN(), entry.getKey().getName())); for (WorkUnit workUnit : entry.getValue()) {