/** Identifies this file set as {@code <dataset URN>@<file set name>}. */
@Override
public String toString() {
  final StringBuilder sb = new StringBuilder();
  sb.append(this.dataset.datasetURN()).append('@').append(this.name);
  return sb.toString();
}
}
// Delegates the URN accessor to this dataset's own URN method so both
// accessors always agree. NOTE(review): overridden interface not visible
// in this chunk — confirm it declares getUrn().
@Override default String getUrn() { return datasetURN(); } }
/**
 * Signals that verification of {@code dataset} failed.
 *
 * @param dataset the dataset whose verification failed
 * @param cause   the underlying failure
 */
public DatasetVerificationException(Dataset dataset, Throwable cause) {
  // Chain the cause through super so getCause(), stack traces, and logging
  // frameworks see it; previously it was only stored in a local field.
  super("Dataset:" + dataset.datasetURN() + " Exception:" + cause, cause);
  this.dataset = dataset;
  this.cause = cause;
}
}
// Pure delegation: this object's URN is the wrapped dataset's URN.
@Override public String getUrn() { return this.dataset.datasetURN(); } }
/** Marks this compaction work unit as failed and records why. */
@Override
public void run() {
  // Use SLF4J parameterized logging throughout instead of mixing string
  // concatenation with a {} placeholder in the same message.
  log.error("Compaction job for {} is failed because of {}", dataset.datasetURN(), failedReason);
  this.workingState = WorkUnitState.WorkingState.FAILED;
}
@Override
public void onSuccess(@Nullable Void result) {
  // Release the latch first so any waiter on finishCleanSignal proceeds promptly.
  ComplianceRetentionJob.this.finishCleanSignal.get().countDown();
  final String urn = dataset.datasetURN();
  log.info("Successfully cleaned: " + urn);
}
@Override
public void onSuccess(@Nullable Void result) {
  // Release the latch first so any waiter on finishCleanSignal proceeds promptly.
  ComplianceRestoreJob.this.finishCleanSignal.get().countDown();
  final String urn = dataset.datasetURN();
  log.info("Successfully restored: " + urn);
}
@Override
public void onSuccess(@Nullable Void result) {
  // Release the latch first so any waiter on finishCleanSignal proceeds promptly.
  ComplianceValidationJob.this.finishCleanSignal.get().countDown();
  final String urn = dataset.datasetURN();
  log.info("Successfully validated: " + urn);
}
/**
 * Returns the first tier whose pattern matches the requestor's dataset URN,
 * or {@link Integer#MAX_VALUE} when no tier matches (lowest priority).
 */
private int findTier(Requestor<SimpleDatasetRequest> requestor) {
  final Dataset requested = ((SimpleDatasetRequestor) requestor).getDataset();
  final String urn = requested.datasetURN();
  // tiersMap iteration order decides precedence when several patterns match.
  for (Map.Entry<Integer, Pattern> entry : tiersMap.entrySet()) {
    if (entry.getValue().matcher(urn).find()) {
      return entry.getKey();
    }
  }
  return Integer.MAX_VALUE;
}
}
/** Emits {@code eventName} tagged with this task's dataset URN. */
private void submitEvent(String eventName) {
  final Map<String, String> metadata =
      ImmutableMap.of(CompactionSlaEventHelper.DATASET_URN, this.dataset.datasetURN());
  this.eventSubmitter.submit(eventName, metadata);
}
@Override
public void onSuccess(Void arg0) {
  // Count down first so any thread blocked on the latch is unblocked immediately.
  DatasetCleaner.this.finishCleanSignal.get().countDown();
  final String urn = dataset.datasetURN();
  LOG.info("Successfully cleaned: " + urn);
  Instrumented.markMeter(DatasetCleaner.this.datasetsCleanSuccessMeter);
}
});
@Override
public void onFailure(Throwable t) {
  // Always release the latch, even on failure, so the job can finish waiting.
  ComplianceRestoreJob.this.finishCleanSignal.get().countDown();
  final String urn = dataset.datasetURN();
  log.warn("Exception caught when restoring " + urn + ".", t);
  ComplianceRestoreJob.this.throwables.add(t);
  // Report the failure event with the full stack trace and dataset URN attached.
  ComplianceRestoreJob.this.eventSubmitter.submit(ComplianceEvents.Restore.FAILED_EVENT_NAME,
      ImmutableMap.of(ComplianceEvents.FAILURE_CONTEXT_METADATA_KEY, ExceptionUtils.getFullStackTrace(t),
          ComplianceEvents.DATASET_URN_METADATA_KEY, urn));
}
});
@Override
public Void call() throws Exception {
  // Guard clause: only ValidatableDataset instances can be validated.
  if (!(dataset instanceof ValidatableDataset)) {
    log.warn(
        "Not an instance of " + ValidatableDataset.class + " Dataset won't be validated " + dataset.datasetURN());
    return null;
  }
  ((ValidatableDataset) dataset).validate();
  return null;
}
});
@Override
public void onFailure(Throwable t) {
  // Always release the latch, even on failure, so the job can finish waiting.
  ComplianceRetentionJob.this.finishCleanSignal.get().countDown();
  final String urn = dataset.datasetURN();
  log.warn("Exception caught when cleaning " + urn + ".", t);
  ComplianceRetentionJob.this.throwables.add(t);
  // Report the failure event with the full stack trace and dataset URN attached.
  ComplianceRetentionJob.this.eventSubmitter.submit(ComplianceEvents.Retention.FAILED_EVENT_NAME,
      ImmutableMap.of(ComplianceEvents.FAILURE_CONTEXT_METADATA_KEY, ExceptionUtils.getFullStackTrace(t),
          ComplianceEvents.DATASET_URN_METADATA_KEY, urn));
}
});
@Override
public void onFailure(Throwable throwable) {
  // Always release the latch, even on failure, so the cleaner can finish waiting.
  DatasetCleaner.this.finishCleanSignal.get().countDown();
  final String urn = dataset.datasetURN();
  LOG.warn("Exception caught when cleaning " + urn + ".", throwable);
  DatasetCleaner.this.throwables.add(throwable);
  Instrumented.markMeter(DatasetCleaner.this.datasetsCleanFailureMeter);
  // Report the failure event with the full stack trace and dataset URN attached.
  DatasetCleaner.this.eventSubmitter.submit(RetentionEvents.CleanFailed.EVENT_NAME,
      ImmutableMap.of(RetentionEvents.CleanFailed.FAILURE_CONTEXT_METADATA_KEY,
          ExceptionUtils.getFullStackTrace(throwable),
          RetentionEvents.DATASET_URN_METADATA_KEY, urn));
}
@Override
public void onFailure(Throwable t) {
  // Always release the latch, even on failure, so the job can finish waiting.
  ComplianceValidationJob.this.finishCleanSignal.get().countDown();
  final String urn = dataset.datasetURN();
  log.warn("Exception caught when validating " + urn + ".", t);
  ComplianceValidationJob.this.throwables.add(t);
  // Report the failure event with the full stack trace and dataset URN attached.
  ComplianceValidationJob.this.eventSubmitter.submit(ComplianceEvents.Validation.FAILED_EVENT_NAME,
      ImmutableMap.of(ComplianceEvents.FAILURE_CONTEXT_METADATA_KEY, ExceptionUtils.getFullStackTrace(t),
          ComplianceEvents.DATASET_URN_METADATA_KEY, urn));
}
});
@Override
public Void call() throws Exception {
  // Guard clause: only CleanableDataset instances can be cleaned.
  if (!(dataset instanceof CleanableDataset)) {
    log.warn(
        "Not an instance of " + CleanableDataset.class + " Dataset won't be cleaned " + dataset.datasetURN());
    return null;
  }
  ((CleanableDataset) dataset).clean();
  return null;
}
});
@Override
public Void call() throws Exception {
  // Guard clause: only RestorableDataset instances can be restored.
  if (!(dataset instanceof RestorableDataset)) {
    log.warn(
        "Not an instance of " + RestorableDataset.class + " Dataset won't be restored " + dataset.datasetURN());
    return null;
  }
  log.info("Trying to restore");
  ((RestorableDataset) dataset).restore();
  return null;
}
});
/**
 * Orders file sets by the user-provided comparator first, then by dataset URN,
 * and finally by file-set name as a deterministic tie-breaker.
 */
@Override
public int compare(FileSet<CopyEntity> p1, FileSet<CopyEntity> p2) {
  final int byUser = this.userProvidedComparator.compare(p1, p2);
  if (byUser != 0) {
    return byUser;
  }
  final int byDataset = p1.getDataset().datasetURN().compareTo(p2.getDataset().datasetURN());
  if (byDataset != 0) {
    return byDataset;
  }
  return p1.getName().compareTo(p2.getName());
}
}
/**
 * Below three steps are performed for a compaction task:
 * Do verifications before a map-reduce job is launched.
 * Start a map-reduce job and wait until it is finished
 * Do post-actions after map-reduce job is finished
 */
@Override
public void run() {
  // Run every configured pre-MR verifier; abort the task on the first failure.
  for (CompactionVerifier verifier : this.suite.getMapReduceVerifiers()) {
    if (verifier.verify(dataset).isSuccessful()) {
      continue;
    }
    log.error("Verification {} for {} is not passed.", verifier.getName(), dataset.datasetURN());
    this.onMRTaskComplete(false, new IOException("Compaction verification for MR is failed"));
    return;
  }
  // All verifications passed; launch the map-reduce job and post-actions.
  super.run();
}