/** Handles only executions belonging to the standalone canary analysis pipeline. */
@Override
public boolean shouldProcessExecution(Execution execution) {
  String pipelineName = execution.getName();
  return CanaryAnalysisService.CANARY_ANALYSIS_PIPELINE_NAME.equals(pipelineName);
}
/**
 * Appends the GenerateCanaryAnalysisResultStage to the graph so that it always runs,
 * inheriting the parent stage's context.
 */
private void addAlwaysRunResultStage(@Nonnull Stage parent, @Nonnull StageGraphBuilder graph) {
  graph.append(resultStage -> {
    resultStage.setType(GenerateCanaryAnalysisResultStage.STAGE_TYPE);
    resultStage.setName(GenerateCanaryAnalysisResultStage.STAGE_DESCRIPTION);
    resultStage.setContext(parent.getContext());
  });
}
private static String resolveMetricSetListId(Stage stage) { Map<String, Object> outputs = stage.getOutputs(); String metricSetListId = (String)outputs.get("metricSetListId"); // TODO(duftler): Remove this once the risk of operating on out-of-date pipelines is low. if (StringUtils.isEmpty(metricSetListId)) { // Fallback to the older key name. metricSetListId = (String)outputs.get("metricSetId"); } return metricSetListId; } }
/**
 * Extracts the serialized canary execution request from the judge stage's context.
 *
 * @throws IllegalArgumentException if the judge stage is absent from the pipeline
 */
public String getCanaryExecutionRequestFromJudgeContext(Execution pipeline) {
  for (Stage candidate : pipeline.getStages()) {
    if (candidate.getRefId().equals(CanaryStageNames.REFID_JUDGE)) {
      Map<String, Object> judgeContext = candidate.getContext();
      return (String) judgeContext.get("canaryExecutionRequest");
    }
  }
  throw new IllegalArgumentException("Unable to find stage '" + CanaryStageNames.REFID_JUDGE +
      "' in pipeline ID '" + pipeline.getId() + "'");
}
@Override public Result cancel(Stage stage) { Map<String, Object> context = stage.getContext(); String canaryPipelineExecutionId = (String) context.getOrDefault("canaryPipelineExecutionId", null); stage.getId(), stage.getExecution().getId(), canaryPipelineExecutionId, stage.getContext()); if (pipeline.getStatus().isComplete()) { log.debug("Not changing status of pipeline execution {} to CANCELED since execution is already completed: {}", canaryPipelineExecutionId, pipeline.getStatus()); return new CancellableStage.Result(stage, new HashMap<>()); } catch (Exception e) { log.error("Failed to cancel stage (stageId: {}, executionId: {}), e: {}", stage.getId(), stage.getExecution().getId(), e.getMessage(), e); stage.getId(), stage.getExecution().getId(), stage.getContext());
/**
 * Verifies that getRunCanaryStages() orders run-canary stages by the numeric suffix
 * after '#' in each stage name, regardless of their order within the execution.
 */
@Test
public void test_that_getRunCanaryStages_returns_the_expected_sorted_list_of_stages_sorted_by_the_number_in_the_stage_name() {
  Execution execution = mock(Execution.class);
  Stage stage = mock(Stage.class);
  when(stage.getExecution()).thenReturn(execution);
  // Deliberately out of order; the "index" context value records the expected sorted position.
  when(execution.getStages()).thenReturn(ImmutableList.of(
      new Stage(null, STAGE_TYPE, "foo #1", Maps.newHashMap(ImmutableMap.of("index", "0"))),
      new Stage(null, STAGE_TYPE, "foo #3", Maps.newHashMap(ImmutableMap.of("index", "2"))),
      new Stage(null, STAGE_TYPE, "foo #2", Maps.newHashMap(ImmutableMap.of("index", "1"))),
      new Stage(null, STAGE_TYPE, "foo #4", Maps.newHashMap(ImmutableMap.of("index", "3")))
  ));

  List<Stage> sorted = task.getRunCanaryStages(stage);

  for (int position = 0; position < 4; position++) {
    assertEquals(String.valueOf(position), sorted.get(position).getContext().get("index"));
  }
}
/**
 * Initiates the canary analysis execution Orca pipeline.
 *
 * <p>Builds and stores the pipeline, then launches it; if launching fails, the
 * stored execution is marked as failed before rethrowing.
 *
 * @param canaryAnalysisConfig The configuration for the canary analysis execution.
 * @return Wrapper object around the execution id.
 * @throws RuntimeException if the pipeline execution could not be started
 */
public CanaryAnalysisExecutionResponse initiateCanaryAnalysisExecution(CanaryAnalysisConfig canaryAnalysisConfig) {
  String application = canaryAnalysisConfig.getApplication();
  PipelineBuilder pipelineBuilder = new PipelineBuilder(application)
      .withName(CANARY_ANALYSIS_PIPELINE_NAME)
      .withPipelineConfigId(application + "-canary-analysis-referee-pipeline")
      .withStage(
          SetupAndExecuteCanariesStage.STAGE_TYPE,
          SetupAndExecuteCanariesStage.STAGE_DESCRIPTION,
          Maps.newHashMap(ImmutableMap.of(
              CANARY_ANALYSIS_CONFIG_CONTEXT_KEY, canaryAnalysisConfig
          )));

  Execution pipeline = pipelineBuilder.withLimitConcurrent(false).build();
  // Persist before launching so the execution is visible even if startup fails.
  executionRepository.store(pipeline);

  try {
    executionLauncher.start(pipeline);
  } catch (Throwable t) {
    log.error("Failed to start pipeline", t);
    handleStartupFailure(pipeline, t);
    // BUGFIX: propagate the original failure as the cause instead of dropping it.
    throw new RuntimeException("Failed to start the canary analysis pipeline execution", t);
  }

  return CanaryAnalysisExecutionResponse.builder().canaryAnalysisExecutionId(pipeline.getId()).build();
}
/** * Gets the run canary stages that contain the results */ @NotNull protected List<Stage> getRunCanaryStages(@Nonnull Stage stage) { // Collect the Run Canary Stages where the parent id is itself // Sorting by number after the # in the name return stage.getExecution().getStages().stream() .filter(s -> s.getType().equals(RunCanaryStage.STAGE_TYPE)) .sorted(Comparator.comparing(s -> Integer.valueOf(StringUtils.substringAfterLast(s.getName(), "#")))) .collect(Collectors.toList()); }
@Nonnull @Override public TaskResult execute(@Nonnull Stage stage) { Map<String, Object> context = stage.getContext(); Map judge1Result = (Map)context.get("judge1Result"); Map judge2Result = (Map)context.get("judge2Result"); // TODO: Now that the plumbing works, perform some kind of actual comparison. Map<String, Map> comparisonResult = ImmutableMap.<String, Map>builder() .put("judge1Result", judge1Result) .put("judge2Result", judge2Result) .build(); Map<String, Map> outputs = Collections.singletonMap("comparisonResult", comparisonResult); return new TaskResult(ExecutionStatus.SUCCEEDED, Collections.emptyMap(), outputs); } }
/**
 * Marks an execution that failed to launch as TERMINAL and cancels it,
 * attributing the cancellation to the system with the startup failure's message.
 */
private void handleStartupFailure(Execution execution, Throwable failure) {
  final String canceledBy = "system";
  final String reason = "Failed on startup: " + failure.getMessage();
  final ExecutionStatus status = ExecutionStatus.TERMINAL;

  log.error("Failed to start {} {}", execution.getType(), execution.getId(), failure);

  executionRepository.updateStatus(execution.getType(), execution.getId(), status);
  executionRepository.cancel(execution.getType(), execution.getId(), canceledBy, reason);
}
/**
 * Collects the metric set list id of every stage whose refId starts with the
 * given prefix; stages with a null refId are skipped.
 */
private List<String> getMetricSetListIds(Execution execution, String stagePrefix) {
  return execution.getStages().stream()
      .filter(s -> {
        String refId = s.getRefId();
        return refId != null && refId.startsWith(stagePrefix);
      })
      .map(s -> resolveMetricSetListId(s))
      .collect(Collectors.toList());
}
/**
 * Serializes the pipeline config to JSON, launches it, and returns the new
 * execution's id.
 */
private String startPipeline(Map config) throws Exception {
  String serializedConfig = kayentaObjectMapper.writeValueAsString(config);
  log.info("Requested pipeline: {}", serializedConfig);
  Execution launched = executionLauncher.start(Execution.ExecutionType.PIPELINE, serializedConfig);
  return launched.getId();
}
}
/**
 * Cancels a running pipeline execution. Completed executions are left untouched.
 */
@ApiOperation(value = "Cancel a pipeline execution")
@RequestMapping(value = "/{executionId}/cancel", method = RequestMethod.PUT)
@ResponseStatus(HttpStatus.ACCEPTED)
void cancel(@PathVariable String executionId) {
  log.info("Cancelling pipeline execution {}...", executionId);
  Execution pipeline = executionRepository.retrieve(Execution.ExecutionType.PIPELINE, executionId);
  if (pipeline.getStatus().isComplete()) {
    // Already finished — cancelling would be a no-op status clobber.
    log.debug("Not changing status of pipeline execution {} to CANCELED since execution is already completed: {}",
        executionId, pipeline.getStatus());
    return;
  }
  executionRepository.cancel(Execution.ExecutionType.PIPELINE, executionId);
  executionRepository.updateStatus(Execution.ExecutionType.PIPELINE, executionId, ExecutionStatus.CANCELED);
}
/**
 * Reads the canary config map out of the set-context stage and converts it to a
 * {@link CanaryConfig}.
 *
 * @throws IllegalArgumentException if the set-context stage is absent from the pipeline
 */
public CanaryConfig getCanaryConfig(Execution pipeline) {
  for (Stage candidate : pipeline.getStages()) {
    if (candidate.getRefId().equals(CanaryStageNames.REFID_SET_CONTEXT)) {
      Map<String, Object> context = candidate.getContext();
      Map<String, Object> canaryConfigMap = (Map<String, Object>) context.get("canaryConfig");
      return objectMapper.convertValue(canaryConfigMap, CanaryConfig.class);
    }
  }
  throw new IllegalArgumentException("Unable to find stage '" + CanaryStageNames.REFID_SET_CONTEXT +
      "' in pipeline ID '" + pipeline.getId() + "'");
}
/**
 * Resolves the metrics and storage accounts from the stage context, parses the
 * Prometheus canary scope, and delegates the metric query to the synchronous
 * query processor.
 *
 * @throws RuntimeException if the "canaryScope" JSON in the context cannot be parsed
 */
@Nonnull
@Override
public TaskResult execute(@Nonnull Stage stage) {
  Map<String, Object> context = stage.getContext();
  String metricsAccountName = (String) context.get("metricsAccountName");
  String storageAccountName = (String) context.get("storageAccountName");
  Map<String, Object> canaryConfigMap = (Map<String, Object>) context.get("canaryConfig");
  CanaryConfig canaryConfig = kayentaObjectMapper.convertValue(canaryConfigMap, CanaryConfig.class);
  int metricIndex = (Integer) stage.getContext().get("metricIndex");

  CanaryScope canaryScope;
  try {
    String scopeJson = (String) stage.getContext().get("canaryScope");
    canaryScope = kayentaObjectMapper.readValue(scopeJson, PrometheusCanaryScope.class);
  } catch (IOException e) {
    log.warn("Unable to parse JSON scope", e);
    throw new RuntimeException(e);
  }

  String resolvedMetricsAccountName = CredentialsHelper.resolveAccountByNameOrType(
      metricsAccountName, AccountCredentials.Type.METRICS_STORE, accountCredentialsRepository);
  String resolvedStorageAccountName = CredentialsHelper.resolveAccountByNameOrType(
      storageAccountName, AccountCredentials.Type.OBJECT_STORE, accountCredentialsRepository);

  return synchronousQueryProcessor.executeQueryAndProduceTaskResult(
      resolvedMetricsAccountName, resolvedStorageAccountName, canaryConfig, metricIndex, canaryScope);
}
}
/**
 * Marks an execution that failed to launch as TERMINAL, cancels it on behalf of
 * the system, and bumps the startup-failure counter.
 */
private void handleStartupFailure(Execution execution, Throwable failure) {
  final String canceledBy = "system";
  final String reason = "Failed on startup: " + failure.getMessage();
  final ExecutionStatus status = ExecutionStatus.TERMINAL;

  log.error("Failed to start {} {}", execution.getType(), execution.getId(), failure);

  executionRepository.updateStatus(execution.getType(), execution.getId(), status);
  executionRepository.cancel(execution.getType(), execution.getId(), canceledBy, reason);
  registry.counter(failureId).increment();
}
/**
 * Looks up the completed canary analysis execution and publishes a
 * completion event for downstream listeners.
 */
@Override
public void processCompletedPipelineExecution(Execution execution) {
  CanaryAnalysisExecutionStatusResponse statusResponse =
      canaryAnalysisService.getCanaryAnalysisExecution(execution.getId());
  applicationEventPublisher.publishEvent(
      new StandaloneCanaryAnalysisExecutionCompletedEvent(this, statusResponse));
}
}
/** Handles only executions whose name matches the mapper's pipeline name. */
@Override
public boolean shouldProcessExecution(Execution execution) {
  String pipelineName = execution.getName();
  return ExecutionMapper.PIPELINE_NAME.equals(pipelineName);
}
/**
 * Builds a status response for the given pipeline, using the storage account
 * name recorded in the set-context stage.
 *
 * @throws IllegalArgumentException if the set-context stage is absent from the pipeline
 */
public CanaryExecutionStatusResponse fromExecution(Execution pipeline) {
  String canaryExecutionId = pipeline.getId();
  for (Stage candidate : pipeline.getStages()) {
    if (candidate.getRefId().equals(CanaryStageNames.REFID_SET_CONTEXT)) {
      Map<String, Object> contextContext = candidate.getContext();
      String storageAccountName = (String) contextContext.get("storageAccountName");
      return fromExecution(storageAccountName, pipeline);
    }
  }
  throw new IllegalArgumentException("Unable to find stage '" + CanaryStageNames.REFID_SET_CONTEXT +
      "' in pipeline ID '" + canaryExecutionId + "'");
}
/**
 * Deserializes the canary execution request recorded in the set-context stage,
 * falling back to the judge stage's context when the set-context stage has none.
 *
 * @return the deserialized request, or null if neither stage carries one
 * @throws IllegalArgumentException if the set-context stage is missing, or if the
 *     stored JSON cannot be deserialized
 */
public CanaryExecutionRequest getCanaryExecutionRequest(Execution pipeline) {
  Stage contextStage = pipeline.getStages().stream()
      .filter(s -> s.getRefId().equals(CanaryStageNames.REFID_SET_CONTEXT))
      .findFirst()
      .orElseThrow(() -> new IllegalArgumentException("Unable to find stage '" +
          CanaryStageNames.REFID_SET_CONTEXT + "' in pipeline ID '" + pipeline.getId() + "'"));

  String requestJson = (String) contextStage.getContext().get("canaryExecutionRequest");
  if (requestJson == null) {
    // Older executions stored the request on the judge stage instead.
    requestJson = getCanaryExecutionRequestFromJudgeContext(pipeline);
  }
  if (requestJson == null) {
    return null;
  }

  try {
    return objectMapper.readValue(requestJson, CanaryExecutionRequest.class);
  } catch (IOException e) {
    log.error("Cannot deserialize canaryExecutionRequest", e);
    throw new IllegalArgumentException("Cannot deserialize canaryExecutionRequest", e);
  }
}