Tabnine Logo
DeciderService
Code IndexAdd Tabnine to your IDE (free)

How to use
DeciderService
in
com.netflix.conductor.core.execution

Best Java code snippets using com.netflix.conductor.core.execution.DeciderService (Showing top 20 results out of 315)

origin: Netflix/conductor

@Test
public void testFork() throws IOException {
  // Load a workflow instance containing a FORK from the test fixture.
  // try-with-resources ensures the classpath stream is closed even if
  // deserialization or an assertion fails (the original leaked it).
  try (InputStream stream = TestDeciderService.class.getResourceAsStream("/test.json")) {
    Workflow workflow = objectMapper.readValue(stream, Workflow.class);

    DeciderOutcome outcome = deciderService.decide(workflow);

    // Forked workflow is still running: five branches scheduled, one task updated.
    assertFalse(outcome.isComplete);
    assertEquals(5, outcome.tasksToBeScheduled.size());
    assertEquals(1, outcome.tasksToBeUpdated.size());
  }
}
origin: Netflix/conductor

/**
 * Returns the list of tasks to schedule for the given workflow task.
 *
 * <p>Convenience overload of the four-argument variant; passes {@code null}
 * for the retried-task id, i.e. this scheduling is not tied to a specific
 * previously-failed task instance.
 *
 * @param workflow       the workflow instance being decided
 * @param taskToSchedule the workflow-definition task to materialize
 * @param retryCount     the retry attempt number for the scheduled task(s)
 * @return the tasks to be scheduled
 */
public List<Task> getTasksToBeScheduled(Workflow workflow,
                    WorkflowTask taskToSchedule, int retryCount) {
  return getTasksToBeScheduled(workflow, taskToSchedule, retryCount, null);
}
origin: Netflix/conductor

    checkForTimeout(taskDefinition.get(), pendingTask);
    if (isResponseTimedOut(taskDefinition.get(), pendingTask)) {
      timeoutTask(taskDefinition.get(), pendingTask);
      pendingTask.setStatus(COMPLETED_WITH_ERRORS);
    } else {
      Task retryTask = retry(taskDefinition.orElse(null), workflowTask, pendingTask, workflow);
      tasksToBeScheduled.put(retryTask.getReferenceTaskName(), retryTask);
      executedTaskRefNames.remove(retryTask.getReferenceTaskName());
    List<Task> nextTasks = getNextTask(workflow, pendingTask);
    nextTasks.forEach(nextTask -> tasksToBeScheduled.putIfAbsent(nextTask.getReferenceTaskName(), nextTask));
    outcome.tasksToBeUpdated.add(pendingTask);
  outcome.tasksToBeScheduled.addAll(unScheduledTasks);
if (outcome.tasksToBeScheduled.isEmpty() && checkForWorkflowCompletion(workflow)) {
  LOGGER.debug("Marking workflow as complete.  workflow=" + workflow.getWorkflowId() + ", tasks=" + workflow.getTasks());
  outcome.isComplete = true;
origin: Netflix/conductor

/**
 * Decides the next set of actions for the given workflow.
 *
 * @param workflow the workflow instance to evaluate
 * @return the decider outcome (tasks to schedule/update, completion flag)
 * @throws TerminateWorkflowException if the evaluation determines the workflow must terminate
 */
public DeciderOutcome decide(Workflow workflow) throws TerminateWorkflowException {
  // A task counts as "executed" unless it was skipped, is queued for a re-run,
  // or is explicitly flagged as not executed. A brand-new workflow has no tasks,
  // so this is false for it.
  boolean hasExecutedTasks = workflow.getTasks().stream()
      .anyMatch(t -> !t.getStatus().equals(SKIPPED) && !t.getStatus().equals(READY_FOR_RERUN) && !t.isExecuted());

  List<Task> tasksToBeScheduled = new LinkedList<>();
  if (!hasExecutedTasks) {
    // Nothing has run yet: seed the workflow with its starting task(s).
    // startWorkflow may return null; normalize to an empty list.
    List<Task> startingTasks = startWorkflow(workflow);
    if (startingTasks != null) {
      tasksToBeScheduled = startingTasks;
    }
  }
  return decide(workflow, tasksToBeScheduled);
}
origin: Netflix/conductor

@VisibleForTesting
List<Task> getNextTask(Workflow workflow, Task task) {
  final WorkflowDef workflowDef = workflow.getWorkflowDefinition();

  // A DECISION system task that has already spawned children ("hasChildren" set
  // in its input data) schedules nothing further itself.
  boolean isDecisionTask = SystemTaskType.is(task.getTaskType())
      && SystemTaskType.DECISION.name().equals(task.getTaskType());
  if (isDecisionTask && task.getInputData().get("hasChildren") != null) {
    return Collections.emptyList();
  }

  // Walk the definition forward from the completed task, stepping over any
  // tasks the workflow marks as skipped, until a runnable task (or the end)
  // is reached.
  WorkflowTask next = workflowDef.getNextTask(task.getReferenceTaskName());
  while (isTaskSkipped(next, workflow)) {
    next = workflowDef.getNextTask(next.getTaskReferenceName());
  }

  return next == null
      ? Collections.emptyList()
      : getTasksToBeScheduled(workflow, next, 0);
}
origin: Netflix/conductor

updateWorkflowOutput(workflow, task);
throw new TerminateWorkflowException(task.getReasonForIncompletion(), status, task);
Workflow workflowInstance = populateWorkflowAndTaskData(workflow);
Map<String, Object> taskInput = parametersUtils.getTaskInputV2(workflowTask.getInputParameters(), workflowInstance, rescheduled.getTaskId(), taskDefinition);
rescheduled.getInputData().putAll(taskInput);
origin: Netflix/conductor

new DeciderService(parametersUtils, queueDAO, metadataDAO, externalPayloadStorageUtils, taskMappers).decide(workflow);
origin: Netflix/conductor

@SuppressWarnings("unchecked")
@Test
public void testUpdateWorkflowOutput() {
  Workflow workflow = new Workflow();
  workflow.setWorkflowDefinition(new WorkflowDef());

  // With no tasks at all, the workflow output should be initialized but empty.
  deciderService.updateWorkflowOutput(workflow, null);
  assertNotNull(workflow.getOutput());
  assertTrue(workflow.getOutput().isEmpty());

  // Add a single task carrying output data; with no output parameters declared
  // on the definition, that task's output should become the workflow output.
  Map<String, Object> lastTaskOutput = new HashMap<>();
  lastTaskOutput.put("taskKey", "taskValue");
  Task lastTask = new Task();
  lastTask.setOutputData(lastTaskOutput);
  workflow.getTasks().add(lastTask);
  when(metadataDAO.get(anyString(), anyInt())).thenReturn(Optional.of(new WorkflowDef()));

  deciderService.updateWorkflowOutput(workflow, null);
  assertNotNull(workflow.getOutput());
  assertEquals("taskValue", workflow.getOutput().get("taskKey"));
}
origin: Netflix/conductor

/**
 * Updates the workflow output.
 *
 * @param workflow the workflow instance
 * @param task     if not null, this task's output is copied to the workflow output when no
 *                 output parameters are specified in the workflow definition;
 *                 if null, the output of the last task in the workflow is used instead
 */
void updateWorkflowOutput(final Workflow workflow, @Nullable Task task) {
  List<Task> tasks = workflow.getTasks();
  if (tasks.isEmpty()) {
    // Nothing has run yet; leave the workflow output untouched.
    return;
  }

  // Fall back to the most recently added task when no explicit task is given.
  Task sourceTask = (task != null) ? task : tasks.get(tasks.size() - 1);

  WorkflowDef workflowDef = workflow.getWorkflowDefinition();
  Map<String, Object> output;
  if (workflowDef.getOutputParameters() != null && !workflowDef.getOutputParameters().isEmpty()) {
    // Explicit output parameters win: evaluate them against the workflow with
    // all externally-stored payloads pulled in.
    Workflow populatedWorkflow = populateWorkflowAndTaskData(workflow);
    output = parametersUtils.getTaskInput(workflowDef.getOutputParameters(), populatedWorkflow, null, null);
  } else if (StringUtils.isNotBlank(sourceTask.getExternalOutputPayloadStoragePath())) {
    // The source task's output lives in external storage; download it and
    // record the read for monitoring.
    output = externalPayloadStorageUtils.downloadPayload(sourceTask.getExternalOutputPayloadStoragePath());
    Monitors.recordExternalPayloadStorageUsage(sourceTask.getTaskDefName(),
        ExternalPayloadStorage.Operation.READ.toString(),
        ExternalPayloadStorage.PayloadType.TASK_OUTPUT.toString());
  } else {
    output = sourceTask.getOutputData();
  }

  workflow.setOutput(output);
  // Offload the workflow output to external storage if it exceeds configured limits.
  externalPayloadStorageUtils.verifyAndUpload(workflow, ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT);
}
origin: Netflix/conductor

workflowTask.getInputParameters().put("env", env);
Task task2 = deciderService.retry(taskDef, workflowTask, task, workflow);
System.out.println(task.getTaskId() + ":\n" + task.getInputData());
System.out.println(task2.getTaskId() + ":\n" + task2.getInputData());
when(metadataDAO.get(anyString(), anyInt())).thenReturn(Optional.of(new WorkflowDef()));
exception.expect(TerminateWorkflowException.class);
deciderService.retry(taskDef, workflowTask, task3, workflow);
origin: Netflix/conductor

@Test
public void testIsResponsedTimeOut() {
  // Task definition allows 10 seconds to respond.
  TaskDef taskDef = new TaskDef();
  taskDef.setName("test_rt");
  taskDef.setResponseTimeoutSeconds(10);

  // The task was last updated 11 seconds ago — past the response timeout.
  Task task = new Task();
  task.setTaskDefName("test_rt");
  task.setStatus(Status.IN_PROGRESS);
  task.setTaskId("aa");
  task.setUpdateTime(System.currentTimeMillis() - TimeUnit.SECONDS.toMillis(11));

  boolean timedOut = deciderService.isResponseTimedOut(taskDef, task);

  assertNotNull(task); // vacuous (task was just constructed); kept from original
  assertTrue(timedOut);
}
origin: Netflix/conductor

@Before
public void setup() {
  // Assign the shared mocks to the FIELDS so that stubbing done in individual
  // tests (on this.metadataDAO etc.) affects the same instances wired into the
  // DeciderService. The original declared a local `MetadataDAO metadataDAO`
  // that shadowed the field assigned two lines earlier, so the field mock was
  // never the one stubbed or injected — removed here.
  metadataDAO = mock(MetadataDAO.class);
  externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class);
  QueueDAO queueDAO = mock(QueueDAO.class);

  // Default metadata returned for any task/workflow lookup.
  TaskDef taskDef = new TaskDef();
  WorkflowDef workflowDef = new WorkflowDef();
  workflowDef.setName("TestDeciderService");
  workflowDef.setVersion(1);
  when(metadataDAO.getTaskDef(any())).thenReturn(taskDef);
  when(metadataDAO.getLatest(any())).thenReturn(Optional.of(workflowDef));

  parametersUtils = new ParametersUtils();

  // One mapper per supported task type, mirroring production wiring.
  Map<String, TaskMapper> taskMappers = new HashMap<>();
  taskMappers.put("DECISION", new DecisionTaskMapper());
  taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO));
  taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper());
  taskMappers.put("JOIN", new JoinTaskMapper());
  taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO));
  taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO));
  taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils));
  taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO));
  taskMappers.put("EVENT", new EventTaskMapper(parametersUtils));
  taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils));
  taskMappers.put("HTTP", new HTTPTaskMapper(parametersUtils, metadataDAO));

  deciderService = new DeciderService(parametersUtils, queueDAO, metadataDAO, externalPayloadStorageUtils, taskMappers);
}
origin: Netflix/conductor

while (isTaskSkipped(taskToSchedule, workflow)) {
  taskToSchedule = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName());
return getTasksToBeScheduled(workflow, taskToSchedule, 0);
origin: com.netflix.conductor/conductor-core

/**
 * Decides the next set of actions for the given workflow.
 *
 * @param workflow the workflow instance to evaluate
 * @return the decider outcome (tasks to schedule/update, completion flag)
 * @throws TerminateWorkflowException if the evaluation determines the workflow must terminate
 */
public DeciderOutcome decide(Workflow workflow) throws TerminateWorkflowException {
  // For a brand-new workflow the task list is empty, so no task matches the
  // "executed" filter below (not skipped, not queued for re-run, not flagged
  // as not-executed).
  List<Task> executedTasks = workflow.getTasks().stream()
      .filter(t -> !t.getStatus().equals(SKIPPED))
      .filter(t -> !t.getStatus().equals(READY_FOR_RERUN))
      .filter(t -> !t.isExecuted())
      .collect(Collectors.toList());

  List<Task> tasksToBeScheduled;
  if (!executedTasks.isEmpty()) {
    tasksToBeScheduled = new LinkedList<>();
  } else {
    // Nothing has run yet: seed with the workflow's starting task(s);
    // startWorkflow may return null, which is normalized to an empty list.
    List<Task> startingTasks = startWorkflow(workflow);
    tasksToBeScheduled = (startingTasks != null) ? startingTasks : new LinkedList<>();
  }
  return decide(workflow, tasksToBeScheduled);
}
origin: com.netflix.conductor/conductor-core

updateWorkflowOutput(workflow, task);
throw new TerminateWorkflowException(task.getReasonForIncompletion(), status, task);
Workflow workflowInstance = populateWorkflowAndTaskData(workflow);
Map<String, Object> taskInput = parametersUtils.getTaskInputV2(workflowTask.getInputParameters(), workflowInstance, rescheduled.getTaskId(), taskDefinition);
rescheduled.getInputData().putAll(taskInput);
origin: Netflix/conductor

  workflow = metadataMapperService.populateWorkflowWithDefinitions(workflow);
deciderService.updateWorkflowOutput(workflow, null);
origin: Netflix/conductor

public List<Task> getTasksToBeScheduled(Workflow workflow,
                    WorkflowTask taskToSchedule, int retryCount, String retriedTaskId) {
  workflow = populateWorkflowAndTaskData(workflow);
  Map<String, Object> input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(),
      workflow, null, null);
origin: Netflix/conductor

@Before
public void init() {
  TestConfiguration config = new TestConfiguration();

  // Mocked collaborators for the executor under test.
  executionDAOFacade = mock(ExecutionDAOFacade.class);
  metadataDAO = mock(MetadataDAO.class);
  queueDAO = mock(QueueDAO.class);
  workflowStatusListener = mock(WorkflowStatusListener.class);
  ExternalPayloadStorageUtils payloadStorageUtils = mock(ExternalPayloadStorageUtils.class);

  ObjectMapper jsonMapper = new ObjectMapper();
  ParametersUtils paramUtils = new ParametersUtils();

  // One mapper per supported task type, mirroring production wiring.
  Map<String, TaskMapper> mappers = new HashMap<>();
  mappers.put("DECISION", new DecisionTaskMapper());
  mappers.put("DYNAMIC", new DynamicTaskMapper(paramUtils, metadataDAO));
  mappers.put("FORK_JOIN", new ForkJoinTaskMapper());
  mappers.put("JOIN", new JoinTaskMapper());
  mappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(paramUtils, jsonMapper, metadataDAO));
  mappers.put("USER_DEFINED", new UserDefinedTaskMapper(paramUtils, metadataDAO));
  mappers.put("SIMPLE", new SimpleTaskMapper(paramUtils));
  mappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(paramUtils, metadataDAO));
  mappers.put("EVENT", new EventTaskMapper(paramUtils));
  mappers.put("WAIT", new WaitTaskMapper(paramUtils));
  mappers.put("HTTP", new HTTPTaskMapper(paramUtils, metadataDAO));

  deciderService = new DeciderService(paramUtils, queueDAO, metadataDAO, payloadStorageUtils, mappers);
  MetadataMapperService metadataMapperService = new MetadataMapperService(metadataDAO);
  workflowExecutor = new WorkflowExecutor(deciderService, metadataDAO, queueDAO, metadataMapperService, workflowStatusListener, executionDAOFacade, config);
}
origin: com.netflix.conductor/conductor-core

    checkForTimeout(taskDefinition.get(), pendingTask);
    if (isResponseTimedOut(taskDefinition.get(), pendingTask)) {
      timeoutTask(taskDefinition.get(), pendingTask);
      pendingTask.setStatus(COMPLETED_WITH_ERRORS);
    } else {
      Task retryTask = retry(taskDefinition.orElse(null), workflowTask, pendingTask, workflow);
      tasksToBeScheduled.put(retryTask.getReferenceTaskName(), retryTask);
      executedTaskRefNames.remove(retryTask.getReferenceTaskName());
    List<Task> nextTasks = getNextTask(workflow, pendingTask);
    nextTasks.forEach(nextTask -> tasksToBeScheduled.putIfAbsent(nextTask.getReferenceTaskName(), nextTask));
    outcome.tasksToBeUpdated.add(pendingTask);
  outcome.tasksToBeScheduled.addAll(unScheduledTasks);
if (outcome.tasksToBeScheduled.isEmpty() && checkForWorkflowCompletion(workflow)) {
  LOGGER.debug("Marking workflow as complete.  workflow=" + workflow.getWorkflowId() + ", tasks=" + workflow.getTasks());
  outcome.isComplete = true;
origin: Netflix/conductor

@Test
public void testWorkflowWithNoTasks() throws Exception {
  // Load a conditional-flow definition and start a fresh workflow from it.
  InputStream stream = TestDeciderOutcomes.class.getResourceAsStream("/conditional_flow.json");
  WorkflowDef def = objectMapper.readValue(stream, WorkflowDef.class);
  assertNotNull(def);

  Workflow workflow = new Workflow();
  workflow.setWorkflowDefinition(def);
  workflow.setStartTime(0);
  workflow.getInput().put("param1", "nested");
  workflow.getInput().put("param2", "one");

  // First decision: workflow is new, so nothing to update and three tasks scheduled.
  DeciderOutcome outcome = deciderService.decide(workflow);
  assertNotNull(outcome);
  assertFalse(outcome.isComplete);
  assertTrue(outcome.tasksToBeUpdated.isEmpty());
  assertEquals(3, outcome.tasksToBeScheduled.size());
  System.out.println(outcome.tasksToBeScheduled);

  // Complete all scheduled tasks and decide again: three tasks flushed as
  // updated, and exactly one follow-up task (junit_task_3) gets scheduled.
  outcome.tasksToBeScheduled.forEach(scheduledTask -> scheduledTask.setStatus(Status.COMPLETED));
  workflow.getTasks().addAll(outcome.tasksToBeScheduled);

  outcome = deciderService.decide(workflow);
  assertFalse(outcome.isComplete);
  assertEquals(outcome.tasksToBeUpdated.toString(), 3, outcome.tasksToBeUpdated.size());
  assertEquals(1, outcome.tasksToBeScheduled.size());
  assertEquals("junit_task_3", outcome.tasksToBeScheduled.get(0).getTaskDefName());
  System.out.println(outcome.tasksToBeScheduled);
}
com.netflix.conductor.core.execution.DeciderService

Most used methods

  • decide
  • getTasksToBeScheduled
  • isResponseTimedOut
  • populateWorkflowAndTaskData
    Populates the workflow input data and the tasks input/output data if stored in external payload storage.
  • retry
  • updateWorkflowOutput
    Updates the workflow output.
  • <init>
  • checkForTimeout
  • checkForWorkflowCompletion
  • getNextTask
  • getNextTasksToBeScheduled
  • isTaskSkipped
  • getNextTasksToBeScheduled,
  • isTaskSkipped,
  • startWorkflow,
  • timeoutTask

Popular in Java

  • Start an intent from android
  • scheduleAtFixedRate (Timer)
  • setScale (BigDecimal)
  • setRequestProperty (URLConnection)
  • Path (java.nio.file)
  • BitSet (java.util)
    The BitSet class implements a bit array [http://en.wikipedia.org/wiki/Bit_array]. Each element is either true or false.
  • Dictionary (java.util)
    Note: Do not use this class since it is obsolete. Please use the Map interface for new implementations.
  • Hashtable (java.util)
    A plug-in replacement for JDK1.5 java.util.Hashtable. This version is based on org.cliffc.high_scale
  • Handler (java.util.logging)
    A Handler object accepts a logging request and exports the desired messages to a target, for example a file or the console.
  • FileUtils (org.apache.commons.io)
    General file manipulation utilities. Facilities are provided in the following areas: * writing to a
  • From CI to AI: The AI layer in your organization
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now