/**
 * Reloads all task definitions from persistence into the local {@code taskDefCache}.
 * On failure the previous cache contents are kept and the error is recorded in monitoring.
 */
private void refreshTaskDefs() {
    try {
        Map<String, TaskDef> map = new HashMap<>();
        getAllTaskDefs().forEach(taskDef -> map.put(taskDef.getName(), taskDef));
        // Swap in the fully built map by reference assignment so readers never
        // observe a partially populated cache.
        this.taskDefCache = map;
        // FIX: use parameterized logging instead of string concatenation so the
        // message is not built when DEBUG is disabled.
        logger.debug("Refreshed task defs {}", this.taskDefCache.size());
    } catch (Exception e) {
        Monitors.error(className, "refreshTaskDefs");
        logger.error("refresh TaskDefs failed ", e);
    }
}
/**
 * Maps validation failures to HTTP responses: constraint violations are client
 * errors (400); any other validation failure is treated as a server error and
 * counted in monitoring. The body is the serialized error from
 * {@code toErrorResponse(exception)}.
 *
 * @param exception the validation failure raised by the resource layer
 * @return a JSON response carrying the serialized error
 */
@Override
public Response toResponse(ValidationException exception) {
    logException(exception);
    Response.ResponseBuilder responseBuilder;
    if (exception instanceof ConstraintViolationException) {
        responseBuilder = Response.status(Response.Status.BAD_REQUEST);
    } else {
        responseBuilder = Response.serverError();
        Monitors.error("error", "error");
    }
    // FIX: removed dead code — the previous version built a Map with an
    // "instance" -> host entry but never attached it to the response; the
    // entity below was (and still is) the only body ever sent.
    responseBuilder.type(MediaType.APPLICATION_JSON_TYPE);
    responseBuilder.entity(toErrorResponse(exception));
    return responseBuilder.build();
}
private void listen() { try { //noinspection InfiniteLoopStatement for(;;) { WorkflowSystemTask workflowSystemTask = queue.poll(60, TimeUnit.SECONDS); if(workflowSystemTask != null && workflowSystemTask.isAsync() && !listeningTasks.contains(workflowSystemTask)) { listen(workflowSystemTask); listeningTasks.add(workflowSystemTask); } } }catch(InterruptedException ie) { Monitors.error(className, "listen"); logger.warn("Error listening for workflow system tasks", ie); } }
private void refresh() { try { Set<String> events = metadataService.getEventHandlers().stream() .map(EventHandler::getEvent) .collect(Collectors.toSet()); List<ObservableQueue> createdQueues = new LinkedList<>(); events.forEach(event -> eventToQueueMap.computeIfAbsent(event, s -> { ObservableQueue q = eventQueues.getQueue(event); createdQueues.add(q); return q; } )); // start listening on all of the created queues createdQueues.stream() .filter(Objects::nonNull) .forEach(this::listen); } catch (Exception e) { Monitors.error(className, "refresh"); logger.error("refresh event queues failed", e); } }
/**
 * Fallback mapper for uncaught throwables. Argument/format problems map to
 * INVALID_INPUT; everything else becomes INTERNAL_ERROR. The failure is logged
 * with the request path and counted in monitoring.
 *
 * @param exception the uncaught throwable
 * @return a JSON response whose status comes from the derived ApplicationException
 */
@Override
public Response toResponse(Throwable exception) {
    LOGGER.error(String.format("Error %s url: '%s'", exception.getClass().getSimpleName(), uriInfo.getPath()), exception);
    Monitors.error("error", "error");
    // IDIOM: pick the error code in one conditional expression instead of a
    // mutable null-initialized local reassigned in both branches.
    Code code = (exception instanceof IllegalArgumentException || exception instanceof InvalidFormatException)
            ? Code.INVALID_INPUT
            : Code.INTERNAL_ERROR;
    ApplicationException applicationException =
            new ApplicationException(code, exception.getMessage(), exception);
    Map<String, Object> entityMap = applicationException.toMap();
    // Tag the response with the host that handled the failing request.
    entityMap.put("instance", host);
    return Response.status(applicationException.getHttpStatusCode())
            .entity(entityMap)
            .type(MediaType.APPLICATION_JSON_TYPE)
            .build();
}
/**
 * Resolves the workflow that owns the given task by querying the task lookup
 * table.
 *
 * @param taskId task id (must be a UUID string)
 * @return the owning workflow id, or {@code null} when no lookup row exists
 * @throws ApplicationException with BACKEND_ERROR on any datastore failure
 */
@VisibleForTesting
String lookupWorkflowIdFromTaskId(String taskId) {
    try {
        ResultSet lookupResult =
                session.execute(selectTaskLookupStatement.bind(UUID.fromString(taskId)));
        // A missing row simply means the task id is unknown.
        return Optional.ofNullable(lookupResult.one())
                .map(record -> record.getUUID(WORKFLOW_ID_KEY).toString())
                .orElse(null);
    } catch (Exception e) {
        Monitors.error(CLASS_NAME, "lookupWorkflowIdFromTaskId");
        String errorMsg = String.format("Failed to lookup workflowId from taskId: %s", taskId);
        LOGGER.error(errorMsg, e);
        throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e);
    }
}
}
/**
 * Converts an {@code ApplicationException} into a JSON response using the
 * exception's own HTTP status code. Internal errors (HTTP 500) are also
 * counted in monitoring.
 *
 * @param e the application exception to serialize
 * @return a JSON response with the exception's status code and map form
 */
@Override
public Response toResponse(ApplicationException e) {
    logException(e);
    if (e.getHttpStatusCode() == 500) {
        Monitors.error("error", "error");
    }
    Map<String, Object> body = e.toMap();
    // Tag the response with the host that handled the request.
    body.put("instance", host);
    return Response.status(e.getHttpStatusCode())
            .type(MediaType.APPLICATION_JSON_TYPE)
            .entity(body)
            .build();
}
/**
 * Deletes a workflow document from the search index. A document that is not
 * found is logged but not rethrown; I/O failures are logged and counted in
 * monitoring.
 *
 * @param workflowId id of the workflow document to remove
 */
@Override
public void removeWorkflow(String workflowId) {
    DeleteRequest deleteRequest = new DeleteRequest(indexName, WORKFLOW_DOC_TYPE, workflowId);
    try {
        DeleteResponse deleteResponse = elasticSearchClient.delete(deleteRequest);
        boolean documentMissing = deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND;
        if (documentMissing) {
            logger.error("Index removal failed - document not found by id: {}", workflowId);
        }
    } catch (IOException e) {
        logger.error("Failed to remove workflow {} from index", workflowId, e);
        Monitors.error(className, "remove");
    }
}
/**
 * Query persistence for all defined {@link TaskDef} data, and cache it in {@link #taskDefCache}.
 * On failure the existing cache contents are left untouched and the error is
 * recorded in monitoring.
 */
private void refreshTaskDefs() {
    try {
        withTransaction(tx -> {
            // Build the replacement snapshot first, then swap the cache
            // contents while holding the cache monitor.
            Map<String, TaskDef> freshDefs = new HashMap<>();
            for (TaskDef taskDef : findAllTaskDefs(tx)) {
                freshDefs.put(taskDef.getName(), taskDef);
            }
            synchronized (taskDefCache) {
                taskDefCache.clear();
                taskDefCache.putAll(freshDefs);
            }
            if (logger.isTraceEnabled()) {
                logger.trace("Refreshed {} TaskDefs", taskDefCache.size());
            }
        });
    } catch (Exception e) {
        Monitors.error(className, "refreshTaskDefs");
        logger.error("refresh TaskDefs failed ", e);
    }
}
/**
 * Starts the background sweeper: a single-threaded scheduler that repeatedly
 * drains workflow ids from the decider queue and sweeps them, with a 500 ms
 * delay between passes.
 *
 * @param workflowExecutor executor used to sweep each retrieved workflow
 */
public void init(WorkflowExecutor workflowExecutor) {
    ScheduledExecutorService deciderPool = Executors.newScheduledThreadPool(1);
    Runnable sweepPass = () -> {
        try {
            if (config.disableSweep()) {
                logger.info("Workflow sweep is disabled.");
                return;
            }
            // Pop up to twice the worker pool size, waiting at most 2000 ms.
            List<String> workflowIds =
                    queueDAO.pop(WorkflowExecutor.DECIDER_QUEUE, 2 * executorThreadPoolSize, 2000);
            int currentQueueSize = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE);
            logger.debug("Sweeper's current deciderqueue size: {}.", currentQueueSize);
            int retrievedWorkflows = workflowIds == null ? 0 : workflowIds.size();
            logger.debug("Sweeper retrieved {} workflows from the decider queue.", retrievedWorkflows);
            sweep(workflowIds, workflowExecutor);
        } catch (Exception e) {
            Monitors.error(className, "sweep");
            logger.error("Error when sweeping workflow", e);
        }
    };
    deciderPool.scheduleWithFixedDelay(sweepPass, 500, 500, TimeUnit.MILLISECONDS);
}
/**
 * Deletes a workflow document from the search index. A result other than
 * DELETED (e.g. the document was not found) is logged; any failure is logged
 * and counted in monitoring.
 *
 * @param workflowId id of the workflow document to remove
 */
@Override
public void removeWorkflow(String workflowId) {
    try {
        DeleteRequest request = new DeleteRequest(indexName, WORKFLOW_DOC_TYPE, workflowId);
        DeleteResponse response = elasticSearchClient.delete(request).actionGet();
        // FIX: the previous check was inverted — it logged "removal failed"
        // when the result WAS DELETED (i.e. on success). Only a non-DELETED
        // result (such as NOT_FOUND) indicates the document was not removed.
        if (response.getResult() != DocWriteResponse.Result.DELETED) {
            logger.error("Index removal failed - document not found by id: {}", workflowId);
        }
    } catch (Exception e) {
        logger.error("Failed to remove workflow {} from index", workflowId, e);
        Monitors.error(className, "remove");
    }
}
/**
 * Removes the task-lookup row that maps this task's id to its workflow.
 *
 * @param task task whose lookup entry should be deleted (taskId must be a UUID string)
 * @throws ApplicationException with BACKEND_ERROR on any datastore failure
 */
private void removeTaskLookup(Task task) {
    try {
        recordCassandraDaoRequests("removeTaskLookup", task.getTaskType(), task.getWorkflowType());
        session.execute(deleteTaskLookupStatement.bind(UUID.fromString(task.getTaskId())));
    } catch (Exception e) {
        Monitors.error(CLASS_NAME, "removeTaskLookup");
        String errorMsg = String.format("Failed to remove task lookup: %s", task.getTaskId());
        LOGGER.error(errorMsg, e);
        // FIX: propagate the original exception as the cause (it was dropped
        // before), matching the sibling DAO methods that preserve it.
        throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e);
    }
}
@Override public boolean removeWorkflow(String workflowId) { Workflow workflow = getWorkflow(workflowId, true); boolean removed = false; // TODO: calculate number of shards and iterate if (workflow != null) { try { recordCassandraDaoRequests("removeWorkflow", "n/a", workflow.getWorkflowName()); ResultSet resultSet = session.execute(deleteWorkflowStatement.bind(UUID.fromString(workflowId), DEFAULT_SHARD_ID)); if (resultSet.wasApplied()) { removed = true; } } catch (Exception e) { Monitors.error(CLASS_NAME, "removeWorkflow"); String errorMsg = String.format("Failed to remove workflow: %s", workflowId); LOGGER.error(errorMsg, e); throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg); } workflow.getTasks().forEach(this::removeTaskLookup); } return removed; }
/**
 * Performs an index operation with a retry.
 *
 * @param request The index request that we want to perform.
 * @param operationDescription The type of operation that we are performing.
 */
private void indexWithRetry(final IndexRequest request, final String operationDescription) {
    try {
        new RetryUtil<IndexResponse>().retryOnException(
                () -> {
                    try {
                        return elasticSearchClient.index(request);
                    } catch (IOException ioe) {
                        // The retry supplier cannot throw checked exceptions,
                        // so wrap the IOException for RetryUtil to handle.
                        throw new RuntimeException(ioe);
                    }
                },
                null,
                null,
                RETRY_COUNT,
                operationDescription,
                "indexWithRetry");
    } catch (Exception e) {
        Monitors.error(className, "index");
        logger.error("Failed to index {} for request type: {}", request.id(), request.type(), e);
    }
}
@Override public Task getTask(String taskId) { try { String workflowId = lookupWorkflowIdFromTaskId(taskId); if (workflowId == null) { return null; } // TODO: implement for query against multiple shards ResultSet resultSet = session.execute(selectTaskStatement.bind(UUID.fromString(workflowId), DEFAULT_SHARD_ID, taskId)); return Optional.ofNullable(resultSet.one()) .map(row -> { Task task = readValue(row.getString(PAYLOAD_KEY), Task.class); recordCassandraDaoRequests("getTask", task.getTaskType(), task.getWorkflowType()); recordCassandraDaoPayloadSize("getTask", toJson(task).length(), task.getTaskType(), task.getWorkflowType()); return task; }) .orElse(null); } catch (Exception e) { Monitors.error(CLASS_NAME, "getTask"); String errorMsg = String.format("Error getting task by id: %s", taskId); LOGGER.error(errorMsg, e); throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg); } }
/**
 * Performs an update operation against the index, retrying on failure via
 * {@code RetryUtil}. Exhausted retries are logged and counted in monitoring
 * rather than rethrown.
 *
 * @param request the update request to execute
 * @param operationDescription human-readable description used by the retry helper
 */
private void updateWithRetry(UpdateRequest request, String operationDescription) {
    try {
        new RetryUtil<UpdateResponse>().retryOnException(
                () -> elasticSearchClient.update(request).actionGet(),
                null,
                null,
                RETRY_COUNT,
                operationDescription,
                "updateWithRetry");
    } catch (Exception e) {
        Monitors.error(className, "index");
        logger.error("Failed to index {} for request type: {}", request.index(), request.type(), e);
    }
}
/**
 * Persists a new workflow row. The task list is detached before serialization
 * (tasks are stored separately) and reattached before returning, so the caller
 * sees the workflow unchanged.
 *
 * @param workflow workflow to persist (workflowId must be a UUID string)
 * @return the persisted workflow's id
 * @throws ApplicationException with BACKEND_ERROR on any datastore failure
 */
@Override
public String createWorkflow(Workflow workflow) {
    try {
        workflow.setCreateTime(System.currentTimeMillis());
        // Detach tasks so the serialized payload holds only workflow metadata.
        List<Task> detachedTasks = workflow.getTasks();
        workflow.setTasks(new LinkedList<>());
        String payload = toJson(workflow);
        recordCassandraDaoRequests("createWorkflow", "n/a", workflow.getWorkflowName());
        recordCassandraDaoPayloadSize("createWorkflow", payload.length(), "n/a", workflow.getWorkflowName());
        session.execute(insertWorkflowStatement.bind(UUID.fromString(workflow.getWorkflowId()), 1, "", payload, 0, 1));
        workflow.setTasks(detachedTasks);
        return workflow.getWorkflowId();
    } catch (Exception e) {
        Monitors.error(CLASS_NAME, "createWorkflow");
        String errorMsg = String.format("Error creating workflow: %s", workflow.getWorkflowId());
        LOGGER.error(errorMsg, e);
        throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e);
    }
}
/**
 * Updates an existing workflow row. The task list is detached before
 * serialization (tasks are stored separately) and reattached before returning.
 * If the workflow has reached a terminal status, its end time is stamped.
 *
 * @param workflow workflow to update (workflowId must be a UUID string)
 * @return the updated workflow's id
 * @throws ApplicationException with BACKEND_ERROR on any datastore failure
 */
@Override
public String updateWorkflow(Workflow workflow) {
    try {
        workflow.setUpdateTime(System.currentTimeMillis());
        if (workflow.getStatus().isTerminal()) {
            workflow.setEndTime(System.currentTimeMillis());
        }
        List<Task> tasks = workflow.getTasks();
        workflow.setTasks(new LinkedList<>());
        String payload = toJson(workflow);
        // FIX: metrics were recorded under "createWorkflow" (copy-paste from
        // the create path); record this operation under its own name.
        recordCassandraDaoRequests("updateWorkflow", "n/a", workflow.getWorkflowName());
        recordCassandraDaoPayloadSize("updateWorkflow", payload.length(), "n/a", workflow.getWorkflowName());
        session.execute(updateWorkflowStatement.bind(payload, UUID.fromString(workflow.getWorkflowId())));
        workflow.setTasks(tasks);
        return workflow.getWorkflowId();
    } catch (Exception e) {
        Monitors.error(CLASS_NAME, "updateWorkflow");
        String errorMsg = String.format("Failed to update workflow: %s", workflow.getWorkflowId());
        LOGGER.error(errorMsg, e);
        // FIX: propagate the original exception as the cause (it was dropped
        // before), matching the sibling DAO methods that preserve it.
        throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e);
    }
}
@Override public void updateTask(Task task) { try { task.setUpdateTime(System.currentTimeMillis()); if (task.getStatus().isTerminal() && task.getEndTime() == 0) { task.setEndTime(System.currentTimeMillis()); } // TODO: calculate the shard number the task belongs to String taskPayload = toJson(task); recordCassandraDaoRequests("updateTask", task.getTaskType(), task.getWorkflowType()); recordCassandraDaoPayloadSize("updateTask", taskPayload.length(), task.getTaskType(), task.getWorkflowType()); session.execute(insertTaskStatement.bind(UUID.fromString(task.getWorkflowInstanceId()), DEFAULT_SHARD_ID, task.getTaskId(), taskPayload)); } catch (Exception e) { Monitors.error(CLASS_NAME, "updateTask"); String errorMsg = String.format("Error updating task: %s in workflow: %s", task.getTaskId(), task.getWorkflowInstanceId()); LOGGER.error(errorMsg, e); throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e); } }
private boolean removeTask(Task task) { // TODO: calculate shard number based on seq and maxTasksPerShard try { // get total tasks for this workflow WorkflowMetadata workflowMetadata = getWorkflowMetadata(task.getWorkflowInstanceId()); int totalTasks = workflowMetadata.getTotalTasks(); // remove from task_lookup table removeTaskLookup(task); recordCassandraDaoRequests("removeTask", task.getTaskType(), task.getWorkflowType()); // delete task from workflows table and decrement total tasks by 1 BatchStatement batchStatement = new BatchStatement(); batchStatement.add(deleteTaskStatement.bind(UUID.fromString(task.getWorkflowInstanceId()), DEFAULT_SHARD_ID, task.getTaskId())); batchStatement.add(updateTotalTasksStatement.bind(totalTasks - 1, UUID.fromString(task.getWorkflowInstanceId()), DEFAULT_SHARD_ID)); ResultSet resultSet = session.execute(batchStatement); return resultSet.wasApplied(); } catch (Exception e) { Monitors.error(CLASS_NAME, "removeTask"); String errorMsg = String.format("Failed to remove task: %s", task.getTaskId()); LOGGER.error(errorMsg, e); throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg); } }