/**
 * Creates a listener that publishes workflow execution metrics to the given registry.
 * The executor id and group are captured once at construction time from the DAO.
 *
 * @param metricRegistry registry that receives the metrics
 * @param executors DAO used to resolve this executor's id and group
 */
public MetricsWorkflowExecutorListener(MetricRegistry metricRegistry, ExecutorDao executors) {
  this.metricRegistry = metricRegistry;
  this.nflowExecutorId = executors.getExecutorId();
  this.nflowExecutorGroup = executors.getExecutorGroup();
}
// NOTE(review): this is a fragment of a larger query-building method whose start and end are
// outside this view — presumably a workflow-instance search; confirm against the full file.
// Binds the executor group as a named parameter, joins the accumulated where-conditions,
// orders newest-first by creation time, and applies a vendor-specific LIMIT via sqlVariants.
params.addValue("executor_group", executorInfo.getExecutorGroup()); sql += " where " + collectionToDelimitedString(conditions, " and ") + " order by w.created desc"; sql = sqlVariants.limit(sql, ":limit");
/**
 * Claims the given instances for this executor one statement at a time, using the
 * modified-timestamp as an optimistic lock. Ids of successfully claimed instances are
 * appended to {@code ids}. If at least one claim was lost and none succeeded, a
 * {@link PollingRaceConditionException} is thrown to signal competing pollers.
 *
 * @param instances candidate instances with their optimistic-lock timestamps
 * @param ids output list collecting the ids this executor managed to claim
 */
private void updateNextWorkflowInstancesWithMultipleUpdates(List<OptimisticLockKey> instances, List<Integer> ids) {
  // Hoist the claim statement out of the loop; only unclaimed rows (executor_id is null)
  // whose modified timestamp still matches can be taken.
  String claimSql = updateInstanceForExecutionQuery() + " where id = ? and modified = ? and executor_id is null";
  boolean lostAtLeastOne = false;
  for (OptimisticLockKey candidate : instances) {
    if (jdbc.update(claimSql, candidate.id, candidate.modified) == 1) {
      ids.add(candidate.id);
    } else {
      // Another executor won this row between polling and claiming.
      lostAtLeastOne = true;
    }
  }
  // Only a total loss is treated as a race condition; partial success is fine.
  if (lostAtLeastOne && ids.isEmpty()) {
    throw new PollingRaceConditionException("Race condition in polling workflow instances detected. "
        + "Multiple pollers using same name (" + executorInfo.getExecutorGroup() + ")");
  }
}
// Claims the given instances for this executor in a single JDBC batch, using the
// modified-timestamp as an optimistic lock. Ids whose claim failed are removed from
// the output list by position; if every claim fails (or drivers report unexpected
// counts), a PollingRaceConditionException signals competing pollers.
// NOTE(review): positional matching of updateStatuses against ids assumes ids is
// empty on entry — confirm against the caller.
private void updateNextWorkflowInstancesWithBatchUpdate(List<OptimisticLockKey> instances, List<Integer> ids) {
  // Optimistically record every id as claimed; failures are pruned below.
  List<Object[]> batchArgs = new ArrayList<>(instances.size());
  for (OptimisticLockKey instance : instances) {
    batchArgs.add(new Object[] { instance.id, instance.modified });
    ids.add(instance.id);
  }
  // One batched update: only unclaimed rows (executor_id is null) whose modified
  // timestamp still matches are taken.
  int[] updateStatuses = jdbc
      .batchUpdate(updateInstanceForExecutionQuery() + " where id = ? and modified = ? and executor_id is null", batchArgs);
  // Walk ids in lockstep with the per-statement update counts.
  Iterator<Integer> idIt = ids.iterator();
  for (int status : updateStatuses) {
    idIt.next();
    if (status == 0) {
      // Another executor won this row; drop it from the claimed set.
      idIt.remove();
      if (ids.isEmpty()) {
        // Every claim lost: treat as a polling race between identically-named executor groups.
        throw new PollingRaceConditionException("Race condition in polling workflow instances detected. "
            + "Multiple pollers using same name (" + executorInfo.getExecutorGroup() + ")");
      }
      continue;
    }
    // SUCCESS_NO_INFO is acceptable (driver did not report a count); any other
    // non-1 count means the optimistic lock did not behave as expected.
    if (status != 1 && status != Statement.SUCCESS_NO_INFO) {
      throw new PollingRaceConditionException("Race condition in polling workflow instances detected. "
          + "Multiple pollers using same name (" + executorInfo.getExecutorGroup() + ")");
    }
  }
} });
// Inserts a workflow instance and its initial state variables in a single statement
// using a writable CTE ("with wf as (insert ... returning id)"), so the instance row
// and all state rows commit atomically and the new id is returned directly.
// Returns the new instance id, or -1 if a duplicate key (e.g. same external id)
// already exists.
private int insertWorkflowInstanceWithCte(WorkflowInstance instance) {
  try {
    StringBuilder sqlb = new StringBuilder(256);
    // The instance insert becomes the "wf" CTE exposing the generated id.
    sqlb.append("with wf as (" + insertWorkflowInstanceSql() + " returning id)");
    // Positional arguments for the instance insert; state-variable args are appended after.
    Object[] instanceValues = new Object[] { instance.type, instance.rootWorkflowId, instance.parentWorkflowId,
        instance.parentActionId, instance.businessKey, instance.externalId, executorInfo.getExecutorGroup(),
        instance.status.name(), instance.state, abbreviate(instance.stateText, instanceStateTextLength),
        toTimestamp(instance.nextActivation) };
    int pos = instanceValues.length;
    // Grow the args array to hold a key/value pair per state variable.
    Object[] args = Arrays.copyOf(instanceValues, pos + instance.stateVariables.size() * 2);
    for (Entry<String, String> var : instance.stateVariables.entrySet()) {
      // Each state variable gets its own uniquely named CTE ("ins<pos>") that inserts
      // one row referencing wf.id; pos doubles as the unique suffix and the arg index.
      sqlb.append(", ins").append(pos).append(" as (").append(insertWorkflowInstanceStateSql())
          .append(" select wf.id,0,?,? from wf)");
      args[pos++] = var.getKey();
      args[pos++] = var.getValue();
    }
    // Final select returns the generated instance id.
    sqlb.append(" select wf.id from wf");
    return jdbc.queryForObject(sqlb.toString(), Integer.class, args);
  } catch (DuplicateKeyException e) {
    // Duplicate external id (or similar unique constraint): report failure with -1
    // instead of propagating, so the caller can handle the conflict.
    logger.warn("Failed to insert workflow instance", e);
    return -1;
  }
}
// NOTE(review): this method is truncated in this view — only the argument-list setup is
// visible; the query construction and result mapping continue outside this chunk.
// Visible behavior: seeds the positional argument list with this executor's group and
// the workflow type; the date filters are presumably appended further down — confirm
// against the full file.
public Map<String, Map<String, WorkflowDefinitionStatistics>> getWorkflowDefinitionStatistics(String type, DateTime createdAfter, DateTime createdBefore, DateTime modifiedAfter, DateTime modifiedBefore) { String executorGroup = executorInfo.getExecutorGroup(); List<Object> argsList = new ArrayList<>(); argsList.addAll(asList(executorGroup, type));
/**
 * Persists the given workflow definition for this executor group, as an upsert keyed on
 * (type, executor_group). An update is attempted first and is a no-op when the stored
 * SHA-1 already matches; if no row was updated, an insert is tried, and a concurrent
 * insert by another executor is tolerated (logged at debug level).
 *
 * @param definition the workflow definition to serialize and store
 */
public void storeWorkflowDefinition(AbstractWorkflowDefinition<? extends WorkflowState> definition) {
  StoredWorkflowDefinition stored = convert(definition);
  String serialized = serializeDefinition(stored);
  // Bind all named parameters shared by the update and the insert.
  MapSqlParameterSource params = new MapSqlParameterSource();
  params.addValue("type", definition.getType());
  params.addValue("definition_sha1", sha1(serialized));
  params.addValue("definition", serialized, sqlVariants.longTextType());
  params.addValue("modified_by", executorInfo.getExecutorId());
  params.addValue("executor_group", executorInfo.getExecutorGroup());
  // Update only when the stored definition actually differs (sha1 mismatch); an identical
  // definition leaves updated == 0 and falls through to the insert, which then no-ops via
  // the duplicate-key catch below.
  String updateSql = "update nflow_workflow_definition "
      + "set definition = :definition, modified_by = :modified_by, definition_sha1 = :definition_sha1 "
      + "where type = :type and executor_group = :executor_group and definition_sha1 <> :definition_sha1";
  int updated = namedJdbc.update(updateSql, params);
  if (updated == 0) {
    String insertSql = "insert into nflow_workflow_definition(type, definition_sha1, definition, modified_by, executor_group) "
        + "values (:type, :definition_sha1, :definition, :modified_by, :executor_group)";
    try {
      namedJdbc.update(insertSql, params);
    } catch (DataIntegrityViolationException dex) {
      // Deliberate best-effort: losing the insert race (or re-storing an identical
      // definition) is harmless, so only log it.
      logger.debug("Another executor already stored the definition.", dex);
    }
  }
}