/**
 * Reads all currently active workflow instance run states associated with the given trigger id.
 *
 * @param triggerId the trigger id to look up active states for
 * @return map from workflow instance to its current run state
 * @throws IOException if the underlying datastore read fails
 */
@Override
public Map<WorkflowInstance, RunState> readActiveStatesByTriggerId(String triggerId)
    throws IOException {
  // Pure delegation to the datastore-backed storage implementation.
  return datastoreStorage.activeStatesByTriggerId(triggerId);
}
/**
 * Builds the index-shard entry key for an active workflow instance.
 *
 * <p>Delegates to the string-keyed overload using the instance's canonical key string
 * ({@code WorkflowInstance#toKey()}).
 */
static Key activeWorkflowInstanceIndexShardEntryKey(KeyFactory keyFactory,
                                                    WorkflowInstance workflowInstance) {
  return activeWorkflowInstanceIndexShardEntryKey(keyFactory, workflowInstance.toKey());
}
/**
 * Reads the active run state of a single workflow instance, if one exists.
 *
 * @param instance the workflow instance to look up
 * @return the run state, or {@link Optional#empty()} if the instance has no active state entity
 * @throws IOException if the datastore read or entity deserialization fails
 */
Optional<RunState> readActiveState(WorkflowInstance instance) throws IOException {
  final Entity entity = datastore.get(activeWorkflowInstanceKey(instance));
  // Guard clause: a missing entity simply means the instance is not active.
  if (entity == null) {
    return Optional.empty();
  }
  return Optional.of(entityToRunState(entity, instance));
}
/**
 * Reads the global Styx configuration entity, falling back to a fresh (empty) entity
 * when none exists yet.
 *
 * <p>Fix: the original built {@code globalConfigKey(datastore.newKeyFactory())} twice —
 * once for the lookup and once for the default-entity builder. The key is now constructed
 * a single time and reused, which avoids the redundant key/factory construction without
 * changing behavior.
 *
 * @return the parsed global configuration
 * @throws IOException if the datastore read fails
 */
StyxConfig config() throws IOException {
  final Key configKey = globalConfigKey(datastore.newKeyFactory());
  final Entity entity = asBuilderOrNew(getOpt(datastore, configKey), configKey).build();
  return entityToConfig(entity);
}
/**
 * Fetches backfills from the datastore.
 *
 * @param showAll forwarded to {@code backfillQueryBuilder}; presumably controls whether
 *     halted/completed backfills are included — confirm in backfillQueryBuilder
 * @return list of matching backfills
 * @throws IOException if the datastore query fails
 */
List<Backfill> getBackfills(boolean showAll) throws IOException {
  final EntityQuery query = backfillQueryBuilder(showAll).build();
  return backfillsForQuery(query);
}
/**
 * Stores a workflow together with its next natural trigger instants in a single transaction.
 *
 * <p>Ensures the parent component entity exists, then upserts the workflow entity with its
 * serialized JSON (excluded from indexes) and the two trigger timestamps.
 *
 * @param workflow the workflow to store
 * @param triggerSpec the next natural trigger instants to persist alongside the workflow
 * @return the id of the stored workflow
 * @throws IOException if JSON serialization or a datastore operation fails
 */
@Override
public WorkflowId storeWorkflowWithNextNaturalTrigger(Workflow workflow,
                                                      TriggerInstantSpec triggerSpec)
    throws IOException {
  // Create the parent component entity if it does not exist yet.
  // Note: a fresh KeyFactory is used per key because key builders are mutated in place.
  final Key componentKey =
      DatastoreStorage.componentKey(tx.getDatastore().newKeyFactory(), workflow.componentId());
  if (tx.get(componentKey) == null) {
    tx.put(Entity.newBuilder(componentKey).build());
  }

  final String workflowJson = OBJECT_MAPPER.writeValueAsString(workflow);

  final Key workflowKey =
      DatastoreStorage.workflowKey(tx.getDatastore().newKeyFactory(), workflow.id());
  final Optional<Entity> existingWorkflow = DatastoreStorage.getOpt(tx, workflowKey);
  final Builder workflowEntity = DatastoreStorage.asBuilderOrNew(existingWorkflow, workflowKey)
      // The JSON payload can be large; keep it out of the indexes.
      .set(PROPERTY_WORKFLOW_JSON,
          StringValue.newBuilder(workflowJson).setExcludeFromIndexes(true).build())
      .set(PROPERTY_NEXT_NATURAL_TRIGGER, instantToTimestamp(triggerSpec.instant()))
      .set(PROPERTY_NEXT_NATURAL_OFFSET_TRIGGER, instantToTimestamp(triggerSpec.offsetInstant()));
  tx.put(workflowEntity.build());

  return workflow.id();
}
/**
 * Strongly consistently reads all active workflow instance run states.
 *
 * <p>Two parallel phases, each bounded by a 30 second gather timeout:
 * <ol>
 *   <li>Query every index shard (ancestor queries are strongly consistent in Datastore)
 *       in parallel to collect the keys of all active instances.</li>
 *   <li>Read the run-state entities for those keys in parallel, partitioned into batches
 *       of {@code MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_READ}.</li>
 * </ol>
 *
 * @return map from workflow instance to its current run state
 * @throws IOException if a query/read fails or the parallel gather times out
 */
Map<WorkflowInstance, RunState> readActiveStates() throws IOException {
  // Strongly read active state keys from index shards in parallel.
  // The shard entry's key name is the workflow instance's canonical key string,
  // which is mapped back to the active-state entity key below.
  final List<Key> keys = gatherIO(activeWorkflowInstanceIndexShardKeys(datastore.newKeyFactory()).stream()
      .map(key -> asyncIO(() -> datastore.query(Query.newEntityQueryBuilder()
          .setFilter(PropertyFilter.hasAncestor(key))
          .setKind(KIND_ACTIVE_WORKFLOW_INSTANCE_INDEX_SHARD_ENTRY)
          .build())))
      .collect(toList()), 30, TimeUnit.SECONDS)
      .stream()
      .flatMap(Collection::stream)
      .map(entity -> entity.getKey().getName())
      .map(name -> activeWorkflowInstanceKey(datastore.newKeyFactory(), name))
      .collect(toList());

  // Strongly consistently read values for the above keys in parallel, in bounded batches.
  return gatherIO(Lists.partition(keys, MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_READ).stream()
      .map(batch -> asyncIO(() -> readRunStateBatch(batch)))
      .collect(toList()), 30, TimeUnit.SECONDS)
      .stream()
      .flatMap(Collection::stream)
      .collect(toMap(RunState::workflowInstance, Function.identity()));
}
/**
 * Fetches the given workflows in parallel batches.
 *
 * <p>Ids are partitioned into batches of {@code MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_READ};
 * each batch is fetched asynchronously via {@code asyncIO}, and the results are joined and
 * merged into a single map. Ids with no stored workflow are simply absent from the result
 * (the map holds only what {@code getBatchOfWorkflows} returned).
 *
 * @param workflowIds the workflow ids to read
 * @return map from workflow id to workflow for every id that was found
 */
public Map<WorkflowId, Workflow> workflows(Set<WorkflowId> workflowIds) {
  final Iterable<List<WorkflowId>> batches =
      Iterables.partition(workflowIds, MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_READ);
  return StreamSupport.stream(batches.spliterator(), false)
      .map(batch -> asyncIO(() -> this.getBatchOfWorkflows(batch)))
      // `collect and stream` is crucial to make tasks running in parallel, otherwise they will
      // be processed sequentially. Without `collect`, it will try to submit and wait for each task
      // while iterating through the stream. This is somewhat subtle, so think twice.
      .collect(toList())
      .stream()
      .flatMap(task -> task.join().stream())
      .collect(toMap(Workflow::id, Function.identity()));
}
/**
 * Looks up a backfill by id within this transaction.
 *
 * @param id the backfill id
 * @return the backfill, or {@link Optional#empty()} if no entity exists for the id
 * @throws IOException if the datastore read or entity deserialization fails
 */
@Override
public Optional<Backfill> backfill(String id) throws IOException {
  final Key backfillKey = DatastoreStorage.backfillKey(tx.getDatastore().newKeyFactory(), id);
  final Entity entity = tx.get(backfillKey);
  return entity == null ? Optional.empty() : Optional.of(entityToBackfill(entity));
}
}
/**
 * Deletes the active state of a workflow instance within this transaction: both the
 * index-shard entry and the run-state entity itself.
 *
 * @param instance the instance whose active state should be removed
 * @return the same instance, for caller convenience
 * @throws IOException declared for interface compatibility
 */
@Override
public WorkflowInstance deleteActiveState(WorkflowInstance instance) throws IOException {
  // A fresh KeyFactory per key: key factories are mutated by the key-building helpers.
  final Key indexEntryKey =
      activeWorkflowInstanceIndexShardEntryKey(tx.getDatastore().newKeyFactory(), instance);
  final Key stateKey = activeWorkflowInstanceKey(tx.getDatastore().newKeyFactory(), instance);
  tx.delete(indexEntryKey);
  tx.delete(stateKey);
  return instance;
}
@Override public WorkflowInstance writeActiveState(WorkflowInstance instance, RunState state) throws IOException { // Note: the parent entity need not actually exist final Key indexEntryKey = activeWorkflowInstanceIndexShardEntryKey(tx.getDatastore().newKeyFactory(), instance); final Entity indexEntry = Entity.newBuilder(indexEntryKey).build(); tx.add(indexEntry); tx.add(runStateToEntity(tx.getDatastore().newKeyFactory(), instance, state)); return instance; }
/**
 * Persists a backfill entity within this transaction (upsert via {@code tx.put}).
 *
 * <p>All scalar fields are written unconditionally; the optional description and trigger
 * parameters are written only when present, with large string payloads excluded from indexes.
 *
 * @param backfill the backfill to store
 * @return the stored backfill, unchanged
 * @throws IOException if trigger-parameter JSON serialization fails
 */
@Override
public Backfill store(Backfill backfill) throws IOException {
  final Key backfillKey =
      DatastoreStorage.backfillKey(tx.getDatastore().newKeyFactory(), backfill.id());
  final Entity.Builder entityBuilder = Entity.newBuilder(backfillKey)
      .set(PROPERTY_CONCURRENCY, backfill.concurrency())
      .set(PROPERTY_START, instantToTimestamp(backfill.start()))
      .set(PROPERTY_END, instantToTimestamp(backfill.end()))
      .set(PROPERTY_COMPONENT, backfill.workflowId().componentId())
      .set(PROPERTY_WORKFLOW, backfill.workflowId().id())
      .set(PROPERTY_SCHEDULE, backfill.schedule().toString())
      .set(PROPERTY_NEXT_TRIGGER, instantToTimestamp(backfill.nextTrigger()))
      .set(PROPERTY_ALL_TRIGGERED, backfill.allTriggered())
      .set(PROPERTY_HALTED, backfill.halted())
      .set(PROPERTY_REVERSE, backfill.reverse());

  if (backfill.description().isPresent()) {
    entityBuilder.set(PROPERTY_DESCRIPTION, StringValue
        .newBuilder(backfill.description().get()).setExcludeFromIndexes(true).build());
  }

  // Explicit if (rather than ifPresent) because writeValueAsString throws a checked exception.
  if (backfill.triggerParameters().isPresent()) {
    final String parametersJson =
        OBJECT_MAPPER.writeValueAsString(backfill.triggerParameters().get());
    entityBuilder.set(PROPERTY_TRIGGER_PARAMETERS,
        StringValue.newBuilder(parametersJson).setExcludeFromIndexes(true).build());
  }

  tx.put(entityBuilder.build());
  return backfill;
}
/**
 * Serializes a {@link RunState} into a datastore {@link Entity} keyed by the active
 * workflow instance key.
 *
 * NOTE(review): this method appears TRUNCATED in this view — the builder is never
 * completed/returned here. Confirm the remainder against the full source before editing.
 */
static Entity runStateToEntity(KeyFactory keyFactory, WorkflowInstance wfi, RunState state)
    throws JsonProcessingException {
  final Key key = activeWorkflowInstanceKey(keyFactory, wfi);
  final Entity.Builder entity = Entity.newBuilder(key)
      .set(PROPERTY_COMPONENT, wfi.workflowId().componentId())
      .set(PROPERTY_STATE_RETRY_COST, state.data().retryCost())
      .set(PROPERTY_STATE_MESSAGES, jsonValue(state.data().messages()));
  // Optional state fields are written only when present.
  state.data().executionId().ifPresent(v -> entity.set(PROPERTY_STATE_EXECUTION_ID, v));
  if (state.data().triggerParameters().isPresent()) {
    entity.set(PROPERTY_STATE_TRIGGER_PARAMETERS, jsonValue(state.data().triggerParameters().get()));
    // NOTE(review): executionDescription/resourceIds are read with bare .get() but only
    // triggerParameters was checked above — this looks like a flattening artifact of the
    // truncation (upstream these are likely separate presence checks); verify before relying
    // on this gating.
    entity.set(PROPERTY_STATE_EXECUTION_DESCRIPTION, jsonValue(state.data().executionDescription().get()));
    entity.set(PROPERTY_STATE_RESOURCE_IDS, jsonValue(state.data().resourceIds().get()));
/**
 * Maps a workflow instance key string to the name of the index shard it belongs to.
 *
 * <p>Uses a murmur3_32 hash of the key. Note: {@code asInt()} is sign-extended into a
 * {@code long} before {@code Long.remainderUnsigned}, so negative hashes become large
 * unsigned values first — changing this arithmetic would reshuffle existing shard
 * assignments, so the operations are kept exactly as-is.
 */
private static String activeWorkflowInstanceIndexShardName(String workflowInstanceKey) {
  final long hashValue =
      Hashing.murmur3_32().hashString(workflowInstanceKey, StandardCharsets.UTF_8).asInt();
  final long shardIndex =
      Long.remainderUnsigned(hashValue, ACTIVE_WORKFLOW_INSTANCE_INDEX_SHARDS);
  return activeWorkflowInstanceIndexShardName(shardIndex);
}
/**
 * Builds the active-state entity key for a workflow instance, using the instance's
 * canonical key string ({@code WorkflowInstance#toKey()}) as the key name.
 */
static Key activeWorkflowInstanceKey(KeyFactory keyFactory, WorkflowInstance workflowInstance) {
  return activeWorkflowInstanceKey(keyFactory, workflowInstance.toKey());
}
/**
 * Creates an aggregate storage backed by Bigtable (via the given connection) and
 * Datastore (wrapped in a {@code CheckedDatastore}).
 *
 * @param connection the Bigtable connection
 * @param datastore the datastore client
 * @param retryBaseDelay base delay used by both backends' retry logic
 */
public AggregateStorage(Connection connection, Datastore datastore, Duration retryBaseDelay) {
  this(new BigtableStorage(connection, retryBaseDelay),
      new DatastoreStorage(new CheckedDatastore(datastore), retryBaseDelay));
}
/**
 * Stores a workflow within this transaction: ensures the parent component entity exists,
 * then upserts the workflow entity with its serialized JSON (excluded from indexes).
 *
 * @param workflow the workflow to store
 * @return the id of the stored workflow
 * @throws IOException if JSON serialization or a datastore operation fails
 */
@Override
public WorkflowId store(Workflow workflow) throws IOException {
  // Create the parent component entity if missing.
  // Note: a fresh KeyFactory per key, since key builders are mutated in place.
  final Key componentKey =
      DatastoreStorage.componentKey(tx.getDatastore().newKeyFactory(), workflow.componentId());
  if (tx.get(componentKey) == null) {
    tx.put(Entity.newBuilder(componentKey).build());
  }

  final String workflowJson = OBJECT_MAPPER.writeValueAsString(workflow);

  final Key workflowKey =
      DatastoreStorage.workflowKey(tx.getDatastore().newKeyFactory(), workflow.id());
  final Optional<Entity> existingWorkflow = DatastoreStorage.getOpt(tx, workflowKey);
  final Entity workflowEntity = DatastoreStorage.asBuilderOrNew(existingWorkflow, workflowKey)
      // The JSON payload can be large; keep it out of the indexes.
      .set(PROPERTY_WORKFLOW_JSON,
          StringValue.newBuilder(workflowJson).setExcludeFromIndexes(true).build())
      .build();
  tx.put(workflowEntity);

  return workflow.id();
}
/**
 * Fetches backfills for a specific workflow id.
 *
 * @param showAll forwarded to {@code backfillQueryBuilder}; presumably controls whether
 *     halted/completed backfills are included — confirm in backfillQueryBuilder
 * @param workflow the workflow id to filter on
 * @return list of matching backfills
 * @throws IOException if the datastore query fails
 */
List<Backfill> getBackfillsForWorkflow(boolean showAll, String workflow) throws IOException {
  return backfillsForQuery(
      backfillQueryBuilder(showAll, PropertyFilter.eq(PROPERTY_WORKFLOW, workflow)).build());
}
/**
 * Builds the key for an index-shard entry: kind {@code KIND_ACTIVE_WORKFLOW_INSTANCE_INDEX_SHARD_ENTRY},
 * named after the workflow instance key string, with its shard as ancestor.
 */
private static Key activeWorkflowInstanceIndexShardEntryKey(KeyFactory keyFactory,
                                                            String workflowInstanceKey) {
  final String shardName = activeWorkflowInstanceIndexShardName(workflowInstanceKey);
  final PathElement shardAncestor =
      PathElement.of(KIND_ACTIVE_WORKFLOW_INSTANCE_INDEX_SHARD, shardName);
  return keyFactory
      .setKind(KIND_ACTIVE_WORKFLOW_INSTANCE_INDEX_SHARD_ENTRY)
      .addAncestor(shardAncestor)
      .newKey(workflowInstanceKey);
}
/**
 * Convenience overload: builds the active-state entity key for a workflow instance
 * using a fresh key factory from this storage's datastore.
 */
private Key activeWorkflowInstanceKey(WorkflowInstance workflowInstance) {
  final KeyFactory keyFactory = datastore.newKeyFactory();
  return activeWorkflowInstanceKey(keyFactory, workflowInstance);
}