private List<Backfill> backfillsForQuery(EntityQuery query) throws IOException {
  final List<Backfill> backfills = Lists.newArrayList();
  datastore.query(query, entity -> backfills.add(entityToBackfill(entity)));
  return backfills;
}
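// Illustrative caller, not part of the original class: a sketch of how a filtered query
// might be composed and handed to backfillsForQuery. KIND_BACKFILL and PROPERTY_COMPONENT
// are assumed names for the backfill kind and its component property.
private List<Backfill> backfillsForComponent(String componentId) throws IOException {
  final EntityQuery query = Query.newEntityQueryBuilder()
      .setKind(KIND_BACKFILL)
      .setFilter(PropertyFilter.eq(PROPERTY_COMPONENT, componentId))
      .build();
  return backfillsForQuery(query);
}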
private Map<WorkflowInstance, RunState> queryActiveStates(EntityQuery activeStatesQuery)
    throws IOException {
  final ImmutableMap.Builder<WorkflowInstance, RunState> mapBuilder = ImmutableMap.builder();
  datastore.query(activeStatesQuery, entity -> {
    final WorkflowInstance instance = parseWorkflowInstance(entity);
    mapBuilder.put(instance, entityToRunState(entity, instance));
  });
  return mapBuilder.build();
}
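// A minimal sketch of the parseWorkflowInstance helper used above, assuming the active
// state entity's key name is the serialized workflow instance and that WorkflowInstance
// exposes a parseKey factory; both are assumptions for illustration.
private WorkflowInstance parseWorkflowInstance(Entity entity) {
  return WorkflowInstance.parseKey(entity.getKey().getName());
}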
List<Resource> getResources() throws IOException {
  final EntityQuery query = Query.newEntityQueryBuilder().setKind(KIND_COUNTER_LIMIT).build();
  final List<Resource> resources = Lists.newArrayList();
  datastore.query(query, entity -> resources.add(entityToResource(entity)));
  return resources;
}
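// A minimal sketch of the entityToResource helper, assuming a counter-limit entity is
// keyed by resource id and carries its limit in a long property. PROPERTY_LIMIT and the
// Resource.create(id, limit) factory are assumed names for illustration.
private Resource entityToResource(Entity entity) {
  return Resource.create(entity.getKey().getName(), entity.getLong(PROPERTY_LIMIT));
}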
public Map<WorkflowId, Workflow> workflows() throws IOException {
  final Map<WorkflowId, Workflow> map = Maps.newHashMap();
  final EntityQuery query = Query.newEntityQueryBuilder().setKind(KIND_WORKFLOW).build();
  datastore.query(query, entity -> {
    final Workflow workflow;
    try {
      workflow = OBJECT_MAPPER.readValue(entity.getString(PROPERTY_WORKFLOW_JSON), Workflow.class);
    } catch (IOException e) {
      LOG.warn("Failed to read workflow {}.", entity.getKey(), e);
      return;
    }
    map.put(workflow.id(), workflow);
  });
  return map;
}
Set<WorkflowId> enabled() throws IOException {
  final EntityQuery queryWorkflows = Query.newEntityQueryBuilder().setKind(KIND_WORKFLOW).build();
  final Set<WorkflowId> enabledWorkflows = Sets.newHashSet();
  datastore.query(queryWorkflows, workflow -> {
    final boolean enabled = workflow.contains(PROPERTY_WORKFLOW_ENABLED)
        && workflow.getBoolean(PROPERTY_WORKFLOW_ENABLED);
    if (enabled) {
      enabledWorkflows.add(parseWorkflowId(workflow));
    }
  });
  return enabledWorkflows;
}
/**
 * Strongly consistently read all active states.
 */
Map<WorkflowInstance, RunState> readActiveStates() throws IOException {
  // Strongly read active state keys from index shards in parallel
  final List<Key> keys = gatherIO(activeWorkflowInstanceIndexShardKeys(datastore.newKeyFactory()).stream()
      .map(key -> asyncIO(() -> datastore.query(Query.newEntityQueryBuilder()
          .setFilter(PropertyFilter.hasAncestor(key))
          .setKind(KIND_ACTIVE_WORKFLOW_INSTANCE_INDEX_SHARD_ENTRY)
          .build())))
      .collect(toList()), 30, TimeUnit.SECONDS)
      .stream()
      .flatMap(Collection::stream)
      .map(entity -> entity.getKey().getName())
      .map(name -> activeWorkflowInstanceKey(datastore.newKeyFactory(), name))
      .collect(toList());

  // Strongly consistently read values for the above keys in parallel
  return gatherIO(Lists.partition(keys, MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_READ).stream()
      .map(batch -> asyncIO(() -> readRunStateBatch(batch)))
      .collect(toList()), 30, TimeUnit.SECONDS)
      .stream()
      .flatMap(Collection::stream)
      .collect(toMap(RunState::workflowInstance, Function.identity()));
}
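// A minimal sketch of the readRunStateBatch helper referenced above. Lookups by key are
// strongly consistent in Datastore, which is what gives readActiveStates its guarantee.
// The callback-style batch get on the datastore wrapper is an assumed signature, mirroring
// the query(query, callback) shape used elsewhere in this class.
private List<RunState> readRunStateBatch(List<Key> keys) throws IOException {
  final List<RunState> runStates = new ArrayList<>();
  datastore.get(keys, entity -> {
    final WorkflowInstance instance = parseWorkflowInstance(entity);
    runStates.add(entityToRunState(entity, instance));
  });
  return runStates;
}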
private void deleteShardsForCounter(String counterId) throws IOException {
  final List<Key> shards = new ArrayList<>();
  datastore.query(Query.newEntityQueryBuilder()
      .setKind(KIND_COUNTER_SHARD)
      .setFilter(PropertyFilter.eq(PROPERTY_COUNTER_ID, counterId))
      .build(), entity -> shards.add(entity.getKey()));

  // Safeguard against exceeding the maximum number of entities in one batch write;
  // in practice the number of shards is much smaller.
  for (List<Key> batch : Lists.partition(shards, MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_WRITE)) {
    storeWithRetries(() -> {
      datastore.delete(batch.toArray(new Key[0]));
      return null;
    });
  }
}
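// Context for the batching above: Datastore caps a single commit at 500 entity writes,
// so the partition size must stay at or below that. The concrete value of the constant
// used by this class is an assumption here.
private static final int MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_WRITE = 500;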
public List<Workflow> workflows(String componentId) throws IOException {
  final Key componentKey = componentKey(datastore.newKeyFactory(), componentId);
  final List<Workflow> workflows = Lists.newArrayList();
  final EntityQuery query = Query.newEntityQueryBuilder()
      .setKind(KIND_WORKFLOW)
      .setFilter(PropertyFilter.hasAncestor(componentKey))
      .build();
  datastore.query(query, entity -> {
    if (!entity.contains(PROPERTY_WORKFLOW_JSON)) {
      return;
    }
    final Workflow workflow;
    try {
      workflow = OBJECT_MAPPER.readValue(entity.getString(PROPERTY_WORKFLOW_JSON), Workflow.class);
    } catch (IOException e) {
      LOG.warn("Failed to read workflow {}.", entity.getKey(), e);
      return;
    }
    workflows.add(workflow);
  });
  return workflows;
}