.setNamespace(NAMESPACE) .setKind(COMMENT_KIND) .setFilter(PropertyFilter.hasAncestor(userKey)) .setLimit(limit) .build();
/** Example of running a query to find all entities with a matching property value. */ // [TARGET run(Query, ReadOption...)] // [VARIABLE "my_kind"] // [VARIABLE "my_property"] // [VARIABLE "my_value"] public List<Entity> runQueryOnProperty(String kind, String property, String value) { // TODO change so that it's not necessary to hold the entities in a list for integration testing // [START runQueryOnProperty] StructuredQuery<Entity> query = Query.newEntityQueryBuilder() .setKind(kind) .setFilter(PropertyFilter.eq(property, value)) .build(); QueryResults<Entity> results = datastore.run(query); List<Entity> entities = Lists.newArrayList(); while (results.hasNext()) { Entity result = results.next(); // do something with result entities.add(result); } // [END runQueryOnProperty] return entities; } }
.setFilter(CompositeFilter.and(PropertyFilter.eq(_model.getId(), id), PropertyFilter.eq(_model.getContextPath(), _context.getCanonicalContextPath()), PropertyFilter.eq(_model.getVhost(), _context.getVhost())))
.setFilter(CompositeFilter.and(PropertyFilter.gt(_model.getExpiry(), 0), PropertyFilter.le(_model.getExpiry(), System.currentTimeMillis()))) .setLimit(_maxResults) .build();
private <T> void resolveDescendantProperties(DatastorePersistentEntity datastorePersistentEntity, BaseEntity entity, T convertedObject) { datastorePersistentEntity .doWithDescendantProperties((descendantPersistentProperty) -> { Class descendantType = descendantPersistentProperty .getComponentType(); EntityQuery descendantQuery = Query.newEntityQueryBuilder() .setKind(this.datastoreMappingContext .getPersistentEntity(descendantType).kindName()) .setFilter(PropertyFilter.hasAncestor((Key) entity.getKey())) .build(); datastorePersistentEntity.getPropertyAccessor(convertedObject) .setProperty(descendantPersistentProperty, // Converting the collection type. this.datastoreEntityConverter.getConversions() .convertOnRead( convertEntitiesForRead( getDatastoreReadWriter() .run(descendantQuery), descendantType), descendantPersistentProperty .getType(), descendantType)); }); }
/**
 * Strongly consistently read all active states
 *
 * <p>First gathers the active workflow instance keys from every index shard (ancestor
 * queries, issued in parallel), then batch-reads the run states for those keys in
 * parallel. Both phases are bounded by a 30 second gather timeout.
 */
Map<WorkflowInstance, RunState> readActiveStates() throws IOException {
  // Strongly read active state keys from index shards in parallel
  // NOTE(review): presumably the hasAncestor filter is what makes these reads strongly
  // consistent (Datastore ancestor queries) — confirm against the storage design doc.
  final List<Key> keys = gatherIO(activeWorkflowInstanceIndexShardKeys(datastore.newKeyFactory()).stream()
      .map(key -> asyncIO(() -> datastore.query(Query.newEntityQueryBuilder()
          .setFilter(PropertyFilter.hasAncestor(key))
          .setKind(KIND_ACTIVE_WORKFLOW_INSTANCE_INDEX_SHARD_ENTRY)
          .build())))
      .collect(toList()), 30, TimeUnit.SECONDS)
      .stream()
      .flatMap(Collection::stream)
      // Each index-shard entry's key name is the active workflow instance identifier.
      .map(entity -> entity.getKey().getName())
      .map(name -> activeWorkflowInstanceKey(datastore.newKeyFactory(), name))
      .collect(toList());

  // Strongly consistently read values for the above keys in parallel
  // Partitioned so no single lookup exceeds the per-batch entity read limit.
  return gatherIO(Lists.partition(keys, MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_READ).stream()
      .map(batch -> asyncIO(() -> readRunStateBatch(batch)))
      .collect(toList()), 30, TimeUnit.SECONDS)
      .stream()
      .flatMap(Collection::stream)
      .collect(toMap(RunState::workflowInstance, Function.identity()));
}
private <T> void resolveDescendantProperties(DatastorePersistentEntity datastorePersistentEntity, BaseEntity entity, T convertedObject) { datastorePersistentEntity .doWithDescendantProperties((descendantPersistentProperty) -> { Class descendantType = descendantPersistentProperty .getComponentType(); EntityQuery descendantQuery = Query.newEntityQueryBuilder() .setKind(this.datastoreMappingContext .getPersistentEntity(descendantType).kindName()) .setFilter(PropertyFilter.hasAncestor((Key) entity.getKey())) .build(); datastorePersistentEntity.getPropertyAccessor(convertedObject) .setProperty(descendantPersistentProperty, // Converting the collection type. this.datastoreEntityConverter.getConversions() .convertOnRead( convertEntitiesForRead( getDatastoreReadWriter() .run(descendantQuery), descendantType), descendantPersistentProperty .getType(), descendantType)); }); }
/**
 * Returns all workflows stored under the given component, via an ancestor query.
 * Entities whose JSON payload is missing or unreadable are skipped (with a warning).
 */
public List<Workflow> workflows(String componentId) throws IOException {
  final Key componentKey = componentKey(datastore.newKeyFactory(), componentId);
  final EntityQuery query = Query.newEntityQueryBuilder()
      .setKind(KIND_WORKFLOW)
      .setFilter(PropertyFilter.hasAncestor(componentKey))
      .build();
  final List<Workflow> workflows = Lists.newArrayList();
  datastore.query(query, entity -> {
    // Skip entities without a serialized workflow payload.
    if (!entity.contains(PROPERTY_WORKFLOW_JSON)) {
      return;
    }
    final Workflow workflow;
    try {
      workflow = OBJECT_MAPPER.readValue(entity.getString(PROPERTY_WORKFLOW_JSON), Workflow.class);
    } catch (IOException e) {
      // Best effort: log and continue with the remaining workflows.
      LOG.warn("Failed to read workflow {}.", entity.getKey(), e);
      return;
    }
    workflows.add(workflow);
  });
  return workflows;
}
/**
 * Builds a backfill query, AND-ing the supplied filters together. Unless {@code showAll}
 * is set, fully-triggered and halted backfills are excluded.
 */
private EntityQuery.Builder backfillQueryBuilder(boolean showAll, Filter... filters) {
  final EntityQuery.Builder builder = Query.newEntityQueryBuilder().setKind(KIND_BACKFILL);
  final List<Filter> conjuncts = Lists.newArrayList(filters);
  if (!showAll) {
    // By default only show backfills that are still in progress.
    conjuncts.add(PropertyFilter.eq(PROPERTY_ALL_TRIGGERED, false));
    conjuncts.add(PropertyFilter.eq(PROPERTY_HALTED, false));
  }
  // CompositeFilter.and requires a first filter, so guard the empty case and split
  // the list into head + rest.
  if (!conjuncts.isEmpty()) {
    final Filter first = conjuncts.get(0);
    final Filter[] rest = conjuncts.subList(1, conjuncts.size()).toArray(new Filter[0]);
    builder.setFilter(CompositeFilter.and(first, rest));
  }
  return builder;
}
private void deleteShardsForCounter(String counterId) throws IOException { final List<Key> shards = new ArrayList<>(); datastore.query(EntityQuery.newEntityQueryBuilder() .setKind(KIND_COUNTER_SHARD) .setFilter(PropertyFilter.eq(PROPERTY_COUNTER_ID, counterId)) .build(), entity -> shards.add(entity.getKey())); // this is a safe guard to not to exceed max number of entities in one batch write // because in practice number of shards is much smaller for (List<Key> batch : Lists.partition(shards, MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_WRITE)) { storeWithRetries(() -> { datastore.delete(batch.toArray(new Key[0])); return null; }); } }
/** Reads the active workflow-instance states whose stored trigger id equals {@code triggerId}. */
public Map<WorkflowInstance, RunState> activeStatesByTriggerId(
    String triggerId) throws IOException {
  // Equality filter on the persisted trigger id property.
  final EntityQuery byTriggerId = Query.newEntityQueryBuilder()
      .setKind(KIND_ACTIVE_WORKFLOW_INSTANCE)
      .setFilter(PropertyFilter.eq(PROPERTY_STATE_TRIGGER_ID, triggerId))
      .build();
  return queryActiveStates(byTriggerId);
}
/** Reads the active workflow-instance states belonging to the given component. */
Map<WorkflowInstance, RunState> readActiveStates(String componentId) throws IOException {
  // Equality filter on the persisted component property.
  final EntityQuery byComponent = Query.newEntityQueryBuilder()
      .setKind(KIND_ACTIVE_WORKFLOW_INSTANCE)
      .setFilter(PropertyFilter.eq(PROPERTY_COMPONENT, componentId))
      .build();
  return queryActiveStates(byComponent);
}
.setNamespace(NAMESPACE) .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .build(); .setNamespace(NAMESPACE) .setKind(KIND2) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setOrderBy(OrderBy.asc("__key__")) .build(); Query.newEntityQueryBuilder() .setNamespace(NAMESPACE) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setKind("bla") .build();
Query.newEntityQueryBuilder() .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setOrderBy(OrderBy.asc("__key__")) .build();
Query.newEntityQueryBuilder() .setKind("Person") .setFilter(PropertyFilter.eq("favorite_food", "pizza")) .build(); QueryResults<Entity> results = datastore.run(query);
Query.newEntityQueryBuilder() .setKind(KIND2) .setFilter(PropertyFilter.hasAncestor(KEY2)) .setNamespace(NAMESPACE) .build();
/**
 * Verifies that queries run inside a transaction see a consistent snapshot, and that a
 * transaction aborts on commit when an entity it read was modified concurrently.
 */
@Test
public void testTransactionWithQuery() {
  // Ancestor query that matches ENTITY2 only.
  Query<Entity> query = Query.newEntityQueryBuilder()
      .setKind(KIND2)
      .setFilter(PropertyFilter.hasAncestor(KEY2))
      .build();
  Transaction transaction = datastore.newTransaction();
  QueryResults<Entity> results = transaction.run(query);
  assertEquals(ENTITY2, results.next());
  assertFalse(results.hasNext());
  transaction.add(ENTITY3);
  transaction.commit();
  // ENTITY3 is visible once the first transaction commits.
  assertEquals(ENTITY3, datastore.get(KEY3));
  transaction = datastore.newTransaction();
  results = transaction.run(query);
  assertEquals(ENTITY2, results.next());
  transaction.delete(ENTITY3.getKey());
  // update entity2 during the transaction
  datastore.put(Entity.newBuilder(ENTITY2).clear().build());
  try {
    // Commit must fail: ENTITY2 was read in this transaction and changed outside it.
    transaction.commit();
    fail("Expecting a failure");
  } catch (DatastoreException expected) {
    assertEquals("ABORTED", expected.getReason());
  }
}
/** Example of running a query to find all entities with an ancestor. */ // [TARGET run(Query)] // [VARIABLE "my_parent_key_name"] public List<Entity> run(String parentKeyName) { Datastore datastore = transaction.getDatastore(); // [START run] KeyFactory keyFactory = datastore.newKeyFactory().setKind("ParentKind"); Key parentKey = keyFactory.newKey(parentKeyName); // Build a query Query<Entity> query = Query.newEntityQueryBuilder() .setKind("MyKind") .setFilter(PropertyFilter.hasAncestor(parentKey)) .build(); QueryResults<Entity> results = transaction.run(query); List<Entity> entities = Lists.newArrayList(); while (results.hasNext()) { Entity result = results.next(); // do something with result entities.add(result); } transaction.commit(); // [END run] return entities; }