/** Example of running a query to find all entities with a matching property value. */ // [TARGET run(Query, ReadOption...)] // [VARIABLE "my_kind"] // [VARIABLE "my_property"] // [VARIABLE "my_value"] public List<Entity> runQueryOnProperty(String kind, String property, String value) { // TODO change so that it's not necessary to hold the entities in a list for integration testing // [START runQueryOnProperty] StructuredQuery<Entity> query = Query.newEntityQueryBuilder() .setKind(kind) .setFilter(PropertyFilter.eq(property, value)) .build(); QueryResults<Entity> results = datastore.run(query); List<Entity> entities = Lists.newArrayList(); while (results.hasNext()) { Entity result = results.next(); // do something with result entities.add(result); } // [END runQueryOnProperty] return entities; } }
/** * Finds the ID of the first {@link PortabilityJob} in state {@code jobState} in Datastore, or * null if none found. * * <p>TODO(rtannenbaum): Order by creation time so we can process jobs in a FIFO manner. Trying * to OrderBy.asc("created") currently fails because we don't yet have an index set up. */ @Override public UUID findFirst(JobAuthorization.State jobState) { Query<Key> query = Query.newKeyQueryBuilder() .setKind(KIND) .setFilter(PropertyFilter.eq(PortabilityJob.AUTHORIZATION_STATE, jobState.name())) // .setOrderBy(OrderBy.asc("created")) .setLimit(1) .build(); QueryResults<Key> results = datastore.run(query); if (!results.hasNext()) { return null; } Key key = results.next(); return UUID.fromString(key.getName()); }
// Query for all Person entities whose favorite_food property equals "pizza".
// NOTE(review): snippet fragment — the built query is never assigned here, yet a
// variable named `query` is passed to datastore.run() below; presumably the build()
// result is assigned to `query` in the full source. Verify against the enclosing method.
Query.newEntityQueryBuilder()
    .setKind("Person")
    .setFilter(PropertyFilter.eq("favorite_food", "pizza"))
    .build();
QueryResults<Entity> results = datastore.run(query);
// NOTE(review): fragment of a key-query builder chain — the chain's receiver and the
// declaration of `q` lie outside this excerpt; presumably `q` is assigned from the
// surrounding builder. Filters on entities whose id property equals `s`.
.setFilter(PropertyFilter.eq(_model.getId(), s))
.build();
QueryResults<Key> res = _datastore.run(q);
    break;
  case SIMPLE_PROPERTY:
    // Equality match on a single property: convert the criteria value through the
    // entity converter before building the filter, and record the field name so
    // later logic can tell which fields were equality-compared.
    // NOTE(review): fragment of a larger switch — `it`, `fieldName`, `converter`,
    // and `equalityComparedFields` are defined in the enclosing method.
    filter = PropertyFilter.eq(fieldName, converter.convertOnWriteSingle(it.next()));
    equalityComparedFields.add(fieldName);
// NOTE(review): fragment of a projection-query builder chain (receiver outside this
// excerpt). Projects only the expiry property and matches the session by id, context
// path, and vhost.
// Fix: the original chained two byte-identical .setFilter(...) calls; setFilter
// replaces any previously set filter, so the duplicate was redundant and is removed.
.setKind(_model.getKind())
.setProjection(_model.getExpiry())
.setFilter(CompositeFilter.and(
    PropertyFilter.eq(_model.getId(), id),
    PropertyFilter.eq(_model.getContextPath(), _context.getCanonicalContextPath()),
    PropertyFilter.eq(_model.getVhost(), _context.getVhost())))
    break;
  case SIMPLE_PROPERTY:
    // Equality match on a single property: convert the criteria value through the
    // entity converter before building the filter, and record the field name so
    // later logic can tell which fields were equality-compared.
    // NOTE(review): fragment of a larger switch — `it`, `fieldName`, `converter`,
    // and `equalityComparedFields` are defined in the enclosing method.
    filter = PropertyFilter.eq(fieldName, converter.convertOnWriteSingle(it.next()));
    equalityComparedFields.add(fieldName);
// Append an equality filter on this field to the accumulated filter list.
// NOTE(review): single-statement fragment — `filters`, `fieldName`, and `value`
// are defined in the enclosing method.
filters.add(StructuredQuery.PropertyFilter.eq(fieldName, value));
/** * Check to see if indexes are available, in which case * we can do more performant queries. * @return <code>true</code> if indexes are available */ protected boolean checkIndexes() { try { Query<ProjectionEntity> query = Query.newProjectionEntityQueryBuilder() .setKind(_model.getKind()) .setProjection(_model.getExpiry()) .setFilter(PropertyFilter.eq(_model.getId(), "-")) .build(); _datastore.run(query); return true; } catch (DatastoreException e) { //need to assume that the problem is the index doesn't exist, because there //is no specific code for that if (LOG.isDebugEnabled()) LOG.debug("Check for indexes", e); return false; } }
private void deleteShardsForCounter(String counterId) throws IOException { final List<Key> shards = new ArrayList<>(); datastore.query(EntityQuery.newEntityQueryBuilder() .setKind(KIND_COUNTER_SHARD) .setFilter(PropertyFilter.eq(PROPERTY_COUNTER_ID, counterId)) .build(), entity -> shards.add(entity.getKey())); // this is a safe guard to not to exceed max number of entities in one batch write // because in practice number of shards is much smaller for (List<Key> batch : Lists.partition(shards, MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_WRITE)) { storeWithRetries(() -> { datastore.delete(batch.toArray(new Key[0])); return null; }); } }
/**
 * Builds a backfill query combining the caller's filters; unless {@code showAll} is set,
 * results are additionally restricted to backfills that are neither fully triggered nor halted.
 */
private EntityQuery.Builder backfillQueryBuilder(boolean showAll, Filter... filters) {
  final List<Filter> conjuncts = Lists.newArrayList(filters);
  if (!showAll) {
    // Only in-progress backfills: not yet fully triggered and not halted.
    conjuncts.add(PropertyFilter.eq(PROPERTY_ALL_TRIGGERED, false));
    conjuncts.add(PropertyFilter.eq(PROPERTY_HALTED, false));
  }

  final EntityQuery.Builder builder = Query.newEntityQueryBuilder().setKind(KIND_BACKFILL);
  if (!conjuncts.isEmpty()) {
    // CompositeFilter.and takes a first filter plus varargs for the remainder.
    final Filter first = conjuncts.get(0);
    final Filter[] rest = conjuncts.subList(1, conjuncts.size()).toArray(new Filter[0]);
    builder.setFilter(CompositeFilter.and(first, rest));
  }
  return builder;
}
/** Returns backfills matching both the component id and workflow id of the given workflow. */
List<Backfill> getBackfillsForWorkflowId(boolean showAll, WorkflowId workflowId)
    throws IOException {
  final Filter componentFilter = PropertyFilter.eq(PROPERTY_COMPONENT, workflowId.componentId());
  final Filter workflowFilter = PropertyFilter.eq(PROPERTY_WORKFLOW, workflowId.id());
  return backfillsForQuery(
      backfillQueryBuilder(showAll, componentFilter, workflowFilter).build());
}
/** Returns the active workflow-instance states whose stored trigger id matches the given one. */
public Map<WorkflowInstance, RunState> activeStatesByTriggerId(
    String triggerId) throws IOException {
  final EntityQuery byTrigger =
      Query.newEntityQueryBuilder()
          .setKind(KIND_ACTIVE_WORKFLOW_INSTANCE)
          .setFilter(PropertyFilter.eq(PROPERTY_STATE_TRIGGER_ID, triggerId))
          .build();
  return queryActiveStates(byTrigger);
}
/** Returns backfills for the given workflow id. */
List<Backfill> getBackfillsForWorkflow(boolean showAll, String workflow) throws IOException {
  final Filter byWorkflow = PropertyFilter.eq(PROPERTY_WORKFLOW, workflow);
  return backfillsForQuery(backfillQueryBuilder(showAll, byWorkflow).build());
}
/** Returns the active workflow-instance states belonging to the given component. */
Map<WorkflowInstance, RunState> readActiveStates(String componentId) throws IOException {
  final EntityQuery byComponent =
      Query.newEntityQueryBuilder()
          .setKind(KIND_ACTIVE_WORKFLOW_INSTANCE)
          .setFilter(PropertyFilter.eq(PROPERTY_COMPONENT, componentId))
          .build();
  return queryActiveStates(byComponent);
}
/** Returns backfills for the given component id. */
List<Backfill> getBackfillsForComponent(boolean showAll, String component) throws IOException {
  final Filter byComponent = PropertyFilter.eq(PROPERTY_COMPONENT, component);
  return backfillsForQuery(backfillQueryBuilder(showAll, byComponent).build());
}