Entity johnEntity = datastore.get(key); Entity.newBuilder(janeKey) .set("name", "Jane Doe") .set("age", 44) Key joeKey = keyFactory.newKey("joe.shmoe@gmail.com"); Entity joeEntity = Entity.newBuilder(joeKey) .set("name", "Joe Shmoe") .set("age", 27) Query.newEntityQueryBuilder() .setKind("Person") .setFilter(PropertyFilter.eq("favorite_food", "pizza")) .build(); QueryResults<Entity> results = datastore.run(query); while (results.hasNext()) { Entity currentEntity = results.next(); System.out.println(currentEntity.getString("name") + ", you're invited to a pizza party!");
return; "User '%s' email is '%s', phone is '%s'.%n", userKey.getName(), email, phone); System.out.printf("User '%s' has %d comment[s].%n", userKey.getName(), user.getLong("count")); int limit = 200; Map<Timestamp, String> sortedComments = new TreeMap<>(); StructuredQuery<Entity> query = Query.newEntityQueryBuilder() .setNamespace(NAMESPACE) .setKind(COMMENT_KIND) .setFilter(PropertyFilter.hasAncestor(userKey)) .setLimit(limit) .build(); while (true) { QueryResults<Entity> results = tx.run(query); int resultCount = 0; while (results.hasNext()) { Entity result = results.next(); sortedComments.put(result.getTimestamp("timestamp"), result.getString("content")); resultCount++; break; query = query.toBuilder().setStartCursor(results.getCursorAfter()).build();
@Test public void testRunStructuredQuery() { Query<Entity> query = Query.newEntityQueryBuilder().setKind(KIND1).setOrderBy(OrderBy.asc("__key__")).build(); QueryResults<Entity> results1 = datastore.run(query); assertTrue(results1.hasNext()); assertEquals(ENTITY1, results1.next()); assertFalse(results1.hasNext()); Query<Key> keyOnlyQuery = Query.newKeyQueryBuilder().setKind(KIND1).build(); QueryResults<Key> results2 = datastore.run(keyOnlyQuery); assertTrue(results2.hasNext()); assertEquals(ENTITY1.getKey(), results2.next()); assertFalse(results2.hasNext()); .setKind(KIND2) .setProjection("age") .setFilter(PropertyFilter.gt("age", 18)) .setDistinctOn("age") .setOrderBy(OrderBy.asc("age")) assertTrue(results4.hasNext()); ProjectionEntity entity = results4.next(); assertEquals(ENTITY2.getKey(), entity.getKey()); assertEquals(20, entity.getLong("age")); assertEquals(1, entity.getProperties().size());
@Test public void testTransactionWithQuery() throws InterruptedException { Query<Entity> query = Query.newEntityQueryBuilder() .setKind(KIND2) .setFilter(PropertyFilter.hasAncestor(KEY2)) .setNamespace(NAMESPACE) .build(); Transaction transaction = DATASTORE.newTransaction(); QueryResults<Entity> results = transaction.run(query); assertTrue(results.hasNext()); assertEquals(ENTITY2, results.next()); assertFalse(results.hasNext()); transaction.add(ENTITY3); transaction.commit(); assertEquals(ENTITY3, DATASTORE.get(KEY3)); transaction = DATASTORE.newTransaction(); results = transaction.run(query); assertTrue(results.hasNext()); assertEquals(ENTITY2, results.next()); assertFalse(results.hasNext()); transaction.delete(ENTITY3.getKey()); DATASTORE.put(Entity.newBuilder(ENTITY2).clear().build()); try { transaction.commit();
/** Example of running a query to find all entities of one kind. */ // [TARGET run(Query, ReadOption...)] // [VARIABLE "my_kind"] public List<Entity> runQuery(String kind) { // TODO change so that it's not necessary to hold the entities in a list for integration testing // [START runQuery] StructuredQuery<Entity> query = Query.newEntityQueryBuilder().setKind(kind).build(); QueryResults<Entity> results = datastore.run(query); List<Entity> entities = Lists.newArrayList(); while (results.hasNext()) { Entity result = results.next(); // do something with result entities.add(result); } // [END runQuery] return entities; }
Query<Entity> query = Query.newEntityQueryBuilder() .setKind(_model.getKind()) .setFilter(CompositeFilter.and(PropertyFilter.gt(_model.getExpiry(), 0), PropertyFilter.le(_model.getExpiry(), System.currentTimeMillis()))) .setLimit(_maxResults) .build(); results = _datastore.run(query); LOG.debug("Expiry query no index in {}ms", System.currentTimeMillis()-start); results = _datastore.run(query); while (results.hasNext()) Entity entity = results.next(); info.add(new ExpiryInfo(entity.getString(_model.getId()),entity.getString(_model.getLastNode()), entity.getLong(_model.getExpiry())));
/** Example of running a query to find all entities with an ancestor. */ // [TARGET run(Query)] // [VARIABLE "my_parent_key_name"] public List<Entity> run(String parentKeyName) { Datastore datastore = transaction.getDatastore(); // [START run] KeyFactory keyFactory = datastore.newKeyFactory().setKind("ParentKind"); Key parentKey = keyFactory.newKey(parentKeyName); // Build a query Query<Entity> query = Query.newEntityQueryBuilder() .setKind("MyKind") .setFilter(PropertyFilter.hasAncestor(parentKey)) .build(); QueryResults<Entity> results = transaction.run(query); List<Entity> entities = Lists.newArrayList(); while (results.hasNext()) { Entity result = results.next(); // do something with result entities.add(result); } transaction.commit(); // [END run] return entities; }
/**
 * Reads all workflows stored under the given component.
 *
 * <p>Entities lacking the JSON payload property are skipped; entities whose payload fails to
 * deserialize are logged and skipped, so a single corrupt row does not fail the whole read.
 */
public List<Workflow> workflows(String componentId) throws IOException {
  final Key componentKey = componentKey(datastore.newKeyFactory(), componentId);
  final List<Workflow> result = Lists.newArrayList();
  final EntityQuery componentWorkflows =
      Query.newEntityQueryBuilder()
          .setKind(KIND_WORKFLOW)
          .setFilter(PropertyFilter.hasAncestor(componentKey))
          .build();
  datastore.query(componentWorkflows, entity -> {
    if (!entity.contains(PROPERTY_WORKFLOW_JSON)) {
      // No serialized payload on this entity; nothing to parse.
      return;
    }
    final Workflow parsed;
    try {
      parsed = OBJECT_MAPPER.readValue(entity.getString(PROPERTY_WORKFLOW_JSON), Workflow.class);
    } catch (IOException e) {
      LOG.warn("Failed to read workflow {}.", entity.getKey(), e);
      return;
    }
    result.add(parsed);
  });
  return result;
}
presults = _datastore.run(query); LOG.debug("Exists query by index in {}ms", System.currentTimeMillis()-start); presults = _datastore.run(query); if (presults.hasNext()) ProjectionEntity pe = presults.next(); return !isExpired(pe.getLong(_model.getExpiry())); Query<Entity> query = Query.newEntityQueryBuilder() .setKind(_model.getKind()) .setFilter(CompositeFilter.and(PropertyFilter.eq(_model.getId(), id), PropertyFilter.eq(_model.getContextPath(), _context.getCanonicalContextPath()), PropertyFilter.eq(_model.getVhost(), _context.getVhost()))) .build(); results = _datastore.run(query); LOG.debug("Exists query no index in {}ms", System.currentTimeMillis()-start); results = _datastore.run(query); if (results.hasNext()) Entity entity = results.next(); return !isExpired(entity.getLong(_model.getExpiry()));
@Test public void testRunGqlQueryNoCasting() throws InterruptedException { Query<Entity> query1 = Query.newGqlQueryBuilder(ResultType.ENTITY, "select * from " + KIND1) .setNamespace(NAMESPACE) .build(); Query<Entity> scQuery1 = Query.newEntityQueryBuilder() .setNamespace(NAMESPACE) .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .build(); assertFalse(results1.hasNext()); .setNamespace(NAMESPACE) .setKind(KIND2) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setOrderBy(OrderBy.asc("__key__")) .build(); .setNamespace(NAMESPACE) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setKind("bla") .build(); results1 = getStronglyConsistentResults(scQuery1, query1); assertFalse(results1.hasNext());
Query<Entity> query1 = (Query<Entity>) Query.newGqlQueryBuilder("select * from " + KIND1).setNamespace(NAMESPACE).build(); Query<Entity> scQuery1 = Query.newEntityQueryBuilder() .setNamespace(NAMESPACE) .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .build(); Iterator<Entity> results1 = getStronglyConsistentResults(scQuery1, query1); assertTrue(results1.hasNext()); Query.newGqlQueryBuilder("select * from " + KIND1).setNamespace(NAMESPACE).build(); QueryResults<?> results2 = DATASTORE.run(query2); assertSame(Entity.class, results2.getResultClass()); Query.newEntityQueryBuilder() .setNamespace(NAMESPACE) .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .build();
private List<RunQueryResponse> buildResponsesForQueryPaginationWithLimit() { Entity entity4 = Entity.newBuilder(KEY4).set("value", StringValue.of("value")).build(); Entity entity5 = Entity.newBuilder(KEY5).set("value", "value").build(); datastore.add(ENTITY3, entity4, entity5); DatastoreRpc datastoreRpc = datastore.getOptions().getDatastoreRpcV1(); List<RunQueryResponse> responses = new ArrayList<>(); Query<Entity> query = Query.newEntityQueryBuilder().build(); RunQueryRequest.Builder requestPb = RunQueryRequest.newBuilder(); query.populatePb(requestPb); QueryResultBatch queryResultBatchPb = RunQueryResponse.newBuilder()
@Test public void testRunStructuredQuery() throws InterruptedException { Query<Entity> query = Query.newEntityQueryBuilder().setKind(KIND1).setOrderBy(OrderBy.asc("__key__")).build(); Query.newEntityQueryBuilder() .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setOrderBy(OrderBy.asc("__key__")) .build(); assertFalse(results1.hasNext()); Query<Key> keyOnlyQuery = Query.newKeyQueryBuilder().setKind(KIND1).build(); .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .build(); assertEquals(ENTITY1.getKey(), results2.next()); assertFalse(results2.hasNext()); .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setProjection("__key__") .build();
/**
 * Reads every stored workflow, keyed by its id.
 *
 * <p>Entities whose JSON payload fails to deserialize are logged and skipped.
 */
public Map<WorkflowId, Workflow> workflows() throws IOException {
  final Map<WorkflowId, Workflow> byId = Maps.newHashMap();
  final EntityQuery allWorkflows = Query.newEntityQueryBuilder().setKind(KIND_WORKFLOW).build();
  datastore.query(allWorkflows, entity -> {
    final Workflow parsed;
    try {
      parsed = OBJECT_MAPPER.readValue(entity.getString(PROPERTY_WORKFLOW_JSON), Workflow.class);
    } catch (IOException e) {
      LOG.warn("Failed to read workflow {}.", entity.getKey(), e);
      return;
    }
    byId.put(parsed.id(), parsed);
  });
  return byId;
}
/** Example of creating and running an entity query. */ // [TARGET newEntityQueryBuilder()] // [VARIABLE "my_kind"] public QueryResults<Entity> newEntityQuery(String kind) { // [START newEntityQuery] Query<Entity> query = Query.newEntityQueryBuilder().setKind(kind).build(); QueryResults<Entity> results = datastore.run(query); // Use results // [END newEntityQuery] return results; }
/**
 * Builds an entity query over backfill entities, AND-ing the supplied filters.
 *
 * <p>Unless {@code showAll} is set, additional filters exclude backfills that are fully
 * triggered or halted. With no filters at all, no filter is applied to the query.
 */
private EntityQuery.Builder backfillQueryBuilder(boolean showAll, Filter... filters) {
  final List<Filter> conditions = Lists.newArrayList(filters);
  if (!showAll) {
    conditions.add(PropertyFilter.eq(PROPERTY_ALL_TRIGGERED, false));
    conditions.add(PropertyFilter.eq(PROPERTY_HALTED, false));
  }
  final EntityQuery.Builder builder = Query.newEntityQueryBuilder().setKind(KIND_BACKFILL);
  if (!conditions.isEmpty()) {
    // CompositeFilter.and requires at least one filter, so split head from the rest.
    final Filter first = conditions.get(0);
    final Filter[] rest = conditions.subList(1, conditions.size()).toArray(new Filter[0]);
    builder.setFilter(CompositeFilter.and(first, rest));
  }
  return builder;
}
/**
 * Strongly consistently read all active states
 *
 * <p>Two-phase read: first gather the names of all active workflow instances from the index
 * shard entries (one ancestor query per shard, issued in parallel via {@code asyncIO}), then
 * strongly consistently read the state values for those keys in parallel batches.
 */
Map<WorkflowInstance, RunState> readActiveStates() throws IOException {
  // Strongly read active state keys from index shards in parallel
  // Each shard key becomes one ancestor query; gatherIO waits up to 30s for all of them,
  // then the per-shard result lists are flattened. The entity key *name* is assumed to be
  // the workflow instance identifier used to build the state entity key — confirmed by its
  // use in activeWorkflowInstanceKey below.
  final List<Key> keys = gatherIO(activeWorkflowInstanceIndexShardKeys(datastore.newKeyFactory()).stream()
      .map(key -> asyncIO(() -> datastore.query(Query.newEntityQueryBuilder()
          .setFilter(PropertyFilter.hasAncestor(key))
          .setKind(KIND_ACTIVE_WORKFLOW_INSTANCE_INDEX_SHARD_ENTRY)
          .build())))
      .collect(toList()), 30, TimeUnit.SECONDS)
      .stream()
      .flatMap(Collection::stream)
      .map(entity -> entity.getKey().getName())
      .map(name -> activeWorkflowInstanceKey(datastore.newKeyFactory(), name))
      .collect(toList());

  // Strongly consistently read values for the above keys in parallel
  // Keys are partitioned so each batch read stays under the Datastore batch-get limit;
  // batches run concurrently with another 30s overall timeout, and the flattened run
  // states are indexed by their workflow instance.
  return gatherIO(Lists.partition(keys, MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_READ).stream()
      .map(batch -> asyncIO(() -> readRunStateBatch(batch)))
      .collect(toList()), 30, TimeUnit.SECONDS)
      .stream()
      .flatMap(Collection::stream)
      .collect(toMap(RunState::workflowInstance, Function.identity()));
}
/**
 * Returns the ids of all workflows whose enabled flag is present and set to {@code true}.
 */
Set<WorkflowId> enabled() throws IOException {
  final EntityQuery allWorkflows = EntityQuery.newEntityQueryBuilder().setKind(KIND_WORKFLOW).build();
  final Set<WorkflowId> result = Sets.newHashSet();
  datastore.query(allWorkflows, workflowEntity -> {
    // Property may be absent on older entities; treat missing as disabled.
    if (workflowEntity.contains(PROPERTY_WORKFLOW_ENABLED)
        && workflowEntity.getBoolean(PROPERTY_WORKFLOW_ENABLED)) {
      result.add(parseWorkflowId(workflowEntity));
    }
  });
  return result;
}
private void deleteShardsForCounter(String counterId) throws IOException { final List<Key> shards = new ArrayList<>(); datastore.query(EntityQuery.newEntityQueryBuilder() .setKind(KIND_COUNTER_SHARD) .setFilter(PropertyFilter.eq(PROPERTY_COUNTER_ID, counterId)) .build(), entity -> shards.add(entity.getKey())); // this is a safe guard to not to exceed max number of entities in one batch write // because in practice number of shards is much smaller for (List<Key> batch : Lists.partition(shards, MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_WRITE)) { storeWithRetries(() -> { datastore.delete(batch.toArray(new Key[0])); return null; }); } }
/** Verifies that each query type round-trips through its protobuf form via mergeFrom. */
@Test
public void mergeFrom() {
  final EntityQuery mergedEntityQuery = new EntityQuery.Builder().mergeFrom(ENTITY_QUERY.toPb()).build();
  compareMergedQuery(ENTITY_QUERY, mergedEntityQuery);

  final KeyQuery mergedKeyQuery = new KeyQuery.Builder().mergeFrom(KEY_QUERY.toPb()).build();
  compareMergedQuery(KEY_QUERY, mergedKeyQuery);

  final ProjectionEntityQuery mergedProjectionQuery =
      new ProjectionEntityQuery.Builder().mergeFrom(PROJECTION_QUERY.toPb()).build();
  compareMergedQuery(PROJECTION_QUERY, mergedProjectionQuery);
}