// NOTE(review): fragment — tail of a key/entity query builder chain (namespace, kind,
// ancestor filter on userKey, result limit). The builder factory call and the variable
// this is assigned to precede this chunk and are not visible here.
.setNamespace(NAMESPACE) .setKind(COMMENT_KIND) .setFilter(PropertyFilter.hasAncestor(userKey)) .setLimit(limit) .build();
/**
 * Transactionally deletes the user identified by {@code userKey} together with all of its
 * descendant comment entities. Prints a message and returns without touching the
 * transaction when the user entity does not exist.
 */
@Override
public void run(Transaction tx, Key userKey, Void arg) {
  Entity userEntity = tx.get(userKey);
  if (userEntity == null) {
    System.out.println("Nothing to delete, user does not exist.");
    return;
  }
  // Keys-only ancestor query: we only need the comment keys in order to delete them.
  Query<Key> commentKeyQuery = Query.newKeyQueryBuilder()
      .setNamespace(NAMESPACE)
      .setKind(COMMENT_KIND)
      .setFilter(PropertyFilter.hasAncestor(userKey))
      .build();
  QueryResults<Key> commentKeys = tx.run(commentKeyQuery);
  int deletedComments = 0;
  for (; commentKeys.hasNext(); deletedComments++) {
    tx.delete(commentKeys.next());
  }
  tx.delete(userKey);
  System.out.printf("Deleting user '%s' and %d comment[s].%n", userKey.getName(), deletedComments);
}
// NOTE(review): fragment — two identical builder tails (KIND1 ancestor query under
// ROOT_KEY) surrounding a strongly-consistent read via getStronglyConsistentResults.
// The builder factory calls and the scQuery1/query1 declarations are outside this view.
.setNamespace(NAMESPACE) .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .build(); Iterator<Entity> results1 = getStronglyConsistentResults(scQuery1, query1); .setNamespace(NAMESPACE) .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .build();
private <T> void resolveDescendantProperties(DatastorePersistentEntity datastorePersistentEntity, BaseEntity entity, T convertedObject) { datastorePersistentEntity .doWithDescendantProperties((descendantPersistentProperty) -> { Class descendantType = descendantPersistentProperty .getComponentType(); EntityQuery descendantQuery = Query.newEntityQueryBuilder() .setKind(this.datastoreMappingContext .getPersistentEntity(descendantType).kindName()) .setFilter(PropertyFilter.hasAncestor((Key) entity.getKey())) .build(); datastorePersistentEntity.getPropertyAccessor(convertedObject) .setProperty(descendantPersistentProperty, // Converting the collection type. this.datastoreEntityConverter.getConversions() .convertOnRead( convertEntitiesForRead( getDatastoreReadWriter() .run(descendantQuery), descendantType), descendantPersistentProperty .getType(), descendantType)); }); }
/** * Strongly consistently read all active states */ Map<WorkflowInstance, RunState> readActiveStates() throws IOException { // Strongly read active state keys from index shards in parallel final List<Key> keys = gatherIO(activeWorkflowInstanceIndexShardKeys(datastore.newKeyFactory()).stream() .map(key -> asyncIO(() -> datastore.query(Query.newEntityQueryBuilder() .setFilter(PropertyFilter.hasAncestor(key)) .setKind(KIND_ACTIVE_WORKFLOW_INSTANCE_INDEX_SHARD_ENTRY) .build()))) .collect(toList()), 30, TimeUnit.SECONDS) .stream() .flatMap(Collection::stream) .map(entity -> entity.getKey().getName()) .map(name -> activeWorkflowInstanceKey(datastore.newKeyFactory(), name)) .collect(toList()); // Strongly consistently read values for the above keys in parallel return gatherIO(Lists.partition(keys, MAX_NUMBER_OF_ENTITIES_IN_ONE_BATCH_READ).stream() .map(batch -> asyncIO(() -> readRunStateBatch(batch))) .collect(toList()), 30, TimeUnit.SECONDS) .stream() .flatMap(Collection::stream) .collect(toMap(RunState::workflowInstance, Function.identity())); }
/**
 * Returns all workflows stored under the given component, deserialized from their JSON
 * property. Entities lacking the JSON property, or whose JSON fails to parse, are
 * skipped (parse failures are logged as warnings).
 */
public List<Workflow> workflows(String componentId) throws IOException {
  final Key componentKey = componentKey(datastore.newKeyFactory(), componentId);
  final EntityQuery workflowQuery = Query.newEntityQueryBuilder()
      .setKind(KIND_WORKFLOW)
      .setFilter(PropertyFilter.hasAncestor(componentKey))
      .build();
  final List<Workflow> workflows = Lists.newArrayList();
  datastore.query(workflowQuery, entity -> {
    // Skip entities that never had their JSON payload written.
    if (!entity.contains(PROPERTY_WORKFLOW_JSON)) {
      return;
    }
    try {
      workflows.add(
          OBJECT_MAPPER.readValue(entity.getString(PROPERTY_WORKFLOW_JSON), Workflow.class));
    } catch (IOException e) {
      LOG.warn("Failed to read workflow {}.", entity.getKey(), e);
    }
  });
  return workflows;
}
private <T> void resolveDescendantProperties(DatastorePersistentEntity datastorePersistentEntity, BaseEntity entity, T convertedObject) { datastorePersistentEntity .doWithDescendantProperties((descendantPersistentProperty) -> { Class descendantType = descendantPersistentProperty .getComponentType(); EntityQuery descendantQuery = Query.newEntityQueryBuilder() .setKind(this.datastoreMappingContext .getPersistentEntity(descendantType).kindName()) .setFilter(PropertyFilter.hasAncestor((Key) entity.getKey())) .build(); datastorePersistentEntity.getPropertyAccessor(convertedObject) .setProperty(descendantPersistentProperty, // Converting the collection type. this.datastoreEntityConverter.getConversions() .convertOnRead( convertEntitiesForRead( getDatastoreReadWriter() .run(descendantQuery), descendantType), descendantPersistentProperty .getType(), descendantType)); }); }
/**
 * Modifies this instance: narrows the underlying filter to descendants of the given
 * ancestor, which may be supplied as a key or as an entity.
 */
void setAncestor(final Object keyOrEntity) {
  final com.google.cloud.datastore.Key ancestorKey =
      loader.ofy.factory().keys().anythingToRawKey(keyOrEntity);
  this.actual = this.actual.andFilter(PropertyFilter.hasAncestor(ancestorKey));
}
// NOTE(review): fragment — a sequence of ancestor-query builder chains under ROOT_KEY:
// two leading/trailing tails whose builder factory calls are not visible here, plus two
// complete-but-unassigned expressions (an entity query on kind "bla" and a key query on
// KIND1). Presumably these are arguments or assignments in surrounding code — verify
// against the full file.
.setNamespace(NAMESPACE) .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .build(); .setNamespace(NAMESPACE) .setKind(KIND2) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setOrderBy(OrderBy.asc("__key__")) .build(); Query.newEntityQueryBuilder() .setNamespace(NAMESPACE) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setKind("bla") .build(); Query.newKeyQueryBuilder() .setNamespace(NAMESPACE) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setKind(KIND1) .build(); .setNamespace(NAMESPACE) .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .build();
// NOTE(review): fragment — four detached query builder chains over KIND1/KIND2 with
// ancestor filters under ROOT_KEY. The last chain lacks a terminating .build() (cut off
// by this view) and calls setFilter twice; on StructuredQuery.Builder the second
// setFilter replaces the first, so the ancestor filter would be lost — confirm against
// the full file whether a CompositeFilter.and(...) was intended.
Query.newEntityQueryBuilder() .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setOrderBy(OrderBy.asc("__key__")) .build(); Query.newKeyQueryBuilder() .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .build(); Query.newProjectionEntityQueryBuilder() .setKind(KIND1) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setProjection("__key__") .build(); Query.newProjectionEntityQueryBuilder() .setKind(KIND2) .setFilter(PropertyFilter.hasAncestor(ROOT_KEY)) .setProjection("age") .setFilter(PropertyFilter.gt("age", 18))
// NOTE(review): fragment — detached entity-query expression (KIND2 descendants of KEY2
// in NAMESPACE) whose built value is unused here; presumably assigned or passed in the
// surrounding (not visible) code.
Query.newEntityQueryBuilder() .setKind(KIND2) .setFilter(PropertyFilter.hasAncestor(KEY2)) .setNamespace(NAMESPACE) .build();
@Test public void testTransactionWithQuery() { Query<Entity> query = Query.newEntityQueryBuilder() .setKind(KIND2) .setFilter(PropertyFilter.hasAncestor(KEY2)) .build(); Transaction transaction = datastore.newTransaction(); QueryResults<Entity> results = transaction.run(query); assertEquals(ENTITY2, results.next()); assertFalse(results.hasNext()); transaction.add(ENTITY3); transaction.commit(); assertEquals(ENTITY3, datastore.get(KEY3)); transaction = datastore.newTransaction(); results = transaction.run(query); assertEquals(ENTITY2, results.next()); transaction.delete(ENTITY3.getKey()); // update entity2 during the transaction datastore.put(Entity.newBuilder(ENTITY2).clear().build()); try { transaction.commit(); fail("Expecting a failure"); } catch (DatastoreException expected) { assertEquals("ABORTED", expected.getReason()); } }
/** Example of running a query to find all entities with an ancestor. */ // [TARGET run(Query)] // [VARIABLE "my_parent_key_name"] public List<Entity> run(String parentKeyName) { Datastore datastore = transaction.getDatastore(); // [START run] KeyFactory keyFactory = datastore.newKeyFactory().setKind("ParentKind"); Key parentKey = keyFactory.newKey(parentKeyName); // Build a query Query<Entity> query = Query.newEntityQueryBuilder() .setKind("MyKind") .setFilter(PropertyFilter.hasAncestor(parentKey)) .build(); QueryResults<Entity> results = transaction.run(query); List<Entity> entities = Lists.newArrayList(); while (results.hasNext()) { Entity result = results.next(); // do something with result entities.add(result); } transaction.commit(); // [END run] return entities; }