Author: rhauch
Date: 2008-12-17 10:54:36 -0500 (Wed, 17 Dec 2008)
New Revision: 690
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/CopyBranchRequest.java
trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties
trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
Log:
DNA-40
Completed the connector's method to process branch copy requests, and added more unit
tests. Fixed a bug in the CopyBranchRequest class - when setting the actual locations,
the request was incorrectly checking that the actual location matched that of the
"into" location (which is the parent of the new copy), rather than the actual
location of the new copy.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java 2008-12-17 15:51:44
UTC (rev 689)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java 2008-12-17 15:54:36
UTC (rev 690)
@@ -67,6 +67,7 @@
public static I18n multipleErrorsWhileExecutingRequests;
public static I18n unableToAddMoreRequestsToAlreadyExecutedBatch;
public static I18n actualLocationIsNotSameAsInputLocation;
+ public static I18n actualLocationIsNotChildOfInputLocation;
public static I18n actualLocationMustHavePath;
public static I18n actualNewLocationIsNotSameAsInputLocation;
public static I18n actualNewLocationMustHavePath;
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/CopyBranchRequest.java
===================================================================
---
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/CopyBranchRequest.java 2008-12-17
15:51:44 UTC (rev 689)
+++
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/CopyBranchRequest.java 2008-12-17
15:54:36 UTC (rev 690)
@@ -118,7 +118,7 @@
* processing the request, and the actual location must have a {@link
Location#getPath() path}.
*
* @param oldLocation the actual location of the node before being renamed
- * @param newLocation the actual location of the node after being renamed
+ * @param newLocation the actual location of the new copy of the node
* @throws IllegalArgumentException if the either location is null, if the old
location does not represent the
* {@link Location#isSame(Location) same location} as the {@link #from() from
location}, if the new location does not
* represent the {@link Location#isSame(Location) same location} as the
{@link #into() into location}, or if the
@@ -129,9 +129,7 @@
if (!from.isSame(oldLocation)) { // not same if actual is null
throw new
IllegalArgumentException(GraphI18n.actualLocationIsNotSameAsInputLocation.text(oldLocation,
from));
}
- if (!into.isSame(newLocation, false)) { // not same if actual is null
- throw new
IllegalArgumentException(GraphI18n.actualLocationIsNotSameAsInputLocation.text(newLocation,
into));
- }
+ CheckArg.isNotNull(newLocation, "newLocation");
assert oldLocation != null;
assert newLocation != null;
if (!oldLocation.hasPath()) {
@@ -140,6 +138,10 @@
if (!newLocation.hasPath()) {
throw new
IllegalArgumentException(GraphI18n.actualNewLocationMustHavePath.text(newLocation));
}
+ // The 'into' should be the parent of the 'newLocation' ...
+ if (into.hasPath() &&
!newLocation.getPath().getParent().equals(into.getPath())) {
+ throw new
IllegalArgumentException(GraphI18n.actualLocationIsNotChildOfInputLocation.text(newLocation,
into));
+ }
this.actualNewLocation = newLocation;
}
Modified: trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties
===================================================================
--- trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties 2008-12-17
15:51:44 UTC (rev 689)
+++ trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties 2008-12-17
15:54:36 UTC (rev 690)
@@ -54,6 +54,7 @@
multipleErrorsWhileExecutingRequests = {0} of the {1} requests resulted in errors
unableToAddMoreRequestsToAlreadyExecutedBatch = Unable to add more requests to a batch of
graph requests that has already been executed
actualLocationIsNotSameAsInputLocation = The actual location of {0} is not the same as
the current location of {1}
+actualLocationIsNotChildOfInputLocation = The actual location of {0} is not a child of
the specified location {1}
actualLocationMustHavePath = The actual location of {0} must have a path
actualNewLocationIsNotSameAsInputLocation = The actual new location of {0} is not the
same as the input location of {1}
actualNewLocationMustHavePath = The actual new location of {0} must have a path
Modified: trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java
===================================================================
--- trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java 2008-12-17 15:51:44
UTC (rev 689)
+++ trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java 2008-12-17 15:54:36
UTC (rev 690)
@@ -777,8 +777,16 @@
@Override
public void process( CopyBranchRequest request ) {
- // Just update the actual location
- request.setActualLocations(actualLocationOf(request.from()),
actualLocationOf(request.into()));
+ // Create a child under the new parent ...
+ if (request.into().hasPath()) {
+ Path childPath =
context.getValueFactories().getPathFactory().create(request.into().getPath(),
"child");
+ Location newChild = actualLocationOf(new Location(childPath));
+ // Just update the actual location
+ request.setActualLocations(actualLocationOf(request.from()), newChild);
+ } else {
+ // Just update the actual location
+ request.setActualLocations(actualLocationOf(request.from()),
actualLocationOf(request.into()));
+ }
}
@Override
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-17
15:51:44 UTC (rev 689)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-17
15:54:36 UTC (rev 690)
@@ -34,6 +34,7 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
@@ -185,71 +186,14 @@
String childNsUri = childName.getNamespaceUri();
NamespaceEntity ns = namespaces.get(childNsUri, true);
assert ns != null;
-
- // Figure out the next SNS index and index-in-parent for this new child ...
- int nextSnsIndex = 1; // SNS index is 1-based
- int nextIndexInParent = 0; // index-in-parent is 0-based
final Path parentPath = actual.location.getPath();
assert parentPath != null;
- // Look in the cache for the children of the parent node.
- LinkedList<Location> childrenOfParent =
cache.getAllChildren(parentPath);
- if (childrenOfParent != null) {
- // The cache had the complete list of children for the parent node, which
means
- // we know about all of the children and can walk the children to figure
out the next indexes.
- nextIndexInParent = childrenOfParent.size();
- if (nextIndexInParent > 1) {
- // Since we want the last indexes, process the list backwards ...
- ListIterator<Location> iter =
childrenOfParent.listIterator(childrenOfParent.size());
- while (iter.hasPrevious()) {
- Location existing = iter.previous();
- Path.Segment segment = existing.getPath().getLastSegment();
- if (!segment.getName().equals(childName)) continue;
- // Otherwise the name matched, so get the indexes ...
- nextSnsIndex = segment.getIndex() + 1;
- }
- }
- } else {
- // The cache did not have the complete list of children for the parent
node,
- // so we need to look the values up by querying the database ...
- // Find the largest SNS index in the existing ChildEntity objects with
the same name ...
- String childLocalName = childName.getLocalName();
- Query query =
entities.createNamedQuery("ChildEntity.findMaximumSnsIndex");
- query.setParameter("parentUuid", parentUuidString);
- query.setParameter("ns", ns.getId());
- query.setParameter("childName", childLocalName);
- try {
- Integer result = (Integer)query.getSingleResult();
- nextSnsIndex = result != null ? result + 1 : 1; // SNS index is
1-based
- } catch (NoResultException e) {
- }
+ // Figure out the next SNS index and index-in-parent for this new child ...
+ actualLocation = addNewChild(actual, uuidString, childName);
- // Find the largest child index in the existing ChildEntity objects ...
- query =
entities.createNamedQuery("ChildEntity.findMaximumChildIndex");
- query.setParameter("parentUuid", parentUuidString);
- try {
- Integer result = (Integer)query.getSingleResult();
- nextIndexInParent = result != null ? result + 1 : 0; //
index-in-parent is 0-based
- } catch (NoResultException e) {
- }
- }
-
- // Create the new ChildEntity ...
- ChildId id = new ChildId(parentUuidString, uuidString);
- ChildEntity entity = new ChildEntity(id, nextIndexInParent, ns,
childName.getLocalName(), nextSnsIndex);
- entities.persist(entity);
-
- // Set the actual path, regardless of the supplied path...
- Path path = pathFactory.create(parentPath, childName, nextSnsIndex);
- actualLocation = new Location(path, UUID.fromString(uuidString));
-
- // Finally, update the cache with the information we know ...
- if (childrenOfParent != null) {
- // Add to the cached list of children ...
- childrenOfParent.add(actualLocation);
- }
// Since we've just created this node, we know about all the children
(actually, there are none).
- cache.setAllChildren(path, new LinkedList<Location>());
+ cache.setAllChildren(actualLocation.getPath(), new
LinkedList<Location>());
// Flush the entities ...
// entities.flush();
@@ -262,6 +206,89 @@
request.setActualLocationOfNode(actualLocation);
}
+ protected Location addNewChild( ActualLocation parent,
+ String childUuid,
+ Name childName ) {
+ int nextSnsIndex = 1; // SNS index is 1-based
+ int nextIndexInParent = 0; // index-in-parent is 0-based
+ String childNsUri = childName.getNamespaceUri();
+ NamespaceEntity ns = namespaces.get(childNsUri, true);
+ assert ns != null;
+
+ final Path parentPath = parent.location.getPath();
+ assert parentPath != null;
+
+ // Look in the cache for the children of the parent node.
+ LinkedList<Location> childrenOfParent = cache.getAllChildren(parentPath);
+ if (childrenOfParent != null) {
+ // The cache had the complete list of children for the parent node, which
means
+ // we know about all of the children and can walk the children to figure out
the next indexes.
+ nextIndexInParent = childrenOfParent.size();
+ if (nextIndexInParent > 1) {
+ // Since we want the last indexes, process the list backwards ...
+ ListIterator<Location> iter =
childrenOfParent.listIterator(childrenOfParent.size());
+ while (iter.hasPrevious()) {
+ Location existing = iter.previous();
+ Path.Segment segment = existing.getPath().getLastSegment();
+ if (!segment.getName().equals(childName)) continue;
+ // Otherwise the name matched, so get the indexes ...
+ nextSnsIndex = segment.getIndex() + 1;
+ }
+ }
+ } else {
+ // The cache did not have the complete list of children for the parent node,
+ // so we need to look the values up by querying the database ...
+
+ // Find the largest SNS index in the existing ChildEntity objects with the
same name ...
+ String childLocalName = childName.getLocalName();
+ Query query =
entities.createNamedQuery("ChildEntity.findMaximumSnsIndex");
+ query.setParameter("parentUuid", parent.uuid);
+ query.setParameter("ns", ns.getId());
+ query.setParameter("childName", childLocalName);
+ try {
+ Integer result = (Integer)query.getSingleResult();
+ nextSnsIndex = result != null ? result + 1 : 1; // SNS index is 1-based
+ } catch (NoResultException e) {
+ }
+
+ // Find the largest child index in the existing ChildEntity objects ...
+ query =
entities.createNamedQuery("ChildEntity.findMaximumChildIndex");
+ query.setParameter("parentUuid", parent.uuid);
+ try {
+ Integer result = (Integer)query.getSingleResult();
+ nextIndexInParent = result != null ? result + 1 : 0; // index-in-parent
is 0-based
+ } catch (NoResultException e) {
+ }
+ }
+
+ // Create the new ChildEntity ...
+ ChildId id = new ChildId(parent.uuid, childUuid);
+ ChildEntity entity = new ChildEntity(id, nextIndexInParent, ns,
childName.getLocalName(), nextSnsIndex);
+ entities.persist(entity);
+
+ // Set the actual path, regardless of the supplied path...
+ Path path = pathFactory.create(parentPath, childName, nextSnsIndex);
+ Location actualLocation = new Location(path, UUID.fromString(childUuid));
+
+ // Finally, update the cache with the information we know ...
+ if (childrenOfParent != null) {
+ // Add to the cached list of children ...
+ childrenOfParent.add(actualLocation);
+ }
+ return actualLocation;
+ }
+
+ protected class NextChildIndexes {
+ protected final int nextIndexInParent;
+ protected final int nextSnsIndex;
+
+ protected NextChildIndexes( int nextIndexInParent,
+ int nextSnsIndex ) {
+ this.nextIndexInParent = nextIndexInParent;
+ this.nextSnsIndex = nextSnsIndex;
+ }
+ }
+
/**
* {@inheritDoc}
*
@@ -875,6 +902,85 @@
@Override
public void process( CopyBranchRequest request ) {
logger.trace(request.toString());
+ Location actualFromLocation = null;
+ Location actualToLocation = null;
+ try {
+ Location fromLocation = request.from();
+ ActualLocation actualFrom = getActualLocation(fromLocation);
+ actualFromLocation = actualFrom.location;
+ Path fromPath = actualFromLocation.getPath();
+
+ Location newParentLocation = request.into();
+ ActualLocation actualNewParent = getActualLocation(newParentLocation);
+ assert actualNewParent != null;
+
+ // Create a map that we'll use to record the new UUID for each of the
original nodes ...
+ Map<String, String> originalToNewUuid = new HashMap<String,
String>();
+
+ // Compute the subgraph, including the top node in the subgraph ...
+ SubgraphQuery query = SubgraphQuery.create(getExecutionContext(), entities,
actualFromLocation.getUuid(), fromPath, 0);
+ try {
+ // Walk through the original nodes, creating new ChildEntity object
(i.e., copy) for each original ...
+ List<ChildEntity> originalNodes = query.getNodes(true, true);
+ Iterator<ChildEntity> originalIter = originalNodes.iterator();
+
+ // Start with the original (top-level) node first, since we need to add
it to the list of children ...
+ if (originalIter.hasNext()) {
+ ChildEntity original = originalIter.next();
+
+ // Create a new UUID for the copy ...
+ String copyUuid = UUID.randomUUID().toString();
+ originalToNewUuid.put(original.getId().getChildUuidString(),
copyUuid);
+
+ // Now add the new copy of the original ...
+ Name childName = fromPath.getLastSegment().getName();
+ actualToLocation = addNewChild(actualNewParent, copyUuid,
childName);
+ }
+
+ // Now process the children in the subgraph ...
+ while (originalIter.hasNext()) {
+ ChildEntity original = originalIter.next();
+ String newParentUuidOfCopy =
originalToNewUuid.get(original.getId().getParentUuidString());
+ assert newParentUuidOfCopy != null;
+
+ // Create a new UUID for the copy ...
+ String copyUuid = UUID.randomUUID().toString();
+ originalToNewUuid.put(original.getId().getChildUuidString(),
copyUuid);
+
+ // Create the copy ...
+ ChildEntity copy = new ChildEntity(new ChildId(newParentUuidOfCopy,
copyUuid), original.getIndexInParent(),
+ original.getChildNamespace(),
original.getChildName(),
+
original.getSameNameSiblingIndex());
+ entities.persist(copy);
+ }
+ entities.flush();
+
+ // Now process the properties, creating a copy (note references are not
changed) ...
+ for (PropertiesEntity original : query.getProperties(true, true)) {
+ // Find the UUID of the copy ...
+ String copyUuid =
originalToNewUuid.get(original.getId().getUuidString());
+ assert copyUuid != null;
+
+ // Create the copy ...
+ PropertiesEntity copy = new PropertiesEntity(new NodeId(copyUuid));
+ copy.setCompressed(original.isCompressed());
+ copy.setData(original.getData());
+ copy.setPropertyCount(original.getPropertyCount());
+
copy.setReferentialIntegrityEnforced(original.isReferentialIntegrityEnforced());
+ entities.persist(copy);
+ }
+ entities.flush();
+
+ } finally {
+ // Close and release the temporary data used for this operation ...
+ query.close();
+ }
+
+ } catch (Throwable e) { // Includes PathNotFoundException
+ request.setError(e);
+ return;
+ }
+ request.setActualLocations(actualFromLocation, actualToLocation);
}
/**
@@ -892,7 +998,7 @@
actualLocation = actual.location;
Path path = actualLocation.getPath();
- // Compute the subgraph, including the root ...
+ // Compute the subgraph, including the top node in the subgraph ...
SubgraphQuery query = SubgraphQuery.create(getExecutionContext(), entities,
actualLocation.getUuid(), path, 0);
try {
ChildEntity deleted = query.getNode();
@@ -905,13 +1011,30 @@
List<Location> deletedLocations = query.getNodeLocations(true,
true);
// Now delete the subgraph ...
- SubgraphQuery.Resolver resolver = new SubgraphQuery.Resolver() {
- public Location getLocationFor( UUID uuid ) {
- ActualLocation actual = getActualLocation(new Location(uuid));
- return (actual != null) ? actual.location : null;
+ query.deleteSubgraph(true);
+
+ // Verify referential integrity: that none of the deleted nodes are
referenced by nodes not being deleted.
+ List<ReferenceEntity> invalidReferences =
query.getInvalidReferences();
+ if (invalidReferences.size() > 0) {
+ // Some of the references that remain will be invalid, since they
point to nodes that
+ // have just been deleted. Build up the information necessary to
produce a useful exception ...
+ ValueFactory<Reference> refFactory =
getExecutionContext().getValueFactories().getReferenceFactory();
+ Map<Location, List<Reference>> invalidRefs = new
HashMap<Location, List<Reference>>();
+ for (ReferenceEntity entity : invalidReferences) {
+ UUID fromUuid =
UUID.fromString(entity.getId().getFromUuidString());
+ ActualLocation actualFromLocation = getActualLocation(new
Location(fromUuid));
+ Location fromLocation = actualFromLocation.location;
+ List<Reference> refs = invalidRefs.get(fromLocation);
+ if (refs == null) {
+ refs = new ArrayList<Reference>();
+ invalidRefs.put(fromLocation, refs);
+ }
+ UUID toUuid = UUID.fromString(entity.getId().getToUuidString());
+ refs.add(refFactory.create(toUuid));
}
- };
- query.deleteSubgraph(true, resolver);
+ String msg =
JpaConnectorI18n.unableToDeleteBecauseOfReferences.text();
+ throw new ReferentialIntegrityException(invalidRefs, msg);
+ }
// And adjust the SNS index and indexes ...
ChildEntity.adjustSnsIndexesAndIndexesAfterRemoving(entities,
parentUuidString, childName, nsId, indexInParent);
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-17
15:51:44 UTC (rev 689)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-17
15:54:36 UTC (rev 690)
@@ -48,8 +48,9 @@
@NamedQuery( name =
"SubgraphNodeEntity.getPropertiesEntitiesWithLargeValues", query = "select
props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString =
node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and
size(props.largeValues) > 0" ),
@NamedQuery( name = "SubgraphNodeEntity.getChildEntities", query =
"select child from ChildEntity child, SubgraphNodeEntity node where
child.id.childUuidString = node.nodeUuid and node.queryId = :queryId and node.depth >=
:depth and node.depth <= :maxDepth order by node.depth, node.parentIndexInParent,
node.indexInParent" ),
@NamedQuery( name = "SubgraphNodeEntity.getReferenceThatWillBeInvalid",
query = "select ref from ReferenceEntity as ref where ref.id.toUuidString in ( select
node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and
ref.id.fromUuidString not in (select node.nodeUuid from SubgraphNodeEntity node where
node.queryId = :queryId)" ),
- @NamedQuery( name = "SubgraphNodeEntity.deletePropertiesEntities", query =
"delete PropertiesEntity props where props.id.uuidString in ( select node.nodeUuid
from SubgraphNodeEntity node where node.queryId = :queryId and node.depth >= :depth
)" ),
- @NamedQuery( name = "SubgraphNodeEntity.deleteChildEntities", query =
"delete ChildEntity child where child.id.childUuidString in ( select node.nodeUuid
from SubgraphNodeEntity node where node.queryId = :queryId and node.depth >= :depth
)" ),
+ @NamedQuery( name = "SubgraphNodeEntity.deletePropertiesEntities", query =
"delete PropertiesEntity props where props.id.uuidString in ( select node.nodeUuid
from SubgraphNodeEntity node where node.queryId = :queryId )" ),
+ @NamedQuery( name = "SubgraphNodeEntity.deleteChildEntities", query =
"delete ChildEntity child where child.id.childUuidString in ( select node.nodeUuid
from SubgraphNodeEntity node where node.queryId = :queryId )" ),
+ @NamedQuery( name = "SubgraphNodeEntity.deleteReferences", query =
"delete ReferenceEntity as ref where ref.id.fromUuidString in ( select node.nodeUuid
from SubgraphNodeEntity node where node.queryId = :queryId )" ),
@NamedQuery( name = "SubgraphNodeEntity.deleteByQueryId", query =
"delete SubgraphNodeEntity where queryId = :queryId" )} )
public class SubgraphNodeEntity {
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-17
15:51:44 UTC (rev 689)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-17
15:54:36 UTC (rev 690)
@@ -21,7 +21,6 @@
*/
package org.jboss.dna.connector.store.jpa.models.basic;
-import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -30,16 +29,12 @@
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
-import org.jboss.dna.connector.store.jpa.JpaConnectorI18n;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.Location;
import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.NameFactory;
import org.jboss.dna.graph.properties.Path;
import org.jboss.dna.graph.properties.PathFactory;
-import org.jboss.dna.graph.properties.Reference;
-import org.jboss.dna.graph.properties.ReferentialIntegrityException;
-import org.jboss.dna.graph.properties.ValueFactory;
/**
* Represents a temporary working area for a query that efficiently retrieves the nodes
in a subgraph. This class uses the
@@ -55,10 +50,6 @@
*/
public class SubgraphQuery {
- public interface Resolver {
- Location getLocationFor( UUID uuid );
- }
-
/**
* Create a query that returns a subgraph at and below the node with the supplied
path and the supplied UUID.
*
@@ -293,40 +284,28 @@
}
/**
- * Delete the nodes in the subgraph. This method first checks for
+ * Determine whether there are any invalid references (typically called after {@link
#deleteSubgraph(boolean)}).
*
- * @param includeRoot true if the root node should also be deleted
- * @param resolver the resolver that should be used to resolve UUIDs to the
corresponding paths; may not be null
- * @throws ReferentialIntegrityException if the repository's references after the
delete would be invalid because they would
- * reference nodes that are to be deleted
+ * @return the list of references that are no longer valid
*/
@SuppressWarnings( "unchecked" )
- public void deleteSubgraph( boolean includeRoot,
- Resolver resolver ) throws ReferentialIntegrityException
{
- if (query == null) throw new IllegalStateException();
-
+ public List<ReferenceEntity> getInvalidReferences() {
// Verify referential integrity: that none of the deleted nodes are referenced by
nodes not being deleted.
Query references =
manager.createNamedQuery("SubgraphNodeEntity.getReferenceThatWillBeInvalid");
references.setParameter("queryId", query.getId());
- List<ReferenceEntity> invalidReferences = references.getResultList();
- if (invalidReferences.size() > 0) {
- ValueFactory<Reference> refFactory =
context.getValueFactories().getReferenceFactory();
- Map<Location, List<Reference>> invalidRefs = new
HashMap<Location, List<Reference>>();
- for (ReferenceEntity entity : invalidReferences) {
- UUID fromUuid = UUID.fromString(entity.getId().getFromUuidString());
- Location location = resolver.getLocationFor(fromUuid);
- List<Reference> refs = invalidRefs.get(location);
- if (refs == null) {
- refs = new ArrayList<Reference>();
- invalidRefs.put(location, refs);
- }
- UUID toUuid = UUID.fromString(entity.getId().getToUuidString());
- refs.add(refFactory.create(toUuid));
- }
- String msg = JpaConnectorI18n.unableToDeleteBecauseOfReferences.text();
- throw new ReferentialIntegrityException(invalidRefs, msg);
- }
+ return references.getResultList();
+ }
+ /**
+ * Delete the nodes in the subgraph. This method does not check for referential
integrity (see
+ * {@link #getInvalidReferences()}).
+ *
+ * @param includeRoot true if the root node should also be deleted
+ */
+ @SuppressWarnings( "unchecked" )
+ public void deleteSubgraph( boolean includeRoot ) {
+ if (query == null) throw new IllegalStateException();
+
// Delete the PropertiesEntities ...
//
// Right now, Hibernate is not able to support deleting PropertiesEntity in bulk
because of the
@@ -349,15 +328,18 @@
// Delete the PropertiesEntities, none of which will have large values ...
Query delete =
manager.createNamedQuery("SubgraphNodeEntity.deletePropertiesEntities");
delete.setParameter("queryId", query.getId());
- delete.setParameter("depth", includeRoot ? 0 : 1);
delete.executeUpdate();
// Delete the ChildEntities ...
delete =
manager.createNamedQuery("SubgraphNodeEntity.deleteChildEntities");
delete.setParameter("queryId", query.getId());
- delete.setParameter("depth", includeRoot ? 0 : 1);
delete.executeUpdate();
+ // Delete references ...
+ delete =
manager.createNamedQuery("SubgraphNodeEntity.deleteReferences");
+ delete.setParameter("queryId", query.getId());
+ delete.executeUpdate();
+
// Delete unused large values ...
LargeValueEntity.deleteUnused(manager);
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-17
15:51:44 UTC (rev 689)
+++
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-17
15:54:36 UTC (rev 690)
@@ -692,6 +692,188 @@
}
@Test
+ public void shouldCopyNodeWithChildren() {
+ // Create the tree (a total of 40 nodes, plus the extra 6 added later)...
+ // /
+ // /node1
+ // /node1/node1
+ // /node1/node1/node1
+ // /node1/node1/node2
+ // /node1/node1/node3
+ // /node1/node2
+ // /node1/node2/node1
+ // /node1/node2/node2
+ // /node1/node2/node3
+ // /node1/node3
+ // /node1/node3/node1
+ // /node1/node3/node2
+ // /node1/node3/node3
+ // /node2
+ // /node2/node1
+ // /node2/node1/node1
+ // /node2/node1/node2
+ // /node2/node1/node3
+ // /node2/node2
+ // /node2/node2/node1
+ // /node2/node2/node2
+ // /node2/node2/node3
+ // /node2/node3
+ // /node2/node3/node1
+ // /node2/node3/node2
+ // /node2/node3/node3
+ // /node3
+ // /node3/node1
+ // /node3/node1/node1
+ // /node3/node1/node2
+ // /node3/node1/node3
+ // /node3/node2
+ // /node3/node2/node1
+ // /node3/node2/node2
+ // /node3/node2/node3
+ // /node3/node3
+ // /node3/node3/node1
+ // /node3/node3/node2
+ // /node3/node3/node3
+ // /secondBranch1
+ // /secondBranch1/secondBranch1
+ // /secondBranch1/secondBranch2
+ // /secondBranch2
+ // /secondBranch2/secondBranch1
+ // /secondBranch2/secondBranch2
+
+ numPropsOnEach = 3;
+ createTree("", 3, 3, numPropsOnEach, null, true, false);
+
+ // Copy a branch ...
+ graph.copy("/node2").into("/node3");
+
+ assertThat(graph.getChildren().of("/node1"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node1/node1"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node1/node2"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node1/node3"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node1/node3/node1"),
hasChildren());
+
+ // The original of the copy should still exist ...
+ assertThat(graph.getChildren().of("/node2"),
hasChildren(child("node1"), child("node3")));
+ assertThat(graph.getChildren().of("/node2/node1"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node2/node3"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node2/node3/node1"),
hasChildren());
+
+ assertThat(graph.getChildren().of("/node3"),
hasChildren(child("node2[1]"), child("node3"),
child("node2[2]")));
+ assertThat(graph.getChildren().of("/node3/node2[1]"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node3/node3"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node3/node3/node1"),
hasChildren());
+ assertThat(graph.getChildren().of("/node3/node2[2]"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node3/node2[2]/node1"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node3/node2[2]/node2"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node3/node2[2]/node3"),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(graph.getChildren().of("/node3/node2[2]/node1/node1"),
hasChildren());
+
+ Subgraph subgraph = graph.getSubgraphOfDepth(4).at("/node3");
+ assertThat(subgraph, is(notNullValue()));
+ assertThat(subgraph.getNode(".").getChildren(),
hasChildren(child("node2"), child("node3")));
+ assertThat(subgraph.getNode("."), hasProperty("property1",
"The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("."), hasProperty("property2",
"The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("."), hasProperty("property3",
"The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[1]").getChildren(),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(subgraph.getNode("node2[1]"),
hasProperty("property1", "The quick brown fox jumped over the moon. What?
"));
+ assertThat(subgraph.getNode("node2[1]"),
hasProperty("property2", "The quick brown fox jumped over the moon. What?
"));
+ assertThat(subgraph.getNode("node2[1]"),
hasProperty("property3", "The quick brown fox jumped over the moon. What?
"));
+ assertThat(subgraph.getNode("node3").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node3"),
hasProperty("property1", "The quick brown fox jumped over the moon. What?
"));
+ assertThat(subgraph.getNode("node3"),
hasProperty("property2", "The quick brown fox jumped over the moon. What?
"));
+ assertThat(subgraph.getNode("node3"),
hasProperty("property3", "The quick brown fox jumped over the moon. What?
"));
+ assertThat(subgraph.getNode("node2[2]").getChildren(),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(subgraph.getNode("node2[2]"),
hasProperty("property1", "The quick brown fox jumped over the moon. What?
"));
+ assertThat(subgraph.getNode("node2[2]"),
hasProperty("property2", "The quick brown fox jumped over the moon. What?
"));
+ assertThat(subgraph.getNode("node2[2]"),
hasProperty("property3", "The quick brown fox jumped over the moon. What?
"));
+ assertThat(subgraph.getNode("node2[2]/node1").getChildren(),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(subgraph.getNode("node2[2]/node1"),
hasProperty("property1",
+ "The quick brown
fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1"),
hasProperty("property2",
+ "The quick brown
fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1"),
hasProperty("property3",
+ "The quick brown
fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node1").getChildren(),
isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node1/node1"),
hasProperty("property1",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node1"),
hasProperty("property2",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node1"),
hasProperty("property3",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node2").getChildren(),
isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node1/node2"),
hasProperty("property1",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node2"),
hasProperty("property2",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node2"),
hasProperty("property3",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node3").getChildren(),
isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node1/node3"),
hasProperty("property1",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node3"),
hasProperty("property2",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node3"),
hasProperty("property3",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2").getChildren(),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(subgraph.getNode("node2[2]/node2"),
hasProperty("property1",
+ "The quick brown
fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2"),
hasProperty("property2",
+ "The quick brown
fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2"),
hasProperty("property3",
+ "The quick brown
fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node1").getChildren(),
isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node2/node1"),
hasProperty("property1",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node1"),
hasProperty("property2",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node1"),
hasProperty("property3",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node2").getChildren(),
isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node2/node2"),
hasProperty("property1",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node2"),
hasProperty("property2",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node2"),
hasProperty("property3",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node3").getChildren(),
isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node2/node3"),
hasProperty("property1",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node3"),
hasProperty("property2",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node3"),
hasProperty("property3",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3").getChildren(),
hasChildren(child("node1"), child("node2"),
child("node3")));
+ assertThat(subgraph.getNode("node2[2]/node3"),
hasProperty("property1",
+ "The quick brown
fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3"),
hasProperty("property2",
+ "The quick brown
fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3"),
hasProperty("property3",
+ "The quick brown
fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node1").getChildren(),
isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node3/node1"),
hasProperty("property1",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node1"),
hasProperty("property2",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node1"),
hasProperty("property3",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node2").getChildren(),
isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node3/node2"),
hasProperty("property1",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node2"),
hasProperty("property2",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node2"),
hasProperty("property3",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node3").getChildren(),
isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node3/node3"),
hasProperty("property1",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node3"),
hasProperty("property2",
+ "The quick
brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node3"),
hasProperty("property3",
+ "The quick
brown fox jumped over the moon. What? "));
+ }
+
+ @Test
public void shouldReadRangeOfChildren() {
// Create a shallow tree with many children under one node ...
// /
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-17
15:51:44 UTC (rev 689)
+++
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-17
15:54:36 UTC (rev 690)
@@ -27,6 +27,7 @@
import static org.junit.matchers.IsCollectionContaining.hasItems;
import java.io.UnsupportedEncodingException;
import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -67,7 +68,6 @@
private List<Location> locations;
private String[] validLargeValues;
private SubgraphQuery query;
- private SubgraphQuery.Resolver resolver;
@BeforeClass
public static void beforeAll() throws Exception {
@@ -98,11 +98,6 @@
factory = configurator.buildEntityManagerFactory();
manager = factory.createEntityManager();
namespaces = new Namespaces(manager);
- resolver = new SubgraphQuery.Resolver() {
- public Location getLocationFor( UUID uuid ) {
- return new Location(uuid);
- }
- };
manager.getTransaction().begin();
@@ -189,6 +184,23 @@
uuidByPath.put(path, childUuid);
}
+ protected ReferenceEntity createReferenceBetween( String fromPathStr,
+ String toPathStr ) {
+ Path fromPath = path(fromPathStr);
+ Path toPath = path(toPathStr);
+
+ // Look up the UUIDs ...
+ UUID fromUuid = uuidByPath.get(fromPath);
+ UUID toUuid = uuidByPath.get(toPath);
+ assert fromUuid != null;
+ assert toUuid != null;
+
+ // Now create a reference entity ...
+ ReferenceEntity entity = new ReferenceEntity(new ReferenceId(fromUuid.toString(),
toUuid.toString()));
+ manager.persist(entity);
+ return entity;
+ }
+
protected UUID uuidForPath( String pathStr ) {
Path path = path(pathStr);
return uuidByPath.get(path);
@@ -446,7 +458,8 @@
verifyNextLocationIs("/a/a1/a2");
verifyNextLocationIs("/a/a1/a3");
verifyNoMoreLocations();
- query.deleteSubgraph(true, resolver);
+ query.deleteSubgraph(true);
+ assertThat(query.getInvalidReferences().isEmpty(), is(true));
query.close();
// Commit the transaction, and start another ...
@@ -494,8 +507,92 @@
// Now, load the one node remaining with
}
- // @Test
- // public void shouldCreateMultipleSubgraphQueriesInDatabase() {
- // }
+ @Test
+ public void
shouldNotDeleteSubgraphThatHasNodesReferencedByOtherNodesNotBeingDeleted() throws
Exception {
+ // Verify that all the nodes with large values do indeed have them ...
+ verifyNodesHaveLargeValues("/a/a1", "/a/a2",
"/a/a2/a1");
+ // Count the number of objects ...
+ assertThat((Long)manager.createQuery("select count(*) from
LargeValueEntity").getSingleResult(), is(3L));
+ assertThat((Long)manager.createQuery("select count(*) from
PropertiesEntity").getSingleResult(), is(14L));
+ assertThat((Long)manager.createQuery("select count(*) from
ChildEntity").getSingleResult(), is(14L));
+
+ // Create references from the "/a/a2" (not being deleted) branch, to
the branch being deleted...
+ List<ReferenceEntity> expectedInvalidRefs = new
ArrayList<ReferenceEntity>();
+ expectedInvalidRefs.add(createReferenceBetween("/a/a2",
"/a/a1"));
+ expectedInvalidRefs.add(createReferenceBetween("/a/a2/a1",
"/a/a1/a1"));
+ expectedInvalidRefs.add(createReferenceBetween("/a/a2/a2",
"/a/a1/a2"));
+
+ // Create references between nodes in the branch being deleted (these
shouldn't matter) ...
+ createReferenceBetween("/a/a1", "/a/a1/a1");
+ createReferenceBetween("/a/a1/a2", "/a/a1/a3");
+
+ // Delete "/a/a1". Note that "/a/a1" has a large value that
is shared by "/a/a2", but it's also the only
+ // user of large value #1.
+ Path path = path("/a/a1");
+ UUID uuid = uuidByPath.get(path);
+
+ query = SubgraphQuery.create(context, manager, uuid, path, Integer.MAX_VALUE);
+ locations = query.getNodeLocations(true, true);
+ verifyNextLocationIs("/a/a1");
+ verifyNextLocationIs("/a/a1/a1");
+ verifyNextLocationIs("/a/a1/a2");
+ verifyNextLocationIs("/a/a1/a3");
+ verifyNoMoreLocations();
+ query.deleteSubgraph(true);
+
+ // Now there should be invalid references ...
+ List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ assertThat(invalidReferences.size(), is(3));
+ invalidReferences.removeAll(expectedInvalidRefs);
+ assertThat(invalidReferences.size(), is(0));
+ query.close();
+ }
+
+ @SuppressWarnings( "unchecked" )
+ @Test
+ public void shouldDeleteSubgraphThatHasInternalReferences() throws Exception {
+ // Verify that all the nodes with large values do indeed have them ...
+ verifyNodesHaveLargeValues("/a/a1", "/a/a2",
"/a/a2/a1");
+
+ // Count the number of objects ...
+ assertThat((Long)manager.createQuery("select count(*) from
LargeValueEntity").getSingleResult(), is(3L));
+ assertThat((Long)manager.createQuery("select count(*) from
PropertiesEntity").getSingleResult(), is(14L));
+ assertThat((Long)manager.createQuery("select count(*) from
ChildEntity").getSingleResult(), is(14L));
+
+ // Create references from the nodes that aren't being deleted (these
won't matter, but will remain)...
+ List<ReferenceEntity> expectedValidRefs = new
ArrayList<ReferenceEntity>();
+ expectedValidRefs.add(createReferenceBetween("/a/a2",
"/a/a2/a1"));
+
+ // Create references between nodes in the branch being deleted (these
shouldn't matter) ...
+ createReferenceBetween("/a/a1", "/a/a1/a1");
+ createReferenceBetween("/a/a1/a2", "/a/a1/a3");
+
+ // Delete "/a/a1". Note that "/a/a1" has a large value that
is shared by "/a/a2", but it's also the only
+ // user of large value #1.
+ Path path = path("/a/a1");
+ UUID uuid = uuidByPath.get(path);
+
+ query = SubgraphQuery.create(context, manager, uuid, path, Integer.MAX_VALUE);
+ locations = query.getNodeLocations(true, true);
+ verifyNextLocationIs("/a/a1");
+ verifyNextLocationIs("/a/a1/a1");
+ verifyNextLocationIs("/a/a1/a2");
+ verifyNextLocationIs("/a/a1/a3");
+ verifyNoMoreLocations();
+ query.deleteSubgraph(true);
+
+ // Now there should be no invalid references ...
+ List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ assertThat(invalidReferences.size(), is(0));
+ query.close();
+
+ // Only the valid (external) reference should remain ...
+ Query refQuery = manager.createQuery("select ref from ReferenceEntity as
ref");
+ List<ReferenceEntity> remainingReferences = refQuery.getResultList();
+ assertThat(remainingReferences.size(), is(1));
+ remainingReferences.removeAll(expectedValidRefs);
+ assertThat(remainingReferences.size(), is(0));
+ }
+
}