Author: rhauch
Date: 2008-12-16 12:55:11 -0500 (Tue, 16 Dec 2008)
New Revision: 687
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/connectors/RepositoryConnection.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/RequestProcessorCacheTest.java
Log:
DNA-40
Added referential integrity checks to the delete process.
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/connectors/RepositoryConnection.java
===================================================================
---
trunk/dna-graph/src/main/java/org/jboss/dna/graph/connectors/RepositoryConnection.java 2008-12-16
16:19:33 UTC (rev 686)
+++
trunk/dna-graph/src/main/java/org/jboss/dna/graph/connectors/RepositoryConnection.java 2008-12-16
17:55:11 UTC (rev 687)
@@ -26,6 +26,8 @@
import net.jcip.annotations.NotThreadSafe;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.cache.CachePolicy;
+import org.jboss.dna.graph.properties.PathNotFoundException;
+import org.jboss.dna.graph.properties.ReferentialIntegrityException;
import org.jboss.dna.graph.requests.Request;
/**
@@ -85,6 +87,9 @@
*
* @param context the environment in which the commands are being executed; never
null
* @param request the request to be executed; never null
+ * @throws PathNotFoundException if the request(s) contain paths to nodes that do not
exist
+ * @throws ReferentialIntegrityException if the request is or contains a delete
operation, where the delete could not be
+ * performed because some references to deleted nodes would have remained
after the delete operation completed
* @throws RepositorySourceException if there is a problem loading the node data
*/
void execute( ExecutionContext context,
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java
===================================================================
---
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java 2008-12-16
16:19:33 UTC (rev 686)
+++
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java 2008-12-16
17:55:11 UTC (rev 687)
@@ -36,6 +36,7 @@
import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.Path;
import org.jboss.dna.graph.properties.Property;
+import org.jboss.dna.graph.properties.ReferentialIntegrityException;
import org.jboss.dna.graph.properties.basic.BasicEmptyProperty;
import org.jboss.dna.graph.requests.CompositeRequest;
import org.jboss.dna.graph.requests.CopyBranchRequest;
@@ -213,6 +214,8 @@
* </p>
*
* @param request the delete request
+ * @throws ReferentialIntegrityException if the delete could not be performed because
some references to deleted nodes would
+ * have remained after the delete operation completed
*/
public abstract void process( DeleteBranchRequest request );
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java 2008-12-16
16:19:33 UTC (rev 686)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java 2008-12-16
17:55:11 UTC (rev 687)
@@ -42,6 +42,7 @@
public static I18n unableToMoveRootNode;
public static I18n locationShouldHavePathAndOrProperty;
public static I18n invalidReferences;
+ public static I18n unableToDeleteBecauseOfReferences;
public static I18n basicModelDescription;
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-16
16:19:33 UTC (rev 686)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-16
17:55:11 UTC (rev 687)
@@ -792,67 +792,72 @@
int maxDepth = request.maximumDepth();
SubgraphQuery query = SubgraphQuery.create(getExecutionContext(), entities,
actualLocation.getUuid(), path, maxDepth);
- // Record all of the children ...
- Path parent = path;
- String parentUuid = actual.uuid;
- Location parentLocation = actualLocation;
- List<Location> children = new LinkedList<Location>();
- boolean includeChildrenOfNodesAtMaxDepth = true;
- for (ChildEntity child : query.getNodes(false,
includeChildrenOfNodesAtMaxDepth)) {
- String namespaceUri = child.getChildNamespace().getUri();
- String localName = child.getChildName();
- Name childName = nameFactory.create(namespaceUri, localName);
- int sns = child.getSameNameSiblingIndex();
- // Figure out who the parent is ...
- String childParentUuid = child.getId().getParentUuidString();
- if (!parentUuid.equals(childParentUuid)) {
- // The parent isn't the last parent, so record the children found
so far ...
+ try {
+ // Record all of the children ...
+ Path parent = path;
+ String parentUuid = actual.uuid;
+ Location parentLocation = actualLocation;
+ List<Location> children = new LinkedList<Location>();
+ boolean includeChildrenOfNodesAtMaxDepth = true;
+ for (ChildEntity child : query.getNodes(false,
includeChildrenOfNodesAtMaxDepth)) {
+ String namespaceUri = child.getChildNamespace().getUri();
+ String localName = child.getChildName();
+ Name childName = nameFactory.create(namespaceUri, localName);
+ int sns = child.getSameNameSiblingIndex();
+ // Figure out who the parent is ...
+ String childParentUuid = child.getId().getParentUuidString();
+ if (!parentUuid.equals(childParentUuid)) {
+ // The parent isn't the last parent, so record the children
found so far ...
+ request.setChildren(parentLocation, children);
+ // And find the correct parent ...
+ parentLocation = locationsByUuid.get(childParentUuid);
+ parent = parentLocation.getPath();
+ parentUuid = childParentUuid;
+ children = new LinkedList<Location>();
+ }
+ Path childPath = pathFactory.create(parent, childName, sns);
+ String childUuidString = child.getId().getChildUuidString();
+ Location childLocation = new Location(childPath,
UUID.fromString(childUuidString));
+ locationsByUuid.put(childUuidString, childLocation);
+ children.add(childLocation);
+ }
+ if (!children.isEmpty()) {
request.setChildren(parentLocation, children);
- // And find the correct parent ...
- parentLocation = locationsByUuid.get(childParentUuid);
- parent = parentLocation.getPath();
- parentUuid = childParentUuid;
- children = new LinkedList<Location>();
}
- Path childPath = pathFactory.create(parent, childName, sns);
- String childUuidString = child.getId().getChildUuidString();
- Location childLocation = new Location(childPath,
UUID.fromString(childUuidString));
- locationsByUuid.put(childUuidString, childLocation);
- children.add(childLocation);
- }
- if (!children.isEmpty()) {
- request.setChildren(parentLocation, children);
- }
- // Note that we've found children for nodes that are at the maximum
depth. This is so that the nodes
- // in the subgraph all have the correct children. However, we don't want
to store the properties for
- // any node whose depth is greater than the maximum depth. Therefore, only
get the properties that
- // include nodes within the maximum depth...
- includeChildrenOfNodesAtMaxDepth = false;
+ // Note that we've found children for nodes that are at the maximum
depth. This is so that the nodes
+ // in the subgraph all have the correct children. However, we don't
want to store the properties for
+ // any node whose depth is greater than the maximum depth. Therefore,
only get the properties that
+ // include nodes within the maximum depth...
+ includeChildrenOfNodesAtMaxDepth = false;
- // Now record all of the properties ...
- for (PropertiesEntity props : query.getProperties(true,
includeChildrenOfNodesAtMaxDepth)) {
- boolean compressed = props.isCompressed();
- int propertyCount = props.getPropertyCount();
- Collection<Property> properties = new
ArrayList<Property>(propertyCount);
- Location nodeLocation =
locationsByUuid.get(props.getId().getUuidString());
- assert nodeLocation != null;
- // Record the UUID as a property, since it's not stored in the
serialized properties...
- properties.add(actualLocation.getIdProperty(DnaLexicon.UUID));
- // Deserialize all the properties (except the UUID)...
- byte[] data = props.getData();
- if (data != null) {
- LargeValueSerializer largeValues = new LargeValueSerializer(props);
- ByteArrayInputStream bais = new ByteArrayInputStream(data);
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
- try {
- serializer.deserializeAllProperties(ois, properties,
largeValues);
- request.setProperties(nodeLocation, properties);
- } finally {
- ois.close();
+ // Now record all of the properties ...
+ for (PropertiesEntity props : query.getProperties(true,
includeChildrenOfNodesAtMaxDepth)) {
+ boolean compressed = props.isCompressed();
+ int propertyCount = props.getPropertyCount();
+ Collection<Property> properties = new
ArrayList<Property>(propertyCount);
+ Location nodeLocation =
locationsByUuid.get(props.getId().getUuidString());
+ assert nodeLocation != null;
+ // Record the UUID as a property, since it's not stored in the
serialized properties...
+ properties.add(actualLocation.getIdProperty(DnaLexicon.UUID));
+ // Deserialize all the properties (except the UUID)...
+ byte[] data = props.getData();
+ if (data != null) {
+ LargeValueSerializer largeValues = new
LargeValueSerializer(props);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.deserializeAllProperties(ois, properties,
largeValues);
+ request.setProperties(nodeLocation, properties);
+ } finally {
+ ois.close();
+ }
}
}
+ } finally {
+ // Close and release the temporary data used for this operation ...
+ query.close();
}
} catch (Throwable e) { // Includes PathNotFoundException
@@ -889,24 +894,35 @@
// Compute the subgraph, including the root ...
SubgraphQuery query = SubgraphQuery.create(getExecutionContext(), entities,
actualLocation.getUuid(), path, 0);
- ChildEntity deleted = query.getNode();
- String parentUuidString = deleted.getId().getParentUuidString();
- String childName = deleted.getChildName();
- long nsId = deleted.getChildNamespace().getId();
- int indexInParent = deleted.getIndexInParent();
+ try {
+ ChildEntity deleted = query.getNode();
+ String parentUuidString = deleted.getId().getParentUuidString();
+ String childName = deleted.getChildName();
+ long nsId = deleted.getChildNamespace().getId();
+ int indexInParent = deleted.getIndexInParent();
- // Get the locations of all deleted nodes, which will be required by events
...
- List<Location> deletedLocations = query.getNodeLocations(true, true);
+ // Get the locations of all deleted nodes, which will be required by
events ...
+ List<Location> deletedLocations = query.getNodeLocations(true,
true);
- // Now delete the subgraph ...
- query.deleteSubgraph(true);
+ // Now delete the subgraph ...
+ SubgraphQuery.Resolver resolver = new SubgraphQuery.Resolver() {
+ public Location getLocationFor( UUID uuid ) {
+ ActualLocation actual = getActualLocation(new Location(uuid));
+ return (actual != null) ? actual.location : null;
+ }
+ };
+ query.deleteSubgraph(true, resolver);
- // And adjust the SNS index and indexes ...
- ChildEntity.adjustSnsIndexesAndIndexesAfterRemoving(entities,
parentUuidString, childName, nsId, indexInParent);
- entities.flush();
+ // And adjust the SNS index and indexes ...
+ ChildEntity.adjustSnsIndexesAndIndexesAfterRemoving(entities,
parentUuidString, childName, nsId, indexInParent);
+ entities.flush();
- // Remove from the cache of children locations all entries for deleted nodes
...
- cache.removeBranch(deletedLocations);
+ // Remove from the cache of children locations all entries for deleted
nodes ...
+ cache.removeBranch(deletedLocations);
+ } finally {
+ // Close and release the temporary data used for this operation ...
+ query.close();
+ }
} catch (Throwable e) { // Includes PathNotFoundException
request.setError(e);
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-16
16:19:33 UTC (rev 686)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-16
17:55:11 UTC (rev 687)
@@ -39,13 +39,15 @@
*/
@Entity
@Table( name = "DNA_SUBGRAPH_NODES" )
-@org.hibernate.annotations.Table( appliesTo = "DNA_SUBGRAPH_NODES", indexes =
@Index( name = "QUERYID_INX", columnNames = {"QUERY_ID"} ) )
+@org.hibernate.annotations.Table( appliesTo = "DNA_SUBGRAPH_NODES", indexes =
@Index( name = "QUERYID_INX", columnNames = {
+ "QUERY_ID", "UUID"} ) )
@NamedQueries( {
@NamedQuery( name = "SubgraphNodeEntity.insertChildren", query =
"insert into
SubgraphNodeEntity(queryId,nodeUuid,depth,parentIndexInParent,indexInParent) select
parentNode.queryId, child.id.childUuidString, parentNode.depth+1,
parentNode.indexInParent, child.indexInParent from ChildEntity child, SubgraphNodeEntity
parentNode where child.id.parentUuidString = parentNode.nodeUuid and parentNode.queryId =
:queryId and parentNode.depth = :parentDepth" ),
@NamedQuery( name = "SubgraphNodeEntity.getCount", query = "select
count(*) from SubgraphNodeEntity where queryId = :queryId" ),
@NamedQuery( name = "SubgraphNodeEntity.getPropertiesEntities", query =
"select props from PropertiesEntity props, SubgraphNodeEntity node where
props.id.uuidString = node.nodeUuid and node.queryId = :queryId and node.depth >=
:depth and node.depth <= :maxDepth order by node.depth, node.parentIndexInParent,
node.indexInParent" ),
@NamedQuery( name =
"SubgraphNodeEntity.getPropertiesEntitiesWithLargeValues", query = "select
props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString =
node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and
size(props.largeValues) > 0" ),
@NamedQuery( name = "SubgraphNodeEntity.getChildEntities", query =
"select child from ChildEntity child, SubgraphNodeEntity node where
child.id.childUuidString = node.nodeUuid and node.queryId = :queryId and node.depth >=
:depth and node.depth <= :maxDepth order by node.depth, node.parentIndexInParent,
node.indexInParent" ),
+ @NamedQuery( name = "SubgraphNodeEntity.getReferenceThatWillBeInvalid",
query = "select ref from ReferenceEntity as ref where ref.id.toUuidString in ( select
node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and
ref.id.fromUuidString not in (select node.nodeUuid from SubgraphNodeEntity node where
node.queryId = :queryId)" ),
@NamedQuery( name = "SubgraphNodeEntity.deletePropertiesEntities", query =
"delete PropertiesEntity props where props.id.uuidString in ( select node.nodeUuid
from SubgraphNodeEntity node where node.queryId = :queryId and node.depth >= :depth
)" ),
@NamedQuery( name = "SubgraphNodeEntity.deleteChildEntities", query =
"delete ChildEntity child where child.id.childUuidString in ( select node.nodeUuid
from SubgraphNodeEntity node where node.queryId = :queryId and node.depth >= :depth
)" ),
@NamedQuery( name = "SubgraphNodeEntity.deleteByQueryId", query =
"delete SubgraphNodeEntity where queryId = :queryId" )} )
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-16
16:19:33 UTC (rev 686)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-16
17:55:11 UTC (rev 687)
@@ -21,6 +21,7 @@
*/
package org.jboss.dna.connector.store.jpa.models.basic;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -29,12 +30,16 @@
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
+import org.jboss.dna.connector.store.jpa.JpaConnectorI18n;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.Location;
import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.NameFactory;
import org.jboss.dna.graph.properties.Path;
import org.jboss.dna.graph.properties.PathFactory;
+import org.jboss.dna.graph.properties.Reference;
+import org.jboss.dna.graph.properties.ReferentialIntegrityException;
+import org.jboss.dna.graph.properties.ValueFactory;
/**
* Represents a temporary working area for a query that efficiently retrieves the nodes
in a subgraph. This class uses the
@@ -50,6 +55,10 @@
*/
public class SubgraphQuery {
+ public interface Resolver {
+ Location getLocationFor( UUID uuid );
+ }
+
/**
* Create a query that returns a subgraph at and below the node with the supplied
path and the supplied UUID.
*
@@ -283,10 +292,41 @@
return locations;
}
+ /**
+ * Delete the nodes in the subgraph. This method first checks for referential
integrity violations — references from nodes outside the subgraph to nodes that
would be deleted — before performing the delete.
+ *
+ * @param includeRoot true if the root node should also be deleted
+ * @param resolver the resolver that should be used to resolve UUIDs to the
corresponding paths; may not be null
+ * @throws ReferentialIntegrityException if the repository's references after the
delete would be invalid because they would
+ * reference nodes that are to be deleted
+ */
@SuppressWarnings( "unchecked" )
- public void deleteSubgraph( boolean includeRoot ) {
+ public void deleteSubgraph( boolean includeRoot,
+ Resolver resolver ) throws ReferentialIntegrityException
{
if (query == null) throw new IllegalStateException();
+ // Verify referential integrity: that none of the deleted nodes are referenced by
nodes not being deleted.
+ Query references =
manager.createNamedQuery("SubgraphNodeEntity.getReferenceThatWillBeInvalid");
+ references.setParameter("queryId", query.getId());
+ List<ReferenceEntity> invalidReferences = references.getResultList();
+ if (invalidReferences.size() > 0) {
+ ValueFactory<Reference> refFactory =
context.getValueFactories().getReferenceFactory();
+ Map<Location, List<Reference>> invalidRefs = new
HashMap<Location, List<Reference>>();
+ for (ReferenceEntity entity : invalidReferences) {
+ UUID fromUuid = UUID.fromString(entity.getId().getFromUuidString());
+ Location location = resolver.getLocationFor(fromUuid);
+ List<Reference> refs = invalidRefs.get(location);
+ if (refs == null) {
+ refs = new ArrayList<Reference>();
+ invalidRefs.put(location, refs);
+ }
+ UUID toUuid = UUID.fromString(entity.getId().getToUuidString());
+ refs.add(refFactory.create(toUuid));
+ }
+ String msg = JpaConnectorI18n.unableToDeleteBecauseOfReferences.text();
+ throw new ReferentialIntegrityException(invalidRefs, msg);
+ }
+
// Delete the PropertiesEntities ...
//
// Right now, Hibernate is not able to support deleting PropertiesEntity in bulk
because of the
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties 2008-12-16
16:19:33 UTC (rev 686)
+++
trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties 2008-12-16
17:55:11 UTC (rev 687)
@@ -32,5 +32,6 @@
unableToMoveRootNode = Unable to move the root node to another location in {0}
locationShouldHavePathAndOrProperty = The source {0} is unable to find a node without a
path or a {1} property
invalidReferences = One or more references were invalid in {0}
+unableToDeleteBecauseOfReferences = At least one deleted node is referenced by a node
that is not being deleted
basicModelDescription = Database model that stores node properties as opaque records and
children as transparent records. Large property values are stored separately.
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-16
16:19:33 UTC (rev 686)
+++
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-16
17:55:11 UTC (rev 687)
@@ -67,6 +67,7 @@
private List<Location> locations;
private String[] validLargeValues;
private SubgraphQuery query;
+ private SubgraphQuery.Resolver resolver;
@BeforeClass
public static void beforeAll() throws Exception {
@@ -97,6 +98,11 @@
factory = configurator.buildEntityManagerFactory();
manager = factory.createEntityManager();
namespaces = new Namespaces(manager);
+ resolver = new SubgraphQuery.Resolver() {
+ public Location getLocationFor( UUID uuid ) {
+ return new Location(uuid);
+ }
+ };
manager.getTransaction().begin();
@@ -440,7 +446,7 @@
verifyNextLocationIs("/a/a1/a2");
verifyNextLocationIs("/a/a1/a3");
verifyNoMoreLocations();
- query.deleteSubgraph(true);
+ query.deleteSubgraph(true, resolver);
query.close();
// Commit the transaction, and start another ...
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/RequestProcessorCacheTest.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/RequestProcessorCacheTest.java 2008-12-16
16:19:33 UTC (rev 686)
+++
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/RequestProcessorCacheTest.java 2008-12-16
17:55:11 UTC (rev 687)
@@ -222,15 +222,15 @@
assertThat(cache.getLocationFor(children2[6].getPath()), is(children2[6]));
assertThat(cache.getLocationFor(children2[7].getPath()), is(children2[7]));
- System.out.println("Before:");
- System.out.println(cache.getString(namespaces));
+ // System.out.println("Before:");
+ // System.out.println(cache.getString(namespaces));
// Move the branch (without a known index) ...
assertThat(cache.moveNode(oldLocation, -1, newLocation), is(true));
- System.out.println("After moving " +
oldLocation.getPath().getString(namespaces) + " to "
- + newLocation.getPath().getString(namespaces));
- System.out.println(cache.getString(namespaces));
+ // System.out.println("After moving " +
oldLocation.getPath().getString(namespaces) + " to "
+ // + newLocation.getPath().getString(namespaces));
+ // System.out.println(cache.getString(namespaces));
// Check the cache content, which should no longer have any content below the old
and new locations ...
LinkedList<Location> afterRemoval =
cache.getAllChildren(location.getPath());
@@ -347,8 +347,8 @@
assertThat(cache.getLocationFor(children2[6].getPath()), is(children2[6]));
assertThat(cache.getLocationFor(children2[7].getPath()), is(children2[7]));
- System.out.println("Before:");
- System.out.println(cache.getString(namespaces));
+ // System.out.println("Before:");
+ // System.out.println(cache.getString(namespaces));
// Create the locations that in the branch to be removed ...
List<Location> locationsToRemove = new LinkedList<Location>();
@@ -363,8 +363,8 @@
// Remove the branch ...
assertThat(cache.removeBranch(locationsToRemove), is(true));
- System.out.println("After removing " +
locationsToRemove.get(0).getString(namespaces));
- System.out.println(cache.getString(namespaces));
+ // System.out.println("After removing " +
locationsToRemove.get(0).getString(namespaces));
+ // System.out.println(cache.getString(namespaces));
// Check the cache content, which should no longer have any content below the old
and new locations ...
LinkedList<Location> afterRemoval =
cache.getAllChildren(location.getPath());