Author: rhauch
Date: 2008-12-18 17:52:23 -0500 (Thu, 18 Dec 2008)
New Revision: 693
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java
Log:
DNA-40
Per JSR-283 section 5.1.7, corrected this connector's copy behavior for how
"internal" references (from nodes within the subgraph to nodes also within the
subgraph) are remapped in the copy, so that the copy's references are also internal to
the copy. Also verified that "outward" references (from nodes within the
subgraph to nodes outside of the subgraph) are copied correctly, and that
"inward" references (from nodes outside of the original subgraph to nodes in the
original subgraph) are not affected by the copy operation (since the original subgraph is
not modified).
Note that JSR-170 does not define the semantics of how references are handled in a copy
operation.
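
For readers of the log, the remapping rule applied to each reference during a subgraph copy can be summarized in a minimal sketch. This is an illustration only and is not part of this commit; the class and method names are hypothetical, and the map is the same kind of original-UUID-to-copy-UUID map the connector builds while copying.

    import java.util.Map;

    class SubgraphCopyReferenceRule {
        // 'originalToNewUuid' maps the UUID of each node in the original subgraph to the
        // UUID of its copy. Returns {newFrom, newTo} for the reference to create in the
        // copy, or null if the reference is not owned by a copied node and is left alone.
        static String[] remapForCopy( String fromUuid,
                                      String toUuid,
                                      Map<String, String> originalToNewUuid ) {
            String newFrom = originalToNewUuid.get(fromUuid);
            if (newFrom == null) {
                // "Inward" reference: owned by a node outside the subgraph, so the copy
                // operation does not touch it (the original subgraph is not modified).
                return null;
            }
            String newTo = originalToNewUuid.get(toUuid);
            if (newTo != null) {
                // "Internal" reference: both ends are inside the subgraph, so both ends
                // are remapped and the reference stays internal to the copy (JSR-283 5.1.7).
                return new String[] {newFrom, newTo};
            }
            // "Outward" reference: only the owning ('from') end is remapped; the copy
            // still points at the same node outside the subgraph.
            return new String[] {newFrom, toUuid};
        }
    }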
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -937,7 +937,7 @@
actualToLocation = addNewChild(actualNewParent, copyUuid, childName);
}
- // Now process the children in the subgraph ...
+ // Now create copies of all children in the subgraph, assigning new UUIDs to each new child ...
while (originalIter.hasNext()) {
ChildEntity original = originalIter.next();
String newParentUuidOfCopy = originalToNewUuid.get(original.getId().getParentUuidString());
@@ -955,6 +955,29 @@
}
entities.flush();
+ // Now create copies of all the intra-subgraph references, replacing the UUIDs on both ends ...
+ Set<String> newNodesWithReferenceProperties = new HashSet<String>();
+ for (ReferenceEntity reference : query.getInternalReferences()) {
+ String newFromUuid = originalToNewUuid.get(reference.getId().getFromUuidString());
+ assert newFromUuid != null;
+ String newToUuid = originalToNewUuid.get(reference.getId().getToUuidString());
+ assert newToUuid != null;
+ ReferenceEntity copy = new ReferenceEntity(new ReferenceId(newFromUuid, newToUuid));
+ entities.persist(copy);
+ newNodesWithReferenceProperties.add(newFromUuid);
+ }
+
+ // Now create copies of all the references owned by the subgraph but pointing to non-subgraph nodes,
+ // so we only replace the 'from' UUID ...
+ for (ReferenceEntity reference : query.getOutwardReferences()) {
+ String oldToUuid = reference.getId().getToUuidString();
+ String newFromUuid = originalToNewUuid.get(reference.getId().getFromUuidString());
+ assert newFromUuid != null;
+ ReferenceEntity copy = new ReferenceEntity(new ReferenceId(newFromUuid, oldToUuid));
+ entities.persist(copy);
+ newNodesWithReferenceProperties.add(newFromUuid);
+ }
+
// Now process the properties, creating a copy (note references are not changed) ...
for (PropertiesEntity original : query.getProperties(true, true)) {
// Find the UUID of the copy ...
@@ -962,9 +985,33 @@
assert copyUuid != null;
// Create the copy ...
+ boolean compressed = original.isCompressed();
+ byte[] originalData = original.getData();
PropertiesEntity copy = new PropertiesEntity(new NodeId(copyUuid));
- copy.setCompressed(original.isCompressed());
- copy.setData(original.getData());
+ copy.setCompressed(compressed);
+ if (newNodesWithReferenceProperties.contains(copyUuid)) {
+
+ // This node has internal or outward references that must be adjusted ...
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ OutputStream os = compressed ? new GZIPOutputStream(baos) : baos;
+ ObjectOutputStream oos = new ObjectOutputStream(os);
+ ByteArrayInputStream bais = new ByteArrayInputStream(originalData);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.adjustReferenceProperties(ois, oos, originalToNewUuid);
+ } finally {
+ try {
+ ois.close();
+ } finally {
+ oos.close();
+ }
+ }
+ copy.setData(baos.toByteArray());
+ } else {
+ // No references to adjust, so just copy the original data ...
+ copy.setData(originalData);
+ }
copy.setPropertyCount(original.getPropertyCount());
copy.setReferentialIntegrityEnforced(original.isReferentialIntegrityEnforced());
entities.persist(copy);
@@ -1014,7 +1061,7 @@
query.deleteSubgraph(true);
// Verify referential integrity: that none of the deleted nodes are referenced by nodes not being deleted.
- List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ List<ReferenceEntity> invalidReferences = query.getInwardReferences();
if (invalidReferences.size() > 0) {
// Some of the references that remain will be invalid, since they point to nodes that
// have just been deleted. Build up the information necessary to produce a useful exception ...
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -47,7 +47,9 @@
@NamedQuery( name = "SubgraphNodeEntity.getPropertiesEntities", query = "select props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and node.depth <= :maxDepth order by node.depth, node.parentIndexInParent, node.indexInParent" ),
@NamedQuery( name = "SubgraphNodeEntity.getPropertiesEntitiesWithLargeValues", query = "select props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and size(props.largeValues) > 0" ),
@NamedQuery( name = "SubgraphNodeEntity.getChildEntities", query = "select child from ChildEntity child, SubgraphNodeEntity node where child.id.childUuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and node.depth <= :maxDepth order by node.depth, node.parentIndexInParent, node.indexInParent" ),
- @NamedQuery( name = "SubgraphNodeEntity.getReferenceThatWillBeInvalid", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and ref.id.fromUuidString not in (select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId)" ),
+ @NamedQuery( name = "SubgraphNodeEntity.getInternalReferences", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and ref.id.fromUuidString in (select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId)" ),
+ @NamedQuery( name = "SubgraphNodeEntity.getOutwardReferences", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString not in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and ref.id.fromUuidString in (select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId)" ),
+ @NamedQuery( name = "SubgraphNodeEntity.getInwardReferences", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and ref.id.fromUuidString not in (select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId)" ),
@NamedQuery( name = "SubgraphNodeEntity.deletePropertiesEntities", query = "delete PropertiesEntity props where props.id.uuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId )" ),
@NamedQuery( name = "SubgraphNodeEntity.deleteChildEntities", query = "delete ChildEntity child where child.id.childUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId )" ),
@NamedQuery( name = "SubgraphNodeEntity.deleteReferences", query = "delete ReferenceEntity as ref where ref.id.fromUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId )" ),
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -284,21 +284,50 @@
}
/**
- * Determine whether there are any invalid references (typically called after {@link #deleteSubgraph(boolean)}).
+ * Get the list of references that are owned by nodes within the subgraph and that point to other nodes <i>in this same
+ * subgraph</i>. This set of references is important in copying a subgraph, since all intra-subgraph references in the
+ * original subgraph must also be intra-subgraph references in the copy.
 *
+ * @return the list of references completely contained by this subgraph
+ */
+ @SuppressWarnings( "unchecked" )
+ public List<ReferenceEntity> getInternalReferences() {
+ Query references = manager.createNamedQuery("SubgraphNodeEntity.getInternalReferences");
+ references.setParameter("queryId", query.getId());
+ return references.getResultList();
+ }
+
+ /**
+ * Get the list of references that are owned by nodes within the subgraph and that point to nodes <i>not in this same
+ * subgraph</i>. This set of references is important in copying a subgraph.
+ *
+ * @return the list of references that are owned by the subgraph but that point to nodes outside of the subgraph
+ */
+ @SuppressWarnings( "unchecked" )
+ public List<ReferenceEntity> getOutwardReferences() {
+ Query references = manager.createNamedQuery("SubgraphNodeEntity.getOutwardReferences");
+ references.setParameter("queryId", query.getId());
+ return references.getResultList();
+ }
+
+ /**
+ * Get the list of references that are owned by nodes <i>outside</i> of the subgraph that point to nodes <i>in this
+ * subgraph</i>. This set of references is important in deleting nodes, since such references prevent the deletion of the
+ * subgraph.
+ *
* @return the list of references that are no longer valid
*/
@SuppressWarnings( "unchecked" )
- public List<ReferenceEntity> getInvalidReferences() {
+ public List<ReferenceEntity> getInwardReferences() {
// Verify referential integrity: that none of the deleted nodes are referenced by nodes not being deleted.
- Query references = manager.createNamedQuery("SubgraphNodeEntity.getReferenceThatWillBeInvalid");
+ Query references = manager.createNamedQuery("SubgraphNodeEntity.getInwardReferences");
references.setParameter("queryId", query.getId());
return references.getResultList();
}
/**
 * Delete the nodes in the subgraph. This method first does not check for referential integrity (see
- * {@link #getInvalidReferences()}).
+ * {@link #getInwardReferences()}).
*
* @param includeRoot true if the root node should also be deleted
*/
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -47,7 +47,10 @@
import org.jboss.dna.graph.properties.PropertyFactory;
import org.jboss.dna.graph.properties.PropertyType;
import org.jboss.dna.graph.properties.Reference;
+import org.jboss.dna.graph.properties.UuidFactory;
import org.jboss.dna.graph.properties.ValueFactories;
+import org.jboss.dna.graph.properties.ValueFactory;
+import org.jboss.dna.graph.properties.ValueFormatException;
/**
* @author Randall Hauch
@@ -101,13 +104,12 @@
long length,
PropertyType type,
Object value ) {
- throw new UnsupportedOperationException();
}
public Object read( ValueFactories valueFactories,
byte[] hash,
long length ) {
- throw new UnsupportedOperationException();
+ return null;
}
}
@@ -399,6 +401,147 @@
}
/**
+ * Deserialize the properties, adjust all {@link Reference} values that point to an "old" UUID to point to the corresponding
+ * "new" UUID, and reserialize the properties. If any reference is to a UUID not in the map, it is left untouched.
+ * <p>
+ * This is an efficient method that (for the most part) reads from the input stream and directly writes to the output stream.
+ * The exception is when a Reference value is read: the method attempts to remap that Reference to a new Reference and writes
+ * it in place of the old reference. (Of course, if the Reference is to a UUID that is not in the "old" to "new" map, the old
+ * reference is written unchanged.)
+ * </p>
+ *
+ * @param input the stream from which the existing properties are to be deserialized; may not be null
+ * @param output the stream to which the updated properties are to be serialized; may not be null
+ * @param oldUuidToNewUuid the map of old-to-new UUIDs
+ * @throws IOException if there is an error reading from <code>input</code> or writing to <code>output</code>
+ * @throws ClassNotFoundException if the class for the value's object could not be found
+ */
+ public void adjustReferenceProperties( ObjectInputStream input,
+ ObjectOutputStream output,
+ Map<String, String> oldUuidToNewUuid ) throws IOException, ClassNotFoundException {
+ assert input != null;
+ assert output != null;
+ assert oldUuidToNewUuid != null;
+
+ UuidFactory uuidFactory = valueFactories.getUuidFactory();
+ ValueFactory<Reference> referenceFactory = valueFactories.getReferenceFactory();
+
+ // Read the number of properties ...
+ int count = input.readInt();
+ output.writeInt(count);
+ // Deserialize all of the properties ...
+ for (int i = 0; i != count; ++i) {
+ // Read and write the property name ...
+ Object name = input.readObject();
+ output.writeObject(name);
+ // Read and write the number of values ...
+ int numValues = input.readInt();
+ output.writeInt(numValues);
+ // Now read and write each property value ...
+ for (int j = 0; j != numValues; ++j) {
+ // Read and write the type of value ...
+ char type = input.readChar();
+ output.writeChar(type);
+ switch (type) {
+ case 'S':
+ output.writeObject(input.readObject());
+ break;
+ case 'b':
+ output.writeBoolean(input.readBoolean());
+ break;
+ case 'i':
+ output.writeInt(input.readInt());
+ break;
+ case 'l':
+ output.writeLong(input.readLong());
+ break;
+ case 's':
+ output.writeShort(input.readShort());
+ break;
+ case 'f':
+ output.writeFloat(input.readFloat());
+ break;
+ case 'd':
+ output.writeDouble(input.readDouble());
+ break;
+ case 'c':
+ // char
+ output.writeChar(input.readChar());
+ break;
+ case 'U':
+ // UUID
+ output.writeLong(input.readLong());
+ output.writeLong(input.readLong());
+ break;
+ case 'I':
+ // URI
+ output.writeObject(input.readObject());
+ break;
+ case 'N':
+ // Name
+ output.writeObject(input.readObject());
+ break;
+ case 'P':
+ // Path
+ output.writeObject(input.readObject());
+ break;
+ case 'T':
+ // DateTime
+ output.writeObject(input.readObject());
+ break;
+ case 'D':
+ // BigDecimal
+ output.writeObject(input.readObject());
+ break;
+ case 'R':
+ // Reference
+ String refValue = (String)input.readObject();
+ Reference ref = referenceFactory.create(refValue);
+ try {
+ UUID toUuid = uuidFactory.create(ref);
+ String newUuid = oldUuidToNewUuid.get(toUuid.toString());
+ if (newUuid != null) {
+ // Create a new reference ...
+ ref = referenceFactory.create(newUuid);
+ refValue = ref.getString();
+ }
+ } catch (ValueFormatException e) {
+ // Unknown reference, so simply write it again ...
+ }
+ // Write the reference ...
+ output.writeObject(refValue);
+ break;
+ case 'B':
+ // Binary
+ // Read the length of the content ...
+ long binaryLength = input.readLong();
+ byte[] content = new byte[(int)binaryLength];
+ input.read(content);
+ // Now write out the value ...
+ output.writeLong(binaryLength);
+ output.write(content);
+ break;
+ case 'L':
+ // Large object ...
+ int hashLength = input.readInt();
+ byte[] hash = new byte[hashLength];
+ input.read(hash);
+ long length = input.readLong();
+ // write to the output ...
+ output.writeInt(hash.length);
+ output.write(hash);
+ output.writeLong(length);
+ break;
+ default:
+ // All other objects ...
+ output.writeObject(input.readObject());
+ break;
+ }
+ }
+ }
+ }
+
+ /**
* Deserialize the serialized properties on the supplied object stream.
*
* @param stream the stream that contains the serialized properties; may not be null
@@ -469,7 +612,7 @@
read = name.equals(nameToRead) || (namesToRead != null && namesToRead.contains(namesToRead));
if (read) {
// Now read the property values ...
- Object[] values = deserializePropertyValues(stream, name, false, skippedLargeValues, skippedLargeValues, null);
+ Object[] values = deserializePropertyValues(stream, name, false, largeValues, skippedLargeValues, null);
// Add the property to the collection ...
Property property = propertyFactory.create(name, values);
assert property != null;
@@ -609,21 +752,22 @@
// Reference
String refValue = (String)stream.readObject();
Reference ref = valueFactories.getReferenceFactory().create(refValue);
- if (!skip || references != null) {
- if (!skip) {
- value = ref;
- if (references != null) references.remove(ref);
- } else {
- assert references != null;
- references.read(ref);
- }
+ if (skip) {
+ if (references != null) references.remove(ref);
+ } else {
+ value = ref;
+ if (references != null) references.read(ref);
}
break;
case 'B':
// Binary
// Read the length of the content ...
long binaryLength = stream.readLong();
- if (!skip) value = valueFactories.getBinaryFactory().create(stream, binaryLength);
+ byte[] content = new byte[(int)binaryLength];
+ stream.read(content);
+ if (!skip) {
+ value = valueFactories.getBinaryFactory().create(content);
+ }
break;
case 'L':
// Large object ...
@@ -657,5 +801,4 @@
throw new SystemFailureException(e);
}
}
-
}
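
As a usage note (illustration only, not part of the diff): the BasicRequestProcessor change above drives the new adjustReferenceProperties(...) from the serialized, optionally GZIP-compressed property data roughly as in this sketch. The helper class and method names are hypothetical; a Serializer instance and the old-to-new UUID map are assumed to be in hand.

    import java.io.*;
    import java.util.Map;
    import java.util.zip.GZIPInputStream;
    import java.util.zip.GZIPOutputStream;

    import org.jboss.dna.connector.store.jpa.util.Serializer;

    class AdjustReferencesSketch {
        // Rewrite serialized property data so that reference values use the new UUIDs.
        static byte[] adjustSerializedReferences( Serializer serializer,
                                                  byte[] originalData,
                                                  boolean compressed,
                                                  Map<String, String> originalToNewUuid )
            throws IOException, ClassNotFoundException {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            OutputStream os = compressed ? new GZIPOutputStream(baos) : baos;
            ObjectOutputStream oos = new ObjectOutputStream(os);
            ByteArrayInputStream bais = new ByteArrayInputStream(originalData);
            InputStream is = compressed ? new GZIPInputStream(bais) : bais;
            ObjectInputStream ois = new ObjectInputStream(is);
            try {
                serializer.adjustReferenceProperties(ois, oos, originalToNewUuid);
            } finally {
                try {
                    ois.close();
                } finally {
                    oos.close(); // also finishes the GZIP stream so the byte array is complete
                }
            }
            return baos.toByteArray();
        }
    }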
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -22,6 +22,7 @@
package org.jboss.dna.connector.store.jpa;
import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.jboss.dna.graph.IsNodeWithChildren.hasChild;
import static org.jboss.dna.graph.IsNodeWithChildren.hasChildren;
@@ -49,6 +50,7 @@
import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.Path;
import org.jboss.dna.graph.properties.Property;
+import org.jboss.dna.graph.properties.Reference;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -744,9 +746,52 @@
numPropsOnEach = 3;
createTree("", 3, 3, numPropsOnEach, null, true, false);
+ // Create some references between nodes that aren't involved with the copy ...
+ graph.set("refProp").on("/node1").to(graph.getNodeAt("/node1/node3"));
+ graph.set("refProp").on("/node1/node1").to(graph.getNodeAt("/node3/node2")); // will soon be /node3/node2[1]
+
+ // Create some "inward" references from nodes that are NOT being copied to nodes that are being copied ...
+ graph.set("refProp").on("/node1/node2").to(graph.getNodeAt("/node2/node2"));
+ graph.set("refProp").on("/node1/node3").to(graph.getNodeAt("/node2/node2"));
+
+ // Create some "outward" references from nodes that are being copied to nodes that are NOT being copied ...
+ graph.set("refProp").on("/node2/node1").to(graph.getNodeAt("/node1/node1"));
+ graph.set("refProp").on("/node2/node3").to(graph.getNodeAt("/node1/node2"));
+
+ // Create some "internal" references between nodes that are being copied ...
+ graph.set("refProp").on("/node2/node2").to(graph.getNodeAt("/node2/node2/node1"));
+ graph.set("refProp").on("/node2/node3/node1").to(graph.getNodeAt("/node2/node2/node1"));
+
+ // Verify the references are there ...
+ assertReference("/node1", "refProp", "/node1/node3");
+ assertReference("/node1/node1", "refProp", "/node3/node2");
+ assertReference("/node1/node2", "refProp", "/node2/node2");
+ assertReference("/node1/node3", "refProp", "/node2/node2");
+ assertReference("/node2/node1", "refProp", "/node1/node1");
+ assertReference("/node2/node3", "refProp", "/node1/node2");
+ assertReference("/node2/node2", "refProp", "/node2/node2/node1");
+ assertReference("/node2/node3/node1", "refProp", "/node2/node2/node1");
+
// Copy a branch ...
graph.copy("/node2").into("/node3");
+ // Verify the references are still there ...
+ assertReference("/node1", "refProp", "/node1/node3");
+ assertReference("/node1/node1", "refProp", "/node3/node2[1]");
+ assertReference("/node1/node2", "refProp", "/node2/node2");
+ assertReference("/node1/node3", "refProp", "/node2/node2");
+ assertReference("/node2/node1", "refProp", "/node1/node1");
+ assertReference("/node2/node3", "refProp", "/node1/node2");
+ assertReference("/node2/node2", "refProp", "/node2/node2/node1");
+ assertReference("/node2/node3/node1", "refProp", "/node2/node2/node1");
+
+ // And verify that we have a few new (outward and internal) references in the copy ...
+ assertReference("/node3/node2[2]/node1", "refProp", "/node1/node1"); // outward
+ assertReference("/node3/node2[2]/node3", "refProp", "/node1/node2"); // outward
+ assertReference("/node3/node2[2]/node2", "refProp", "/node3/node2[2]/node2/node1"); // internal
+ assertReference("/node3/node2[2]/node3/node1", "refProp", "/node3/node2[2]/node2/node1"); // internal
+
+ // Now assert the structure ...
assertThat(graph.getChildren().of("/node1"), hasChildren(child("node1"), child("node2"), child("node3")));
assertThat(graph.getChildren().of("/node1/node1"), hasChildren(child("node1"), child("node2"), child("node3")));
assertThat(graph.getChildren().of("/node1/node2"), hasChildren(child("node1"), child("node2"), child("node3")));
@@ -873,6 +918,19 @@
 "The quick brown fox jumped over the moon. What? "));
}
+ protected void assertReference( String fromNodePath,
+ String propertyName,
+ String... toNodePath ) {
+ Object[] values = graph.getProperty(propertyName).on(fromNodePath).getValuesAsArray();
+ assertThat(values.length, is(toNodePath.length));
+ for (int i = 0; i != values.length; ++i) {
+ Object value = values[i];
+ assertThat(value, is(instanceOf(Reference.class)));
+ Reference ref = (Reference)value;
+ assertThat(graph.resolve(ref), is(graph.getNodeAt(toNodePath[i])));
+ }
+ }
+
@Test
public void shouldReadRangeOfChildren() {
// Create a shallow tree with many children under one node ...
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -459,7 +459,7 @@
verifyNextLocationIs("/a/a1/a3");
verifyNoMoreLocations();
query.deleteSubgraph(true);
- assertThat(query.getInvalidReferences().isEmpty(), is(true));
+ assertThat(query.getInwardReferences().isEmpty(), is(true));
query.close();
// Commit the transaction, and start another ...
@@ -542,7 +542,7 @@
query.deleteSubgraph(true);
// Now there should be invalid references ...
- List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ List<ReferenceEntity> invalidReferences = query.getInwardReferences();
assertThat(invalidReferences.size(), is(3));
invalidReferences.removeAll(invalidReferences);
assertThat(invalidReferences.size(), is(0));
@@ -583,7 +583,7 @@
query.deleteSubgraph(true);
// Now there should be invalid references ...
- List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ List<ReferenceEntity> invalidReferences = query.getInwardReferences();
assertThat(invalidReferences.size(), is(0));
query.close();
@@ -595,4 +595,59 @@
assertThat(remainingReferences.size(), is(0));
}
+ @Test
+ public void shouldGetVariousReferencesRelatedToSubgraph() throws Exception {
+ // Verify that all the nodes with large values do indeed have them ...
+ verifyNodesHaveLargeValues("/a/a1", "/a/a2", "/a/a2/a1");
+
+ // Count the number of objects ...
+ assertThat((Long)manager.createQuery("select count(*) from LargeValueEntity").getSingleResult(), is(3L));
+ assertThat((Long)manager.createQuery("select count(*) from PropertiesEntity").getSingleResult(), is(14L));
+ assertThat((Long)manager.createQuery("select count(*) from ChildEntity").getSingleResult(), is(14L));
+
+ // Create references from the nodes that aren't even part of the subgraph ...
+ List<ReferenceEntity> otherRefs = new ArrayList<ReferenceEntity>();
+ otherRefs.add(createReferenceBetween("/a/a2", "/a/a2/a1"));
+ otherRefs.add(createReferenceBetween("/a/a2/a1", "/a/a2/a2"));
+
+ // Create references between nodes in the subgraph ...
+ List<ReferenceEntity> internalRefs = new ArrayList<ReferenceEntity>();
+ internalRefs.add(createReferenceBetween("/a/a1", "/a/a1/a1"));
+ internalRefs.add(createReferenceBetween("/a/a1/a2", "/a/a1/a3"));
+
+ // Create references from nodes outside of the subgraph to nodes inside of the subgraph ...
+ List<ReferenceEntity> inwardRefs = new ArrayList<ReferenceEntity>();
+ inwardRefs.add(createReferenceBetween("/a/a2", "/a/a1/a1"));
+ inwardRefs.add(createReferenceBetween("/a/a2/a1", "/a/a1/a3"));
+
+ // Create references from nodes inside of the subgraph to nodes outside of the subgraph ...
+ List<ReferenceEntity> outwardRefs = new ArrayList<ReferenceEntity>();
+ outwardRefs.add(createReferenceBetween("/a/a1", "/a/a2"));
+ outwardRefs.add(createReferenceBetween("/a/a1/a1", "/a/a2/a1"));
+
+ // Create the query ...
+ Path path = path("/a/a1");
+ UUID uuid = uuidByPath.get(path);
+ query = SubgraphQuery.create(context, manager, uuid, path, Integer.MAX_VALUE);
+
+ // Check the various kinds of references ...
+ List<ReferenceEntity> actualInternal = query.getInternalReferences();
+ List<ReferenceEntity> actualInward = query.getInwardReferences();
+ List<ReferenceEntity> actualOutward = query.getOutwardReferences();
+
+ assertThat(actualInternal.size(), is(internalRefs.size()));
+ actualInternal.removeAll(internalRefs);
+ assertThat(actualInternal.size(), is(0));
+
+ assertThat(actualInward.size(), is(inwardRefs.size()));
+ actualInward.removeAll(inwardRefs);
+ assertThat(actualInward.size(), is(0));
+
+ assertThat(actualOutward.size(), is(outwardRefs.size()));
+ actualOutward.removeAll(outwardRefs);
+ assertThat(actualOutward.size(), is(0));
+
+ query.close();
+ }
+
}
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -53,6 +53,7 @@
import org.jboss.dna.graph.properties.Property;
import org.jboss.dna.graph.properties.PropertyFactory;
import org.jboss.dna.graph.properties.PropertyType;
+import org.jboss.dna.graph.properties.Reference;
import org.jboss.dna.graph.properties.ValueFactories;
import org.junit.Before;
import org.junit.Test;
@@ -229,8 +230,14 @@
Property prop5 = createProperty("p5", valueFactories.getBinaryFactory().create("something"));
String binaryValue = "really really long string that will be converted to a binary value and tested like that";
Property prop6 = createProperty("p6", valueFactories.getBinaryFactory().create(binaryValue));
+ UUID uuid7 = UUID.randomUUID();
+ Reference ref7 = valueFactories.getReferenceFactory().create(uuid7);
+ Property prop7 = createProperty("p7", ref7);
+ UUID uuid8 = UUID.randomUUID();
+ Reference ref8 = valueFactories.getReferenceFactory().create(uuid8);
+ Property prop8 = createProperty("p8", ref8);
- assertSerializableAndDeserializable(serializer, prop1, prop2, prop3, prop4, prop5, prop6);
+ assertSerializableAndDeserializable(serializer, prop1, prop2, prop3, prop4, prop5, prop6, prop7, prop8);
assertThat(largeValues.getCount(), is(2));
}
@@ -245,6 +252,12 @@
String binaryValueStr = "really really long string that will be converted to a binary value and tested like that";
Binary binaryValue = valueFactories.getBinaryFactory().create(binaryValueStr);
Property prop6 = createProperty("p6", binaryValue);
+ UUID uuid7 = UUID.randomUUID();
+ Reference ref7 = valueFactories.getReferenceFactory().create(uuid7);
+ Property prop7 = createProperty("p7", ref7);
+ UUID uuid8 = UUID.randomUUID();
+ Reference ref8 = valueFactories.getReferenceFactory().create(uuid8);
+ Property prop8 = createProperty("p8", ref8);
Property prop2b = createProperty("p2");
Property prop3b = createProperty("p3", "v3");
@@ -252,7 +265,7 @@
Binary binaryValue2 = valueFactories.getBinaryFactory().create(binaryValueStr2);
Property prop6b = createProperty("p6", binaryValue2);
- Property[] initial = new Property[] {prop1, prop2, prop3, prop4, prop5, prop6};
+ Property[] initial = new Property[] {prop1, prop2, prop3, prop4, prop5, prop6, prop7, prop8};
Property[] updated = new Property[] {prop2b, prop3b, prop6b};
SkippedLargeValues removedLargeValues = new SkippedLargeValues();
assertReserializable(serializer, removedLargeValues, initial, updated);
@@ -265,6 +278,61 @@
assertThat(removedLargeValues.isSkipped(binaryValue), is(true));
}
+ @Test
+ public void shouldAdjustReferences() throws Exception {
+ Property prop1 = createProperty("p1", "v1");
+ String value = "v234567890123456789012345678901234567890";
+ Property prop2 = createProperty("p2", value);
+ Property prop3 = createProperty("p3", "v2");
+ Property prop4 = createProperty("p4", new String(value)); // make sure it's a different String object
+ Property prop5 = createProperty("p5", valueFactories.getBinaryFactory().create("something"));
+ String binaryValueStr = "really really long string that will be converted to a binary value and tested like that";
+ Binary binaryValue = valueFactories.getBinaryFactory().create(binaryValueStr);
+ Property prop6 = createProperty("p6", binaryValue);
+ UUID uuid7 = UUID.randomUUID();
+ Reference ref7 = valueFactories.getReferenceFactory().create(uuid7);
+ Property prop7 = createProperty("p7", ref7);
+ UUID uuid8 = UUID.randomUUID();
+ Reference ref8 = valueFactories.getReferenceFactory().create(uuid8);
+ Property prop8 = createProperty("p8", ref8);
+
+ // Serialize the properties (and verify they're serialized properly) ...
+ Property[] props = new Property[] {prop1, prop2, prop3, prop4, prop5, prop6, prop7, prop8};
+ byte[] content = serialize(serializer, props);
+ List<Property> properties = deserialize(serializer, content);
+ assertThat(properties, hasItems(props));
+
+ // Define the old-to-new UUID mapping ...
+ UUID newUuid7 = UUID.randomUUID();
+ Map<String, String> oldToNewUuids = new HashMap<String, String>();
+ oldToNewUuids.put(uuid7.toString(), newUuid7.toString());
+ // note that 'uuid8' is not included, so 'ref8' should be untouched
+
+ // Now update the references in the serialized properties ...
+ ByteArrayInputStream bais = new ByteArrayInputStream(content);
+ ObjectInputStream ois = new ObjectInputStream(bais);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ ObjectOutputStream oos = new ObjectOutputStream(baos);
+ try {
+ serializer.adjustReferenceProperties(ois, oos, oldToNewUuids);
+ } finally {
+ baos.close();
+ oos.close();
+ }
+ byte[] newContent = baos.toByteArray();
+
+ // Now deserialize the updated content ...
+ properties = deserialize(serializer, newContent);
+
+ // Build the expected new 'prop7' ...
+ Reference newRef7 = valueFactories.getReferenceFactory().create(newUuid7);
+ Property newProp7 = createProperty("p7", newRef7);
+ Property[] newProps = new Property[] {prop1, prop2, prop3, prop4, prop5, prop6, newProp7, prop8};
+
+ // Finally verify that the updated content matches the expected new properties ...
+ assertThat(properties, hasItems(newProps));
+ }
+
protected Property createProperty( String name,
Object... values ) {
return propertyFactory.create(valueFactories.getNameFactory().create(name), values);
@@ -301,27 +369,42 @@
List<Property> outputProperties = new ArrayList<Property>(propertyList.size());
// Serialize the properties one at a time ...
+ byte[] bytes = serialize(serializer, propertyList.toArray(new Property[propertyList.size()]));
+
+ // Deserialize ...
+ outputProperties = deserialize(serializer, bytes);
+
+ // Check the properties match ...
+ assertThat(outputProperties.size(), is(propertyList.size()));
+ assertThat(outputProperties, hasItems(propertyList.toArray(new Property[propertyList.size()])));
+ }
+
+ protected byte[] serialize( Serializer serializer,
+ Property... originalProperties ) throws IOException {
+ // Serialize the properties one at a time ...
+ Collection<Property> initialProps = Arrays.asList(originalProperties);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
- serializer.serializeProperties(oos, propertyList.size(), propertyList, largeValues, references);
+ serializer.serializeProperties(oos, initialProps.size(), initialProps, largeValues, references);
} finally {
oos.close();
}
- byte[] bytes = baos.toByteArray();
+ return baos.toByteArray();
+ }
+ protected List<Property> deserialize( Serializer serializer,
+ byte[] content ) throws IOException, ClassNotFoundException {
// Deserialize ...
- ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+ List<Property> afterProperties = new ArrayList<Property>();
+ ByteArrayInputStream bais = new ByteArrayInputStream(content);
ObjectInputStream ois = new ObjectInputStream(bais);
try {
- serializer.deserializeAllProperties(ois, outputProperties, largeValues);
+ serializer.deserializeAllProperties(ois, afterProperties, largeValues);
} finally {
ois.close();
}
-
- // Check the properties match ...
- assertThat(outputProperties.size(), is(propertyList.size()));
- assertThat(outputProperties, hasItems(propertyList.toArray(new Property[propertyList.size()])));
+ return afterProperties;
}
protected void assertReserializable( Serializer serializer,
@@ -343,22 +426,14 @@
}
// Serialize the properties one at a time ...
- Collection<Property> initialProps = Arrays.asList(originalProperties);
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- ObjectOutputStream oos = new ObjectOutputStream(baos);
- try {
- serializer.serializeProperties(oos, initialProps.size(), initialProps, largeValues, references);
- } finally {
- oos.close();
- }
- byte[] bytes = baos.toByteArray();
+ byte[] bytes = serialize(serializer, originalProperties);
// Now reserialize, updating the properties ...
Collection<Property> updatedProps = Arrays.asList(updatedProperties);
ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
ObjectInputStream ois = new ObjectInputStream(bais);
- baos = new ByteArrayOutputStream();
- oos = new ObjectOutputStream(baos);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
serializer.reserializeProperties(ois, oos, updatedProps, largeValues, removedLargeValues, references);
} finally {
@@ -367,14 +442,8 @@
}
// Deserialize ...
- List<Property> afterProperties = new ArrayList<Property>();
- bais = new ByteArrayInputStream(baos.toByteArray());
- ois = new ObjectInputStream(bais);
- try {
- serializer.deserializeAllProperties(ois, afterProperties, largeValues);
- } finally {
- ois.close();
- }
+ List<Property> afterProperties = deserialize(serializer, baos.toByteArray());
+
Collection<Name> namesAfter = new HashSet<Name>();
for (Property prop : afterProperties) {
namesAfter.add(prop.getName());