DNA SVN: r694 - in trunk/dna-graph/src: test/java/org/jboss/dna/graph and 1 other directory.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2008-12-19 14:49:01 -0500 (Fri, 19 Dec 2008)
New Revision: 694
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java
Log:
DNA-266 Improve usability of graph API with additional methods
Added additional methods for setting the values of properties, overloading with the different types allowed for property values (e.g., boolean, int, Path, Name, etc.). This resulted in quite a bit more code (that is all pretty straightforward), but this detriment is outweighed by the API being more useful and clear.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java 2008-12-18 22:52:23 UTC (rev 693)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java 2008-12-19 19:49:01 UTC (rev 694)
@@ -23,10 +23,15 @@
import java.io.File;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
import java.net.URI;
import java.util.ArrayList;
+import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
+import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
@@ -41,6 +46,8 @@
import org.jboss.dna.graph.connectors.RepositoryConnectionFactory;
import org.jboss.dna.graph.connectors.RepositorySource;
import org.jboss.dna.graph.connectors.RepositorySourceException;
+import org.jboss.dna.graph.properties.Binary;
+import org.jboss.dna.graph.properties.DateTime;
import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.NameFactory;
import org.jboss.dna.graph.properties.Path;
@@ -718,6 +725,93 @@
return nextGraph;
}
+ protected Conjunction<Graph> toValue( Object value ) {
+ Property property = getContext().getPropertyFactory().create(propertyName, value);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ queue().submit(request);
+ return nextGraph;
+ }
+
+ public Conjunction<Graph> to( String value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( int value ) {
+ return toValue(Integer.valueOf(value));
+ }
+
+ public Conjunction<Graph> to( long value ) {
+ return toValue(Long.valueOf(value));
+ }
+
+ public Conjunction<Graph> to( boolean value ) {
+ return toValue(Boolean.valueOf(value));
+ }
+
+ public Conjunction<Graph> to( float value ) {
+ return toValue(Float.valueOf(value));
+ }
+
+ public Conjunction<Graph> to( double value ) {
+ return toValue(Double.valueOf(value));
+ }
+
+ public Conjunction<Graph> to( BigDecimal value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( Calendar value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( Date value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( DateTime value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( Name value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( Path value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( Reference value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( URI value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( UUID value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( Binary value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( byte[] value ) {
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( InputStream stream,
+ long approximateLength ) {
+ Binary value = getContext().getValueFactories().getBinaryFactory().create(stream, approximateLength);
+ return toValue(value);
+ }
+
+ public Conjunction<Graph> to( Reader reader,
+ long approximateLength ) {
+ Binary value = getContext().getValueFactories().getBinaryFactory().create(reader, approximateLength);
+ return toValue(value);
+ }
+
public Conjunction<Graph> to( Object value ) {
value = convertReferenceValue(value);
Property property = getContext().getPropertyFactory().create(propertyName, value);
@@ -795,6 +889,90 @@
return set(getContext().getPropertyFactory().create(propertyName, value));
}
+ protected On<Conjunction<Graph>> toValue( Object value ) {
+ return set(getContext().getPropertyFactory().create(propertyName, value));
+ }
+
+ public On<Conjunction<Graph>> to( String value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( int value ) {
+ return toValue(Integer.valueOf(value));
+ }
+
+ public On<Conjunction<Graph>> to( long value ) {
+ return toValue(Long.valueOf(value));
+ }
+
+ public On<Conjunction<Graph>> to( boolean value ) {
+ return toValue(Boolean.valueOf(value));
+ }
+
+ public On<Conjunction<Graph>> to( float value ) {
+ return toValue(Float.valueOf(value));
+ }
+
+ public On<Conjunction<Graph>> to( double value ) {
+ return toValue(Double.valueOf(value));
+ }
+
+ public On<Conjunction<Graph>> to( BigDecimal value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( Calendar value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( Date value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( DateTime value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( Name value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( Path value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( Reference value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( URI value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( UUID value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( Binary value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( byte[] value ) {
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( InputStream stream,
+ long approximateLength ) {
+ Binary value = getContext().getValueFactories().getBinaryFactory().create(stream, approximateLength);
+ return toValue(value);
+ }
+
+ public On<Conjunction<Graph>> to( Reader reader,
+ long approximateLength ) {
+ Binary value = getContext().getValueFactories().getBinaryFactory().create(reader, approximateLength);
+ return toValue(value);
+ }
+
public On<Conjunction<Graph>> to( Object value ) {
value = convertReferenceValue(value);
return set(getContext().getPropertyFactory().create(propertyName, value));
@@ -2009,6 +2187,93 @@
return nextRequests;
}
+ protected BatchConjunction toValue( Object value ) {
+ Property property = getContext().getPropertyFactory().create(propertyName, value);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ requestQueue.submit(request);
+ return nextRequests;
+ }
+
+ public BatchConjunction to( String value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( int value ) {
+ return toValue(Integer.valueOf(value));
+ }
+
+ public BatchConjunction to( long value ) {
+ return toValue(Long.valueOf(value));
+ }
+
+ public BatchConjunction to( boolean value ) {
+ return toValue(Boolean.valueOf(value));
+ }
+
+ public BatchConjunction to( float value ) {
+ return toValue(Float.valueOf(value));
+ }
+
+ public BatchConjunction to( double value ) {
+ return toValue(Double.valueOf(value));
+ }
+
+ public BatchConjunction to( BigDecimal value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( Calendar value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( Date value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( DateTime value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( Name value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( Path value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( Reference value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( URI value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( UUID value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( Binary value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( byte[] value ) {
+ return toValue(value);
+ }
+
+ public BatchConjunction to( InputStream stream,
+ long approximateLength ) {
+ Binary value = getContext().getValueFactories().getBinaryFactory().create(stream, approximateLength);
+ return toValue(value);
+ }
+
+ public BatchConjunction to( Reader reader,
+ long approximateLength ) {
+ Binary value = getContext().getValueFactories().getBinaryFactory().create(reader, approximateLength);
+ return toValue(value);
+ }
+
public BatchConjunction to( Object value ) {
value = convertReferenceValue(value);
Property property = getContext().getPropertyFactory().create(propertyName, value);
@@ -2052,6 +2317,7 @@
requestQueue.submit(request);
return nextRequests;
}
+
};
}
@@ -2086,6 +2352,90 @@
return set(getContext().getPropertyFactory().create(propertyName, reference));
}
+ protected On<BatchConjunction> toValue( Object value ) {
+ return set(getContext().getPropertyFactory().create(propertyName, value));
+ }
+
+ public On<BatchConjunction> to( String value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( int value ) {
+ return toValue(Integer.valueOf(value));
+ }
+
+ public On<BatchConjunction> to( long value ) {
+ return toValue(Long.valueOf(value));
+ }
+
+ public On<BatchConjunction> to( boolean value ) {
+ return toValue(Boolean.valueOf(value));
+ }
+
+ public On<BatchConjunction> to( float value ) {
+ return toValue(Float.valueOf(value));
+ }
+
+ public On<BatchConjunction> to( double value ) {
+ return toValue(Double.valueOf(value));
+ }
+
+ public On<BatchConjunction> to( BigDecimal value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( Calendar value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( Date value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( DateTime value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( Name value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( Path value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( Reference value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( URI value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( UUID value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( Binary value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( byte[] value ) {
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( InputStream stream,
+ long approximateLength ) {
+ Binary value = getContext().getValueFactories().getBinaryFactory().create(stream, approximateLength);
+ return toValue(value);
+ }
+
+ public On<BatchConjunction> to( Reader reader,
+ long approximateLength ) {
+ Binary value = getContext().getValueFactories().getBinaryFactory().create(reader, approximateLength);
+ return toValue(value);
+ }
+
public On<BatchConjunction> to( Object value ) {
value = convertReferenceValue(value);
return set(getContext().getPropertyFactory().create(propertyName, value));
@@ -3158,6 +3508,162 @@
Next to( Location location );
/**
+ * Set the property value to the given string.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( String value );
+
+ /**
+ * Set the property value to the given integer value.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( int value );
+
+ /**
+ * Set the property value to the given long value.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( long value );
+
+ /**
+ * Set the property value to the given boolean value.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( boolean value );
+
+ /**
+ * Set the property value to the given float value.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( float value );
+
+ /**
+ * Set the property value to the given double value.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( double value );
+
+ /**
+ * Set the property value to the given decimal value.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( BigDecimal value );
+
+ /**
+ * Set the property value to the date given by the supplied calendar.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( Calendar value );
+
+ /**
+ * Set the property value to the given date.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( Date value );
+
+ /**
+ * Set the property value to the given date-time instant.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( DateTime value );
+
+ /**
+ * Set the property value to the given Name.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( Name value );
+
+ /**
+ * Set the property value to the given Path.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( Path value );
+
+ /**
+ * Set the property value to the given Reference. See also {@link #to(Node)}.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( Reference value );
+
+ /**
+ * Set the property value to the given URI.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( URI value );
+
+ /**
+ * Set the property value to the given UUID.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( UUID value );
+
+ /**
+ * Set the property value to the given binary value.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( Binary value );
+
+ /**
+ * Set the property value to the given byte array.
+ *
+ * @param value the property value
+ * @return the interface for additional requests or actions
+ */
+ Next to( byte[] value );
+
+ /**
+ * Set the property value to the binary content read from the supplied stream.
+ *
+ * @param stream the stream containing the content to be used for the property value
+ * @param approximateLength the approximate length of the content (in bytes)
+ * @return the interface for additional requests or actions
+ */
+ Next to( InputStream stream,
+ long approximateLength );
+
+ /**
+ * Set the property value to the character content read from the supplied reader.
+ *
+ * @param reader the reader containing the content to be used for the property value
+ * @param approximateLength the approximate length of the content (in bytes)
+ * @return the interface for additional requests or actions
+ */
+ Next to( Reader reader,
+ long approximateLength );
+
+ /**
* Set the property value to the given object. The supplied <code>value</code> should be a valid property value, or a
* {@link Node} (or {@link Location}) if the property value is to be a reference to that node (or location). Note that it
* is an error if the Node (or Location) does not have a {@link Location#getUuid() UUID}.
Modified: trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java
===================================================================
--- trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java 2008-12-18 22:52:23 UTC (rev 693)
+++ trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java 2008-12-19 19:49:01 UTC (rev 694)
@@ -31,7 +31,9 @@
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.stub;
import java.util.Arrays;
+import java.util.Calendar;
import java.util.Collection;
+import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -44,6 +46,7 @@
import org.jboss.dna.graph.connectors.RepositoryConnectionFactory;
import org.jboss.dna.graph.connectors.RepositorySourceException;
import org.jboss.dna.graph.connectors.RepositorySourceListener;
+import org.jboss.dna.graph.properties.DateTime;
import org.jboss.dna.graph.properties.InvalidPathException;
import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.Path;
@@ -139,6 +142,11 @@
return context.getValueFactories().getNameFactory().create(name);
}
+ protected Property createProperty( String name,
+ Object... values ) {
+ return context.getPropertyFactory().create(createName(name), values);
+ }
+
protected void setPropertiesToReadOn( Location location,
Property... properties ) {
this.properties.put(location, Arrays.asList(properties));
@@ -244,6 +252,15 @@
executedRequests.addAll(0, ((CompositeRequest)request).getRequests());
}
+ protected void assertNextRequestUpdateProperties( Location on,
+ Property... properties ) {
+ Request request = executedRequests.poll();
+ assertThat(request, is(instanceOf(UpdatePropertiesRequest.class)));
+ UpdatePropertiesRequest read = (UpdatePropertiesRequest)request;
+ assertThat(read.on(), is(on));
+ assertThat(read.properties(), hasItems(properties));
+ }
+
// ----------------------------------------------------------------------------------------------------------------
// Immediate requests
// ----------------------------------------------------------------------------------------------------------------
@@ -489,6 +506,53 @@
}
@Test
+ public void shouldSetPropertiesWithEitherOnOrToMethodsCalledFirst() {
+ graph.set("propName").on(validPath).to(3.0f);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", 3.0f));
+
+ graph.set("propName").to(3.0f).on(validPath);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", 3.0f));
+ }
+
+ @Test
+ public void shouldSetPropertyValueToPrimitiveTypes() {
+ graph.set("propName").on(validPath).to(3.0F);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", new Float(3.0f)));
+
+ graph.set("propName").on(validPath).to(1.0D);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", new Double(1.0)));
+
+ graph.set("propName").on(validPath).to(false);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", Boolean.FALSE));
+
+ graph.set("propName").on(validPath).to(3);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", new Integer(3)));
+
+ graph.set("propName").on(validPath).to(5L);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", new Long(5)));
+
+ graph.set("propName").on(validPath).to(validPath);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", validPath));
+
+ graph.set("propName").on(validPath).to(validPath.getLastSegment().getName());
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName",
+ validPath.getLastSegment().getName()));
+ Date now = new Date();
+ graph.set("propName").on(validPath).to(now);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", now));
+
+ DateTime dtNow = context.getValueFactories().getDateFactory().create(now);
+ graph.set("propName").on(validPath).to(dtNow);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", dtNow));
+
+ Calendar calNow = Calendar.getInstance();
+ calNow.setTime(now);
+ graph.set("propName").on(validPath).to(calNow);
+ assertNextRequestUpdateProperties(new Location(validPath), createProperty("propName", dtNow));
+
+ }
+
+ @Test
public void shouldReadNode() {
Location child1 = new Location(createPath(validPath, "x"));
Location child2 = new Location(createPath(validPath, "y"));
15 years, 3 months
DNA SVN: r693 - in trunk/extensions/dna-connector-store-jpa/src: main/java/org/jboss/dna/connector/store/jpa/util and 3 other directories.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2008-12-18 17:52:23 -0500 (Thu, 18 Dec 2008)
New Revision: 693
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java
Log:
DNA-40
Per JSR-283 section 5.1.7, corrected this connector's copy behavior for how "internal" references (from nodes within the subgraph to nodes also within the subgraph) are remapped in the copy, so that the copy's references are also internal to the copy. Also verified that "outward" references (from nodes within the subgraph to nodes outside of the subgraph) are copied correctly, and that "inward" references (from nodes outside of the original subgraph to nodes in the original subgraph) are not affected by the copy operation (since the original subgraph is not modified).
Note that JSR-170 does not define the semantics of how references are handled in a copy operation.
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -937,7 +937,7 @@
actualToLocation = addNewChild(actualNewParent, copyUuid, childName);
}
- // Now process the children in the subgraph ...
+ // Now create copies of all children in the subgraph, assigning new UUIDs to each new child ...
while (originalIter.hasNext()) {
ChildEntity original = originalIter.next();
String newParentUuidOfCopy = originalToNewUuid.get(original.getId().getParentUuidString());
@@ -955,6 +955,29 @@
}
entities.flush();
+ // Now create copies of all the intra-subgraph references, replacing the UUIDs on both ends ...
+ Set<String> newNodesWithReferenceProperties = new HashSet<String>();
+ for (ReferenceEntity reference : query.getInternalReferences()) {
+ String newFromUuid = originalToNewUuid.get(reference.getId().getFromUuidString());
+ assert newFromUuid != null;
+ String newToUuid = originalToNewUuid.get(reference.getId().getToUuidString());
+ assert newToUuid != null;
+ ReferenceEntity copy = new ReferenceEntity(new ReferenceId(newFromUuid, newToUuid));
+ entities.persist(copy);
+ newNodesWithReferenceProperties.add(newFromUuid);
+ }
+
+ // Now create copies of all the references owned by the subgraph but pointing to non-subgraph nodes,
+ // so we only replaced the 'from' UUID ...
+ for (ReferenceEntity reference : query.getOutwardReferences()) {
+ String oldToUuid = reference.getId().getToUuidString();
+ String newFromUuid = originalToNewUuid.get(reference.getId().getFromUuidString());
+ assert newFromUuid != null;
+ ReferenceEntity copy = new ReferenceEntity(new ReferenceId(newFromUuid, oldToUuid));
+ entities.persist(copy);
+ newNodesWithReferenceProperties.add(newFromUuid);
+ }
+
// Now process the properties, creating a copy (note references are not changed) ...
for (PropertiesEntity original : query.getProperties(true, true)) {
// Find the UUID of the copy ...
@@ -962,9 +985,33 @@
assert copyUuid != null;
// Create the copy ...
+ boolean compressed = original.isCompressed();
+ byte[] originalData = original.getData();
PropertiesEntity copy = new PropertiesEntity(new NodeId(copyUuid));
- copy.setCompressed(original.isCompressed());
- copy.setData(original.getData());
+ copy.setCompressed(compressed);
+ if (newNodesWithReferenceProperties.contains(copyUuid)) {
+
+ // This node has internal or outward references that must be adjusted ...
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ OutputStream os = compressed ? new GZIPOutputStream(baos) : baos;
+ ObjectOutputStream oos = new ObjectOutputStream(os);
+ ByteArrayInputStream bais = new ByteArrayInputStream(originalData);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.adjustReferenceProperties(ois, oos, originalToNewUuid);
+ } finally {
+ try {
+ ois.close();
+ } finally {
+ oos.close();
+ }
+ }
+ copy.setData(baos.toByteArray());
+ } else {
+ // No references to adjust, so just copy the original data ...
+ copy.setData(originalData);
+ }
copy.setPropertyCount(original.getPropertyCount());
copy.setReferentialIntegrityEnforced(original.isReferentialIntegrityEnforced());
entities.persist(copy);
@@ -1014,7 +1061,7 @@
query.deleteSubgraph(true);
// Verify referential integrity: that none of the deleted nodes are referenced by nodes not being deleted.
- List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ List<ReferenceEntity> invalidReferences = query.getInwardReferences();
if (invalidReferences.size() > 0) {
// Some of the references that remain will be invalid, since they point to nodes that
// have just been deleted. Build up the information necessary to produce a useful exception ...
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -47,7 +47,9 @@
@NamedQuery( name = "SubgraphNodeEntity.getPropertiesEntities", query = "select props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and node.depth <= :maxDepth order by node.depth, node.parentIndexInParent, node.indexInParent" ),
@NamedQuery( name = "SubgraphNodeEntity.getPropertiesEntitiesWithLargeValues", query = "select props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and size(props.largeValues) > 0" ),
@NamedQuery( name = "SubgraphNodeEntity.getChildEntities", query = "select child from ChildEntity child, SubgraphNodeEntity node where child.id.childUuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and node.depth <= :maxDepth order by node.depth, node.parentIndexInParent, node.indexInParent" ),
- @NamedQuery( name = "SubgraphNodeEntity.getReferenceThatWillBeInvalid", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and ref.id.fromUuidString not in (select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId)" ),
+ @NamedQuery( name = "SubgraphNodeEntity.getInternalReferences", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and ref.id.fromUuidString in (select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId)" ),
+ @NamedQuery( name = "SubgraphNodeEntity.getOutwardReferences", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString not in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and ref.id.fromUuidString in (select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId)" ),
+ @NamedQuery( name = "SubgraphNodeEntity.getInwardReferences", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and ref.id.fromUuidString not in (select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId)" ),
@NamedQuery( name = "SubgraphNodeEntity.deletePropertiesEntities", query = "delete PropertiesEntity props where props.id.uuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId )" ),
@NamedQuery( name = "SubgraphNodeEntity.deleteChildEntities", query = "delete ChildEntity child where child.id.childUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId )" ),
@NamedQuery( name = "SubgraphNodeEntity.deleteReferences", query = "delete ReferenceEntity as ref where ref.id.fromUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId )" ),
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -284,21 +284,50 @@
}
/**
- * Determine whether there are any invalid references (typically called after {@link #deleteSubgraph(boolean)}).
+ * Get the list of references that are owned by nodes within the subgraph and that point to other nodes <i>in this same
+ * subgraph</i>. This set of references is important in copying a subgraph, since all intra-subgraph references in the
+ * original subgraph must also be intra-subgraph references in the copy.
*
+ * @return the list of references completely contained by this subgraph
+ */
+ @SuppressWarnings( "unchecked" )
+ public List<ReferenceEntity> getInternalReferences() {
+ Query references = manager.createNamedQuery("SubgraphNodeEntity.getInternalReferences");
+ references.setParameter("queryId", query.getId());
+ return references.getResultList();
+ }
+
+ /**
+ * Get the list of references that are owned by nodes within the subgraph and that point to nodes <i>not in this same
+ * subgraph</i>. This set of references is important in copying a subgraph.
+ *
+ * @return the list of references that are owned by the subgraph but that point to nodes outside of the subgraph
+ */
+ @SuppressWarnings( "unchecked" )
+ public List<ReferenceEntity> getOutwardReferences() {
+ Query references = manager.createNamedQuery("SubgraphNodeEntity.getOutwardReferences");
+ references.setParameter("queryId", query.getId());
+ return references.getResultList();
+ }
+
+ /**
+ * Get the list of references that are owned by nodes <i>outside</i> of the subgraph that point to nodes <i>in this
+ * subgraph</i>. This set of references is important in deleting nodes, since such references prevent the deletion of the
+ * subgraph.
+ *
* @return the list of references that are no longer valid
*/
@SuppressWarnings( "unchecked" )
- public List<ReferenceEntity> getInvalidReferences() {
+ public List<ReferenceEntity> getInwardReferences() {
// Verify referential integrity: that none of the deleted nodes are referenced by nodes not being deleted.
- Query references = manager.createNamedQuery("SubgraphNodeEntity.getReferenceThatWillBeInvalid");
+ Query references = manager.createNamedQuery("SubgraphNodeEntity.getInwardReferences");
references.setParameter("queryId", query.getId());
return references.getResultList();
}
/**
* Delete the nodes in the subgraph. This method does not check for referential integrity (see
- * {@link #getInvalidReferences()}).
+ * {@link #getInwardReferences()}).
*
* @param includeRoot true if the root node should also be deleted
*/
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -47,7 +47,10 @@
import org.jboss.dna.graph.properties.PropertyFactory;
import org.jboss.dna.graph.properties.PropertyType;
import org.jboss.dna.graph.properties.Reference;
+import org.jboss.dna.graph.properties.UuidFactory;
import org.jboss.dna.graph.properties.ValueFactories;
+import org.jboss.dna.graph.properties.ValueFactory;
+import org.jboss.dna.graph.properties.ValueFormatException;
/**
* @author Randall Hauch
@@ -101,13 +104,12 @@
long length,
PropertyType type,
Object value ) {
- throw new UnsupportedOperationException();
}
public Object read( ValueFactories valueFactories,
byte[] hash,
long length ) {
- throw new UnsupportedOperationException();
+ return null;
}
}
@@ -399,6 +401,147 @@
}
/**
+ * Deserialize the properties, adjust all {@link Reference} values that point to an "old" UUID to point to the corresponding
+ * "new" UUID, and reserialize the properties. If any reference is to a UUID not in the map, it is left untouched.
+ * <p>
+ * This is an efficient method that (for the most part) reads from the input stream and directly writes to the output stream.
+ * The exception is when a Reference value is read, that Reference is attempted to be remapped to a new Reference and written
+ * in place of the old reference. (Of course, if the Reference is to a UUID that is not in the "old" to "new" map, the old is
+ * written directly.)
+ * </p>
+ *
+ * @param input the stream from which the existing properties are to be deserialized; may not be null
+ * @param output the stream to which the updated properties are to be serialized; may not be null
+ * @param oldUuidToNewUuid the map of old-to-new UUIDs
+ * @throws IOException if there is an error reading from the <code>input</code> or writing to the <code>output</code>
+ * @throws ClassNotFoundException if the class for the value's object could not be found
+ */
+ public void adjustReferenceProperties( ObjectInputStream input,
+ ObjectOutputStream output,
+ Map<String, String> oldUuidToNewUuid ) throws IOException, ClassNotFoundException {
+ assert input != null;
+ assert output != null;
+ assert oldUuidToNewUuid != null;
+
+ UuidFactory uuidFactory = valueFactories.getUuidFactory();
+ ValueFactory<Reference> referenceFactory = valueFactories.getReferenceFactory();
+
+ // Read the number of properties ...
+ int count = input.readInt();
+ output.writeInt(count);
+ // Deserialize all of the properties ...
+ for (int i = 0; i != count; ++i) {
+ // Read and write the property name ...
+ Object name = input.readObject();
+ output.writeObject(name);
+ // Read and write the number of values ...
+ int numValues = input.readInt();
+ output.writeInt(numValues);
+ // Now read and write each property value ...
+ for (int j = 0; j != numValues; ++j) {
+ // Read and write the type of value ...
+ char type = input.readChar();
+ output.writeChar(type);
+ switch (type) {
+ case 'S':
+ output.writeObject(input.readObject());
+ break;
+ case 'b':
+ output.writeBoolean(input.readBoolean());
+ break;
+ case 'i':
+ output.writeInt(input.readInt());
+ break;
+ case 'l':
+ output.writeLong(input.readLong());
+ break;
+ case 's':
+ output.writeShort(input.readShort());
+ break;
+ case 'f':
+ output.writeFloat(input.readFloat());
+ break;
+ case 'd':
+ output.writeDouble(input.readDouble());
+ break;
+ case 'c':
+ // char
+ output.writeChar(input.readChar());
+ break;
+ case 'U':
+ // UUID
+ output.writeLong(input.readLong());
+ output.writeLong(input.readLong());
+ break;
+ case 'I':
+ // URI
+ output.writeObject(input.readObject());
+ break;
+ case 'N':
+ // Name
+ output.writeObject(input.readObject());
+ break;
+ case 'P':
+ // Path
+ output.writeObject(input.readObject());
+ break;
+ case 'T':
+ // DateTime
+ output.writeObject(input.readObject());
+ break;
+ case 'D':
+ // BigDecimal
+ output.writeObject(input.readObject());
+ break;
+ case 'R':
+ // Reference
+ String refValue = (String)input.readObject();
+ Reference ref = referenceFactory.create(refValue);
+ try {
+ UUID toUuid = uuidFactory.create(ref);
+ String newUuid = oldUuidToNewUuid.get(toUuid.toString());
+ if (newUuid != null) {
+ // Create a new reference ...
+ ref = referenceFactory.create(newUuid);
+ refValue = ref.getString();
+ }
+ } catch (ValueFormatException e) {
+ // Unknown reference, so simply write it again ...
+ }
+ // Write the reference ...
+ output.writeObject(refValue);
+ break;
+ case 'B':
+ // Binary
+ // Read the length of the content ...
+ long binaryLength = input.readLong();
+ byte[] content = new byte[(int)binaryLength];
+ input.read(content);
+ // Now write out the value ...
+ output.writeLong(binaryLength);
+ output.write(content);
+ break;
+ case 'L':
+ // Large object ...
+ int hashLength = input.readInt();
+ byte[] hash = new byte[hashLength];
+ input.read(hash);
+ long length = input.readLong();
+ // write to the output ...
+ output.writeInt(hash.length);
+ output.write(hash);
+ output.writeLong(length);
+ break;
+ default:
+ // All other objects ...
+ output.writeObject(input.readObject());
+ break;
+ }
+ }
+ }
+ }
+
+ /**
* Deserialize the serialized properties on the supplied object stream.
*
* @param stream the stream that contains the serialized properties; may not be null
@@ -469,7 +612,7 @@
read = name.equals(nameToRead) || (namesToRead != null && namesToRead.contains(namesToRead));
if (read) {
// Now read the property values ...
- Object[] values = deserializePropertyValues(stream, name, false, skippedLargeValues, skippedLargeValues, null);
+ Object[] values = deserializePropertyValues(stream, name, false, largeValues, skippedLargeValues, null);
// Add the property to the collection ...
Property property = propertyFactory.create(name, values);
assert property != null;
@@ -609,21 +752,22 @@
// Reference
String refValue = (String)stream.readObject();
Reference ref = valueFactories.getReferenceFactory().create(refValue);
- if (!skip || references != null) {
- if (!skip) {
- value = ref;
- if (references != null) references.remove(ref);
- } else {
- assert references != null;
- references.read(ref);
- }
+ if (skip) {
+ if (references != null) references.remove(ref);
+ } else {
+ value = ref;
+ if (references != null) references.read(ref);
}
break;
case 'B':
// Binary
// Read the length of the content ...
long binaryLength = stream.readLong();
- if (!skip) value = valueFactories.getBinaryFactory().create(stream, binaryLength);
+ byte[] content = new byte[(int)binaryLength];
+ stream.read(content);
+ if (!skip) {
+ value = valueFactories.getBinaryFactory().create(content);
+ }
break;
case 'L':
// Large object ...
@@ -657,5 +801,4 @@
throw new SystemFailureException(e);
}
}
-
}
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -22,6 +22,7 @@
package org.jboss.dna.connector.store.jpa;
import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.jboss.dna.graph.IsNodeWithChildren.hasChild;
import static org.jboss.dna.graph.IsNodeWithChildren.hasChildren;
@@ -49,6 +50,7 @@
import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.Path;
import org.jboss.dna.graph.properties.Property;
+import org.jboss.dna.graph.properties.Reference;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -744,9 +746,52 @@
numPropsOnEach = 3;
createTree("", 3, 3, numPropsOnEach, null, true, false);
+ // Create some references between nodes that aren't involved with the copy ...
+ graph.set("refProp").on("/node1").to(graph.getNodeAt("/node1/node3"));
+ graph.set("refProp").on("/node1/node1").to(graph.getNodeAt("/node3/node2")); // will soon be /node3/node2[1]
+
+ // Create some "inward" references from nodes that are NOT being copied to nodes that are being copied ...
+ graph.set("refProp").on("/node1/node2").to(graph.getNodeAt("/node2/node2"));
+ graph.set("refProp").on("/node1/node3").to(graph.getNodeAt("/node2/node2"));
+
+ // Create some "outward" references from nodes that are being copied to nodes that are NOT being copied ...
+ graph.set("refProp").on("/node2/node1").to(graph.getNodeAt("/node1/node1"));
+ graph.set("refProp").on("/node2/node3").to(graph.getNodeAt("/node1/node2"));
+
+ // Create some "internal" references between nodes that are being copied ...
+ graph.set("refProp").on("/node2/node2").to(graph.getNodeAt("/node2/node2/node1"));
+ graph.set("refProp").on("/node2/node3/node1").to(graph.getNodeAt("/node2/node2/node1"));
+
+ // Verify the references are there ...
+ assertReference("/node1", "refProp", "/node1/node3");
+ assertReference("/node1/node1", "refProp", "/node3/node2");
+ assertReference("/node1/node2", "refProp", "/node2/node2");
+ assertReference("/node1/node3", "refProp", "/node2/node2");
+ assertReference("/node2/node1", "refProp", "/node1/node1");
+ assertReference("/node2/node3", "refProp", "/node1/node2");
+ assertReference("/node2/node2", "refProp", "/node2/node2/node1");
+ assertReference("/node2/node3/node1", "refProp", "/node2/node2/node1");
+
// Copy a branch ...
graph.copy("/node2").into("/node3");
+ // Verify the references are still there ...
+ assertReference("/node1", "refProp", "/node1/node3");
+ assertReference("/node1/node1", "refProp", "/node3/node2[1]");
+ assertReference("/node1/node2", "refProp", "/node2/node2");
+ assertReference("/node1/node3", "refProp", "/node2/node2");
+ assertReference("/node2/node1", "refProp", "/node1/node1");
+ assertReference("/node2/node3", "refProp", "/node1/node2");
+ assertReference("/node2/node2", "refProp", "/node2/node2/node1");
+ assertReference("/node2/node3/node1", "refProp", "/node2/node2/node1");
+
+ // And verify that we have a few new (outward and internal) references in the copy ...
+ assertReference("/node3/node2[2]/node1", "refProp", "/node1/node1"); // outward
+ assertReference("/node3/node2[2]/node3", "refProp", "/node1/node2"); // outward
+ assertReference("/node3/node2[2]/node2", "refProp", "/node3/node2[2]/node2/node1"); // internal
+ assertReference("/node3/node2[2]/node3/node1", "refProp", "/node3/node2[2]/node2/node1"); // internal
+
+ // Now assert the structure ...
assertThat(graph.getChildren().of("/node1"), hasChildren(child("node1"), child("node2"), child("node3")));
assertThat(graph.getChildren().of("/node1/node1"), hasChildren(child("node1"), child("node2"), child("node3")));
assertThat(graph.getChildren().of("/node1/node2"), hasChildren(child("node1"), child("node2"), child("node3")));
@@ -873,6 +918,19 @@
"The quick brown fox jumped over the moon. What? "));
}
+ protected void assertReference( String fromNodePath,
+ String propertyName,
+ String... toNodePath ) {
+ Object[] values = graph.getProperty(propertyName).on(fromNodePath).getValuesAsArray();
+ assertThat(values.length, is(toNodePath.length));
+ for (int i = 0; i != values.length; ++i) {
+ Object value = values[i];
+ assertThat(value, is(instanceOf(Reference.class)));
+ Reference ref = (Reference)value;
+ assertThat(graph.resolve(ref), is(graph.getNodeAt(toNodePath[i])));
+ }
+ }
+
@Test
public void shouldReadRangeOfChildren() {
// Create a shallow tree with many children under one node ...
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -459,7 +459,7 @@
verifyNextLocationIs("/a/a1/a3");
verifyNoMoreLocations();
query.deleteSubgraph(true);
- assertThat(query.getInvalidReferences().isEmpty(), is(true));
+ assertThat(query.getInwardReferences().isEmpty(), is(true));
query.close();
// Commit the transaction, and start another ...
@@ -542,7 +542,7 @@
query.deleteSubgraph(true);
// Now there should be invalid references ...
- List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ List<ReferenceEntity> invalidReferences = query.getInwardReferences();
assertThat(invalidReferences.size(), is(3));
invalidReferences.removeAll(invalidReferences);
assertThat(invalidReferences.size(), is(0));
@@ -583,7 +583,7 @@
query.deleteSubgraph(true);
// Now there should be invalid references ...
- List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ List<ReferenceEntity> invalidReferences = query.getInwardReferences();
assertThat(invalidReferences.size(), is(0));
query.close();
@@ -595,4 +595,59 @@
assertThat(remainingReferences.size(), is(0));
}
+ @Test
+ public void shouldGetVariousReferencesRelatedToSubgraph() throws Exception {
+ // Verify that all the nodes with large values do indeed have them ...
+ verifyNodesHaveLargeValues("/a/a1", "/a/a2", "/a/a2/a1");
+
+ // Count the number of objects ...
+ assertThat((Long)manager.createQuery("select count(*) from LargeValueEntity").getSingleResult(), is(3L));
+ assertThat((Long)manager.createQuery("select count(*) from PropertiesEntity").getSingleResult(), is(14L));
+ assertThat((Long)manager.createQuery("select count(*) from ChildEntity").getSingleResult(), is(14L));
+
+ // Create references from the nodes that aren't even part of the subgraph ...
+ List<ReferenceEntity> otherRefs = new ArrayList<ReferenceEntity>();
+ otherRefs.add(createReferenceBetween("/a/a2", "/a/a2/a1"));
+ otherRefs.add(createReferenceBetween("/a/a2/a1", "/a/a2/a2"));
+
+ // Create references between nodes in the subgraph ...
+ List<ReferenceEntity> internalRefs = new ArrayList<ReferenceEntity>();
+ internalRefs.add(createReferenceBetween("/a/a1", "/a/a1/a1"));
+ internalRefs.add(createReferenceBetween("/a/a1/a2", "/a/a1/a3"));
+
+ // Create references from nodes outside of the subgraph to nodes inside of the subgraph ...
+ List<ReferenceEntity> inwardRefs = new ArrayList<ReferenceEntity>();
+ inwardRefs.add(createReferenceBetween("/a/a2", "/a/a1/a1"));
+ inwardRefs.add(createReferenceBetween("/a/a2/a1", "/a/a1/a3"));
+
+ // Create references from nodes inside of the subgraph to nodes outside of the subgraph ...
+ List<ReferenceEntity> outwardRefs = new ArrayList<ReferenceEntity>();
+ outwardRefs.add(createReferenceBetween("/a/a1", "/a/a2"));
+ outwardRefs.add(createReferenceBetween("/a/a1/a1", "/a/a2/a1"));
+
+ // Create the query ...
+ Path path = path("/a/a1");
+ UUID uuid = uuidByPath.get(path);
+ query = SubgraphQuery.create(context, manager, uuid, path, Integer.MAX_VALUE);
+
+ // Check the various kinds of references ...
+ List<ReferenceEntity> actualInternal = query.getInternalReferences();
+ List<ReferenceEntity> actualInward = query.getInwardReferences();
+ List<ReferenceEntity> actualOutward = query.getOutwardReferences();
+
+ assertThat(actualInternal.size(), is(internalRefs.size()));
+ actualInternal.removeAll(internalRefs);
+ assertThat(actualInternal.size(), is(0));
+
+ assertThat(actualInward.size(), is(inwardRefs.size()));
+ actualInward.removeAll(inwardRefs);
+ assertThat(actualInward.size(), is(0));
+
+ assertThat(actualOutward.size(), is(outwardRefs.size()));
+ actualOutward.removeAll(outwardRefs);
+ assertThat(actualOutward.size(), is(0));
+
+ query.close();
+ }
+
}
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java 2008-12-18 22:46:16 UTC (rev 692)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java 2008-12-18 22:52:23 UTC (rev 693)
@@ -53,6 +53,7 @@
import org.jboss.dna.graph.properties.Property;
import org.jboss.dna.graph.properties.PropertyFactory;
import org.jboss.dna.graph.properties.PropertyType;
+import org.jboss.dna.graph.properties.Reference;
import org.jboss.dna.graph.properties.ValueFactories;
import org.junit.Before;
import org.junit.Test;
@@ -229,8 +230,14 @@
Property prop5 = createProperty("p5", valueFactories.getBinaryFactory().create("something"));
String binaryValue = "really really long string that will be converted to a binary value and tested like that";
Property prop6 = createProperty("p6", valueFactories.getBinaryFactory().create(binaryValue));
+ UUID uuid7 = UUID.randomUUID();
+ Reference ref7 = valueFactories.getReferenceFactory().create(uuid7);
+ Property prop7 = createProperty("p7", ref7);
+ UUID uuid8 = UUID.randomUUID();
+ Reference ref8 = valueFactories.getReferenceFactory().create(uuid8);
+ Property prop8 = createProperty("p8", ref8);
- assertSerializableAndDeserializable(serializer, prop1, prop2, prop3, prop4, prop5, prop6);
+ assertSerializableAndDeserializable(serializer, prop1, prop2, prop3, prop4, prop5, prop6, prop7, prop8);
assertThat(largeValues.getCount(), is(2));
}
@@ -245,6 +252,12 @@
String binaryValueStr = "really really long string that will be converted to a binary value and tested like that";
Binary binaryValue = valueFactories.getBinaryFactory().create(binaryValueStr);
Property prop6 = createProperty("p6", binaryValue);
+ UUID uuid7 = UUID.randomUUID();
+ Reference ref7 = valueFactories.getReferenceFactory().create(uuid7);
+ Property prop7 = createProperty("p7", ref7);
+ UUID uuid8 = UUID.randomUUID();
+ Reference ref8 = valueFactories.getReferenceFactory().create(uuid8);
+ Property prop8 = createProperty("p8", ref8);
Property prop2b = createProperty("p2");
Property prop3b = createProperty("p3", "v3");
@@ -252,7 +265,7 @@
Binary binaryValue2 = valueFactories.getBinaryFactory().create(binaryValueStr2);
Property prop6b = createProperty("p6", binaryValue2);
- Property[] initial = new Property[] {prop1, prop2, prop3, prop4, prop5, prop6};
+ Property[] initial = new Property[] {prop1, prop2, prop3, prop4, prop5, prop6, prop7, prop8};
Property[] updated = new Property[] {prop2b, prop3b, prop6b};
SkippedLargeValues removedLargeValues = new SkippedLargeValues();
assertReserializable(serializer, removedLargeValues, initial, updated);
@@ -265,6 +278,61 @@
assertThat(removedLargeValues.isSkipped(binaryValue), is(true));
}
+ @Test
+ public void shouldAdjustReferences() throws Exception {
+ Property prop1 = createProperty("p1", "v1");
+ String value = "v234567890123456789012345678901234567890";
+ Property prop2 = createProperty("p2", value);
+ Property prop3 = createProperty("p3", "v2");
+ Property prop4 = createProperty("p4", new String(value)); // make sure it's a different String object
+ Property prop5 = createProperty("p5", valueFactories.getBinaryFactory().create("something"));
+ String binaryValueStr = "really really long string that will be converted to a binary value and tested like that";
+ Binary binaryValue = valueFactories.getBinaryFactory().create(binaryValueStr);
+ Property prop6 = createProperty("p6", binaryValue);
+ UUID uuid7 = UUID.randomUUID();
+ Reference ref7 = valueFactories.getReferenceFactory().create(uuid7);
+ Property prop7 = createProperty("p7", ref7);
+ UUID uuid8 = UUID.randomUUID();
+ Reference ref8 = valueFactories.getReferenceFactory().create(uuid8);
+ Property prop8 = createProperty("p8", ref8);
+
+ // Serialize the properties (and verify they're serialized properly) ...
+ Property[] props = new Property[] {prop1, prop2, prop3, prop4, prop5, prop6, prop7, prop8};
+ byte[] content = serialize(serializer, props);
+ List<Property> properties = deserialize(serializer, content);
+ assertThat(properties, hasItems(props));
+
+ // Define the old-to-new UUID mapping ...
+ UUID newUuid7 = UUID.randomUUID();
+ Map<String, String> oldToNewUuids = new HashMap<String, String>();
+ oldToNewUuids.put(uuid7.toString(), newUuid7.toString());
+ // note that 'uuid8' is not included, so 'ref8' should be untouched
+
+ // Now update the references in the serialized properties ...
+ ByteArrayInputStream bais = new ByteArrayInputStream(content);
+ ObjectInputStream ois = new ObjectInputStream(bais);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ ObjectOutputStream oos = new ObjectOutputStream(baos);
+ try {
+ serializer.adjustReferenceProperties(ois, oos, oldToNewUuids);
+ } finally {
+ baos.close();
+ oos.close();
+ }
+ byte[] newContent = baos.toByteArray();
+
+ // Now deserialize the updated content ...
+ properties = deserialize(serializer, newContent);
+
+ // Create the expected new 'prop7' ...
+ Reference newRef7 = valueFactories.getReferenceFactory().create(newUuid7);
+ Property newProp7 = createProperty("p7", newRef7);
+ Property[] newProps = new Property[] {prop1, prop2, prop3, prop4, prop5, prop6, newProp7, prop8};
+
+ // Finally verify that the updated content matches the expected new properties ...
+ assertThat(properties, hasItems(newProps));
+ }
+
protected Property createProperty( String name,
Object... values ) {
return propertyFactory.create(valueFactories.getNameFactory().create(name), values);
@@ -301,27 +369,42 @@
List<Property> outputProperties = new ArrayList<Property>(propertyList.size());
// Serialize the properties one at a time ...
+ byte[] bytes = serialize(serializer, propertyList.toArray(new Property[propertyList.size()]));
+
+ // Deserialize ...
+ outputProperties = deserialize(serializer, bytes);
+
+ // Check the properties match ...
+ assertThat(outputProperties.size(), is(propertyList.size()));
+ assertThat(outputProperties, hasItems(propertyList.toArray(new Property[propertyList.size()])));
+ }
+
+ protected byte[] serialize( Serializer serializer,
+ Property... originalProperties ) throws IOException {
+ // Serialize the properties one at a time ...
+ Collection<Property> initialProps = Arrays.asList(originalProperties);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
- serializer.serializeProperties(oos, propertyList.size(), propertyList, largeValues, references);
+ serializer.serializeProperties(oos, initialProps.size(), initialProps, largeValues, references);
} finally {
oos.close();
}
- byte[] bytes = baos.toByteArray();
+ return baos.toByteArray();
+ }
+ protected List<Property> deserialize( Serializer serializer,
+ byte[] content ) throws IOException, ClassNotFoundException {
// Deserialize ...
- ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+ List<Property> afterProperties = new ArrayList<Property>();
+ ByteArrayInputStream bais = new ByteArrayInputStream(content);
ObjectInputStream ois = new ObjectInputStream(bais);
try {
- serializer.deserializeAllProperties(ois, outputProperties, largeValues);
+ serializer.deserializeAllProperties(ois, afterProperties, largeValues);
} finally {
ois.close();
}
-
- // Check the properties match ...
- assertThat(outputProperties.size(), is(propertyList.size()));
- assertThat(outputProperties, hasItems(propertyList.toArray(new Property[propertyList.size()])));
+ return afterProperties;
}
protected void assertReserializable( Serializer serializer,
@@ -343,22 +426,14 @@
}
// Serialize the properties one at a time ...
- Collection<Property> initialProps = Arrays.asList(originalProperties);
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- ObjectOutputStream oos = new ObjectOutputStream(baos);
- try {
- serializer.serializeProperties(oos, initialProps.size(), initialProps, largeValues, references);
- } finally {
- oos.close();
- }
- byte[] bytes = baos.toByteArray();
+ byte[] bytes = serialize(serializer, originalProperties);
// Now reserialize, updating the properties ...
Collection<Property> updatedProps = Arrays.asList(updatedProperties);
ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
ObjectInputStream ois = new ObjectInputStream(bais);
- baos = new ByteArrayOutputStream();
- oos = new ObjectOutputStream(baos);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
serializer.reserializeProperties(ois, oos, updatedProps, largeValues, removedLargeValues, references);
} finally {
@@ -367,14 +442,8 @@
}
// Deserialize ...
- List<Property> afterProperties = new ArrayList<Property>();
- bais = new ByteArrayInputStream(baos.toByteArray());
- ois = new ObjectInputStream(bais);
- try {
- serializer.deserializeAllProperties(ois, afterProperties, largeValues);
- } finally {
- ois.close();
- }
+ List<Property> afterProperties = deserialize(serializer, baos.toByteArray());
+
Collection<Name> namesAfter = new HashSet<Name>();
for (Property prop : afterProperties) {
namesAfter.add(prop.getName());
15 years, 3 months
DNA SVN: r692 - in trunk/dna-graph/src/main: resources/org/jboss/dna/graph and 1 other directory.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2008-12-18 17:46:16 -0500 (Thu, 18 Dec 2008)
New Revision: 692
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java
trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties
Log:
DNA-266 Improve usability of graph API with additional methods
Added an easy way to set references to nodes as property values. Any Node or Location passed as a property value will automatically be converted to a Reference property value pointing to the UUID of the Node (or Location). Also added additional "to" methods (for setting property values) that take a Node and Location, making it more obvious what values are allowed.
Also changed the SetValuesTo interface so that it's possible to specify the "on" before the values. For example:
graph.set("propName").to("value").on("/node1");
graph.set("propName").on("/node1").to("value");
The first example was what was original required, but the bottom one also works now. Notice that the 3rd method called in each of these ("on" in the first, "to" in the second example) returns an interface that completes the set operation. In other words, the following results in a compile error:
graph.set("propName").to("value").on("/node1").to("value2");
This is great, since it is the desired behavior.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java 2008-12-18 11:53:51 UTC (rev 691)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java 2008-12-18 22:46:16 UTC (rev 692)
@@ -46,6 +46,8 @@
import org.jboss.dna.graph.properties.Path;
import org.jboss.dna.graph.properties.Property;
import org.jboss.dna.graph.properties.PropertyFactory;
+import org.jboss.dna.graph.properties.Reference;
+import org.jboss.dna.graph.properties.ValueFormatException;
import org.jboss.dna.graph.properties.Path.Segment;
import org.jboss.dna.graph.requests.CompositeRequest;
import org.jboss.dna.graph.requests.CopyBranchRequest;
@@ -687,7 +689,7 @@
* @param propertyName the property name
* @return the interface used to specify the values
*/
- public SetValuesTo<On<Conjunction<Graph>>> set( String propertyName ) {
+ public SetValues<Conjunction<Graph>> set( String propertyName ) {
Name name = getContext().getValueFactories().getNameFactory().create(propertyName);
return set(name);
}
@@ -699,23 +701,130 @@
* @param propertyName the property name
* @return the interface used to specify the values
*/
- public SetValuesTo<On<Conjunction<Graph>>> set( final Name propertyName ) {
- return new SetValuesTo<On<Conjunction<Graph>>>() {
+ public SetValues<Conjunction<Graph>> set( final Name propertyName ) {
+ return new SetValues<Conjunction<Graph>>() {
+ @SuppressWarnings( "synthetic-access" )
+ public SetValuesTo<Conjunction<Graph>> on( final Location location ) {
+ return new SetValuesTo<Conjunction<Graph>>() {
+ public Conjunction<Graph> to( Node value ) {
+ return to(value.getLocation());
+ }
+
+ public Conjunction<Graph> to( Location value ) {
+ Reference ref = (Reference)convertReferenceValue(value);
+ Property property = getContext().getPropertyFactory().create(propertyName, ref);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ queue().submit(request);
+ return nextGraph;
+ }
+
+ public Conjunction<Graph> to( Object value ) {
+ value = convertReferenceValue(value);
+ Property property = getContext().getPropertyFactory().create(propertyName, value);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ queue().submit(request);
+ return nextGraph;
+ }
+
+ public Conjunction<Graph> to( Object firstValue,
+ Object... otherValues ) {
+ firstValue = convertReferenceValue(firstValue);
+ for (int i = 0, len = otherValues.length; i != len; ++i) {
+ otherValues[i] = convertReferenceValue(otherValues[i]);
+ }
+ Property property = getContext().getPropertyFactory().create(propertyName, firstValue, otherValues);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ queue().submit(request);
+ return nextGraph;
+ }
+
+ public Conjunction<Graph> to( Iterable<?> values ) {
+ List<Object> valueList = new LinkedList<Object>();
+ for (Object value : values) {
+ value = convertReferenceValue(value);
+ valueList.add(value);
+ }
+ Property property = getContext().getPropertyFactory().create(propertyName, valueList);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ queue().submit(request);
+ return nextGraph;
+ }
+
+ public Conjunction<Graph> to( Iterator<?> values ) {
+ List<Object> valueList = new LinkedList<Object>();
+ while (values.hasNext()) {
+ Object value = values.next();
+ valueList.add(value);
+ }
+ Property property = getContext().getPropertyFactory().create(propertyName, valueList);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ queue().submit(request);
+ return nextGraph;
+ }
+ };
+ }
+
+ public SetValuesTo<Conjunction<Graph>> on( String path ) {
+ return on(new Location(createPath(path)));
+ }
+
+ public SetValuesTo<Conjunction<Graph>> on( Path path ) {
+ return on(new Location(path));
+ }
+
+ public SetValuesTo<Conjunction<Graph>> on( Property idProperty ) {
+ return on(new Location(idProperty));
+ }
+
+ public SetValuesTo<Conjunction<Graph>> on( Property firstIdProperty,
+ Property... additionalIdProperties ) {
+ return on(new Location(firstIdProperty, additionalIdProperties));
+ }
+
+ public SetValuesTo<Conjunction<Graph>> on( UUID uuid ) {
+ return on(new Location(uuid));
+ }
+
+ public On<Conjunction<Graph>> to( Node node ) {
+ Reference value = (Reference)convertReferenceValue(node);
+ return set(getContext().getPropertyFactory().create(propertyName, value));
+ }
+
+ public On<Conjunction<Graph>> to( Location location ) {
+ Reference value = (Reference)convertReferenceValue(location);
+ return set(getContext().getPropertyFactory().create(propertyName, value));
+ }
+
public On<Conjunction<Graph>> to( Object value ) {
+ value = convertReferenceValue(value);
return set(getContext().getPropertyFactory().create(propertyName, value));
}
public On<Conjunction<Graph>> to( Object firstValue,
Object... otherValues ) {
+ firstValue = convertReferenceValue(firstValue);
+ for (int i = 0, len = otherValues.length; i != len; ++i) {
+ otherValues[i] = convertReferenceValue(otherValues[i]);
+ }
return set(getContext().getPropertyFactory().create(propertyName, firstValue, otherValues));
}
public On<Conjunction<Graph>> to( Iterable<?> values ) {
- return set(getContext().getPropertyFactory().create(propertyName, values));
+ List<Object> valueList = new LinkedList<Object>();
+ for (Object value : values) {
+ value = convertReferenceValue(value);
+ valueList.add(value);
+ }
+ return set(getContext().getPropertyFactory().create(propertyName, valueList));
}
public On<Conjunction<Graph>> to( Iterator<?> values ) {
- return set(getContext().getPropertyFactory().create(propertyName, values));
+ List<Object> valueList = new LinkedList<Object>();
+ while (values.hasNext()) {
+ Object value = values.next();
+ valueList.add(value);
+ }
+ return set(getContext().getPropertyFactory().create(propertyName, valueList));
}
};
}
@@ -1092,6 +1201,19 @@
}
/**
+ * Request to read the node given by the supplied reference value.
+ *
+ * @param reference the reference property value that is to be resolved into a node
+ * @return the node that is read from the repository
+ * @throws ValueFormatException if the supplied reference could not be converted to an identifier property value
+ */
+ public Node resolve( Reference reference ) {
+ CheckArg.isNotNull(reference, "reference");
+ UUID uuid = context.getValueFactories().getUuidFactory().create(reference);
+ return getNodeAt(uuid);
+ }
+
+ /**
* Request to read a subgraph of the specified depth, rooted at a location that will be specified via <code>at(...)</code> in
* the resulting {@link At} object. All properties and children of every node in the subgraph will be read and returned in the
* {@link Subgraph} object returned from the <code>at(...)</code> methods.
@@ -1859,7 +1981,7 @@
* @param propertyName the property name
* @return the interface used to specify the values
*/
- public SetValuesTo<On<BatchConjunction>> set( String propertyName ) {
+ public SetValues<BatchConjunction> set( String propertyName ) {
Name name = getContext().getValueFactories().getNameFactory().create(propertyName);
return set(name);
}
@@ -1871,23 +1993,129 @@
* @param propertyName the property name
* @return the interface used to specify the values
*/
- public SetValuesTo<On<BatchConjunction>> set( final Name propertyName ) {
- return new SetValuesTo<On<BatchConjunction>>() {
+ public SetValues<BatchConjunction> set( final Name propertyName ) {
+ return new SetValues<BatchConjunction>() {
+ public SetValuesTo<BatchConjunction> on( final Location location ) {
+ return new SetValuesTo<BatchConjunction>() {
+ public BatchConjunction to( Node value ) {
+ return to(value.getLocation());
+ }
+
+ public BatchConjunction to( Location value ) {
+ Reference ref = (Reference)convertReferenceValue(value);
+ Property property = getContext().getPropertyFactory().create(propertyName, ref);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ requestQueue.submit(request);
+ return nextRequests;
+ }
+
+ public BatchConjunction to( Object value ) {
+ value = convertReferenceValue(value);
+ Property property = getContext().getPropertyFactory().create(propertyName, value);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ requestQueue.submit(request);
+ return nextRequests;
+ }
+
+ public BatchConjunction to( Object firstValue,
+ Object... otherValues ) {
+ firstValue = convertReferenceValue(firstValue);
+ for (int i = 0, len = otherValues.length; i != len; ++i) {
+ otherValues[i] = convertReferenceValue(otherValues[i]);
+ }
+ Property property = getContext().getPropertyFactory().create(propertyName, firstValue, otherValues);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ requestQueue.submit(request);
+ return nextRequests;
+ }
+
+ public BatchConjunction to( Iterable<?> values ) {
+ List<Object> valueList = new LinkedList<Object>();
+ for (Object value : values) {
+ value = convertReferenceValue(value);
+ valueList.add(value);
+ }
+ Property property = getContext().getPropertyFactory().create(propertyName, valueList);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ requestQueue.submit(request);
+ return nextRequests;
+ }
+
+ public BatchConjunction to( Iterator<?> values ) {
+ List<Object> valueList = new LinkedList<Object>();
+ while (values.hasNext()) {
+ Object value = values.next();
+ valueList.add(value);
+ }
+ Property property = getContext().getPropertyFactory().create(propertyName, valueList);
+ UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, property);
+ requestQueue.submit(request);
+ return nextRequests;
+ }
+ };
+ }
+
+ public SetValuesTo<BatchConjunction> on( String path ) {
+ return on(new Location(createPath(path)));
+ }
+
+ public SetValuesTo<BatchConjunction> on( Path path ) {
+ return on(new Location(path));
+ }
+
+ public SetValuesTo<BatchConjunction> on( Property idProperty ) {
+ return on(new Location(idProperty));
+ }
+
+ public SetValuesTo<BatchConjunction> on( Property firstIdProperty,
+ Property... additionalIdProperties ) {
+ return on(new Location(firstIdProperty, additionalIdProperties));
+ }
+
+ public SetValuesTo<BatchConjunction> on( UUID uuid ) {
+ return on(new Location(uuid));
+ }
+
+ public On<BatchConjunction> to( Node value ) {
+ Object reference = convertReferenceValue(value);
+ return set(getContext().getPropertyFactory().create(propertyName, reference));
+ }
+
+ public On<BatchConjunction> to( Location value ) {
+ Object reference = convertReferenceValue(value);
+ return set(getContext().getPropertyFactory().create(propertyName, reference));
+ }
+
public On<BatchConjunction> to( Object value ) {
+ value = convertReferenceValue(value);
return set(getContext().getPropertyFactory().create(propertyName, value));
}
public On<BatchConjunction> to( Object firstValue,
Object... otherValues ) {
+ firstValue = convertReferenceValue(firstValue);
+ for (int i = 0, len = otherValues.length; i != len; ++i) {
+ otherValues[i] = convertReferenceValue(otherValues[i]);
+ }
return set(getContext().getPropertyFactory().create(propertyName, firstValue, otherValues));
}
public On<BatchConjunction> to( Iterable<?> values ) {
- return set(getContext().getPropertyFactory().create(propertyName, values));
+ List<Object> valueList = new LinkedList<Object>();
+ for (Object value : values) {
+ value = convertReferenceValue(value);
+ valueList.add(value);
+ }
+ return set(getContext().getPropertyFactory().create(propertyName, valueList));
}
public On<BatchConjunction> to( Iterator<?> values ) {
- return set(getContext().getPropertyFactory().create(propertyName, values));
+ List<Object> valueList = new LinkedList<Object>();
+ while (values.hasNext()) {
+ Object value = values.next();
+ valueList.add(value);
+ }
+ return set(getContext().getPropertyFactory().create(propertyName, valueList));
}
};
}
@@ -2254,6 +2482,37 @@
}
/**
+ * Utility method for checking a property value. If the value is a {@link Node} or {@link Location}, a {@link Reference} value
+ * is created (if the node/location has a UUID); otherwise, the value is returned as is.
+ *
+ * @param value the property value
+ * @return the property value, which may be a {@link Reference} if the input value is a Node or Location
+ */
+ protected Object convertReferenceValue( Object value ) {
+ if (value instanceof Node) {
+ Node node = (Node)value;
+ UUID uuid = node.getLocation().getUuid();
+ if (uuid == null) {
+ String nodeString = node.getLocation().getString(getContext().getNamespaceRegistry());
+ String msg = GraphI18n.unableToCreateReferenceToNodeWithoutUuid.text(nodeString);
+ throw new IllegalArgumentException(msg);
+ }
+ return getContext().getValueFactories().getReferenceFactory().create(uuid);
+ }
+ if (value instanceof Location) {
+ Location location = (Location)value;
+ UUID uuid = location.getUuid();
+ if (uuid == null) {
+ String nodeString = location.getString(getContext().getNamespaceRegistry());
+ String msg = GraphI18n.unableToCreateReferenceToNodeWithoutUuid.text(nodeString);
+ throw new IllegalArgumentException(msg);
+ }
+ return getContext().getValueFactories().getReferenceFactory().create(uuid);
+ }
+ return value;
+ }
+
+ /**
 * An interface used to execute the accumulated {@link Batch requests}.
*
* @author Randall Hauch
@@ -2864,41 +3123,86 @@
/**
* A component used to set the values on a property.
*
+ * @param <Next> the next command
+ * @author Randall Hauch
+ */
+ public interface SetValues<Next> extends On<SetValuesTo<Next>>, SetValuesTo<On<Next>> {
+ }
+
+ /**
+ * A component used to set the values on a property.
+ *
* @param <Next>
* @author Randall Hauch
*/
public interface SetValuesTo<Next> {
+
/**
- * Set the property value to the given object.
+ * Set the property value to be a reference to the given node. Note that it is an error if the Node does not have a
+ * {@link Location#getUuid() UUID}.
*
+ * @param node the node to which a reference should be set
+ * @return the interface for additional requests or actions
+ * @throws IllegalArgumentException if the value is a Node that has no {@link Location#getUuid() UUID}
+ */
+ Next to( Node node );
+
+ /**
+ * Set the property value to be a reference to the given location. Note that it is an error if the Location does not have
+ * a {@link Location#getUuid() UUID}.
+ *
+ * @param location the location to which a reference should be set
+ * @return the interface for additional requests or actions
+ * @throws IllegalArgumentException if the value is a Location that has no {@link Location#getUuid() UUID}
+ */
+ Next to( Location location );
+
+ /**
+ * Set the property value to the given object. The supplied <code>value</code> should be a valid property value, or a
+ * {@link Node} (or {@link Location}) if the property value is to be a reference to that node (or location). Note that it
+ * is an error if the Node (or Location) does not have a {@link Location#getUuid() UUID}.
+ *
* @param value the property value
* @return the interface for additional requests or actions
+ * @throws IllegalArgumentException if the value is a Node or Location that has no {@link Location#getUuid() UUID}
*/
Next to( Object value );
/**
- * Set the property value to the given objects.
+ * Set the property value to the given objects. Each of the supplied values should be a valid property value, or a
+ * {@link Node} (or {@link Location}) if the property value is to be a reference to that node (or location). Note that it
+ * is an error if the Node (or Location) does not have a {@link Location#getUuid() UUID}.
*
* @param firstValue the first property value
* @param otherValues the remaining property values
* @return the interface for additional requests or actions
+ * @throws IllegalArgumentException if any of the values is a Node or Location that has no {@link Location#getUuid()
+ * UUID}
*/
Next to( Object firstValue,
Object... otherValues );
/**
- * Set the property value to the given object.
+ * Set the property value to the given object. Each of the supplied values should be a valid property value, or a
+ * {@link Node} (or {@link Location}) if the property value is to be a reference to that node (or location). Note that it
+ * is an error if the Node (or Location) does not have a {@link Location#getUuid() UUID}.
*
* @param values the container for the property values
* @return the interface for additional requests or actions
+ * @throws IllegalArgumentException if any of the values is a Node or Location that has no {@link Location#getUuid()
+ * UUID}
*/
Next to( Iterable<?> values );
/**
- * Set the property value to the given object.
+ * Set the property value to the given object. Each of the supplied values should be a valid property value, or a
+ * {@link Node} (or {@link Location}) if the property value is to be a reference to that node (or location). Note that it
+ * is an error if the Node (or Location) does not have a {@link Location#getUuid() UUID}.
*
* @param values the iterator over the property values
* @return the interface for additional requests or actions
+ * @throws IllegalArgumentException if any of the values is a Node or Location that has no {@link Location#getUuid()
+ * UUID}
*/
Next to( Iterator<?> values );
}
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java 2008-12-18 11:53:51 UTC (rev 691)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java 2008-12-18 22:46:16 UTC (rev 692)
@@ -66,6 +66,7 @@
public static I18n closedRequestProcessor;
public static I18n multipleErrorsWhileExecutingRequests;
public static I18n unableToAddMoreRequestsToAlreadyExecutedBatch;
+ public static I18n unableToCreateReferenceToNodeWithoutUuid;
public static I18n actualLocationIsNotSameAsInputLocation;
public static I18n actualLocationIsNotChildOfInputLocation;
public static I18n actualLocationMustHavePath;
Modified: trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties
===================================================================
--- trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties 2008-12-18 11:53:51 UTC (rev 691)
+++ trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties 2008-12-18 22:46:16 UTC (rev 692)
@@ -53,6 +53,7 @@
closedRequestProcessor = Closed request processor
multipleErrorsWhileExecutingRequests = {0} of the {1} requests resulted in errors
unableToAddMoreRequestsToAlreadyExecutedBatch = Unable to add more requests to a batch of graph requests that has already been executed
+unableToCreateReferenceToNodeWithoutUuid = Unable to set a reference to node {0} since it has no UUID
actualLocationIsNotSameAsInputLocation = The actual location of {0} is not the same as the current location of {1}
actualLocationIsNotChildOfInputLocation = The actual location of {0} is not a child of the specified location {1}
actualLocationMustHavePath = The actual location of {0} must have a path
15 years, 3 months
DNA SVN: r691 - in trunk/extensions: dna-common-jdbc/src/main/java/org/jboss/dna/common/jdbc/util and 13 other directories.
by dna-commits@lists.jboss.org
Author: lisbor
Date: 2008-12-18 06:53:51 -0500 (Thu, 18 Dec 2008)
New Revision: 691
Added:
trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcConnection.java
trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcRequestProcesor.java
trunk/extensions/dna-connector-jdbc-metadata/src/test/data/
trunk/extensions/dna-connector-jdbc-metadata/src/test/data/insert.xml
trunk/extensions/dna-connector-jdbc-metadata/src/test/data/testdb/
trunk/extensions/dna-connector-jdbc-metadata/src/test/data/testdb/db.properties
trunk/extensions/dna-connector-jdbc-metadata/src/test/data/testdb/db.script
trunk/extensions/dna-connector-jdbc-metadata/src/test/java/org/
trunk/extensions/dna-connector-jdbc-metadata/src/test/java/org/jboss/
trunk/extensions/dna-connector-jdbc-metadata/src/test/java/org/jboss/dna/
trunk/extensions/dna-connector-jdbc-metadata/src/test/java/org/jboss/dna/connector/
trunk/extensions/dna-connector-jdbc-metadata/src/test/java/org/jboss/dna/connector/jdbc/
trunk/extensions/dna-connector-jdbc-metadata/src/test/java/org/jboss/dna/connector/jdbc/DatabaseBasicTest.java
trunk/extensions/dna-connector-jdbc-metadata/src/test/resources/log4j.xml
Modified:
trunk/extensions/dna-common-jdbc/src/main/java/org/jboss/dna/common/jdbc/model/spi/TableTypeBean.java
trunk/extensions/dna-common-jdbc/src/main/java/org/jboss/dna/common/jdbc/util/DatabaseUtil.java
trunk/extensions/dna-connector-jdbc-metadata/pom.xml
trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcMetadataI18n.java
trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcRepositorySource.java
trunk/extensions/dna-connector-jdbc-metadata/src/main/resources/org/jboss/dna/connector/jdbc/JdbcMetadataI18n.properties
Log:
DNA-37 Federate schema information from relational sources
https://jira.jboss.org/jira/browse/DNA-37
JdbcRepositorySource, JdbcConnection, and simple test
Modified: trunk/extensions/dna-common-jdbc/src/main/java/org/jboss/dna/common/jdbc/model/spi/TableTypeBean.java
===================================================================
--- trunk/extensions/dna-common-jdbc/src/main/java/org/jboss/dna/common/jdbc/model/spi/TableTypeBean.java 2008-12-17 15:54:36 UTC (rev 690)
+++ trunk/extensions/dna-common-jdbc/src/main/java/org/jboss/dna/common/jdbc/model/spi/TableTypeBean.java 2008-12-18 11:53:51 UTC (rev 691)
@@ -35,8 +35,7 @@
*/
public class TableTypeBean extends DatabaseNamedObjectBean implements TableType {
private static final long serialVersionUID = -5095835769360603900L;
- private String tableTypeName;
-
+
/**
* Default constructor
*/
Modified: trunk/extensions/dna-common-jdbc/src/main/java/org/jboss/dna/common/jdbc/util/DatabaseUtil.java
===================================================================
--- trunk/extensions/dna-common-jdbc/src/main/java/org/jboss/dna/common/jdbc/util/DatabaseUtil.java 2008-12-17 15:54:36 UTC (rev 690)
+++ trunk/extensions/dna-common-jdbc/src/main/java/org/jboss/dna/common/jdbc/util/DatabaseUtil.java 2008-12-18 11:53:51 UTC (rev 691)
@@ -21,6 +21,8 @@
*/
package org.jboss.dna.common.jdbc.util;
+import java.lang.reflect.Method;
+import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
@@ -3464,4 +3466,27 @@
superTableName));
}
}
+
+ /**
+ * Get simple database metadata for the getter method (no input parameters)
+ * @param <T> the return type
+ * @param instance the instance of database metadata implementation
+ * @param methodName the full name of a getter method to execute
+ * @param traceLog the log
+ * @return simple database metadata for the getter method
+ */
+ @SuppressWarnings("unchecked")
+ public static <T> T getDatabaseMetadataProperty (DatabaseMetaData instance, String methodName, Logger traceLog) {
+ try {
+ // access to the instance's RTTI
+ Method m = instance.getClass().getDeclaredMethod (methodName);
+ // trying to execute method without parameters
+ return (T) m.invoke(instance);
+ } catch (Exception e) {
+ traceLog.debug(String.format ("Unable to execute getDatabaseMetadata for the '%1$s' method - %2$s: %3$s",
+ methodName, e.getClass().getName(), e.getMessage()));
+ // default is null
+ return null;
+ }
+ }
}
Modified: trunk/extensions/dna-connector-jdbc-metadata/pom.xml
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/pom.xml 2008-12-17 15:54:36 UTC (rev 690)
+++ trunk/extensions/dna-connector-jdbc-metadata/pom.xml 2008-12-18 11:53:51 UTC (rev 691)
@@ -49,5 +49,46 @@
<version>4.4</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <version>1.8.0.7</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.dbunit</groupId>
+ <artifactId>dbunit</artifactId>
+ <version>2.4.1</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
+ <properties>
+ <jdbc.driverClassName>org.hsqldb.jdbcDriver</jdbc.driverClassName>
+ <jdbc.username>sa</jdbc.username>
+ <jdbc.password />
+ <jdbc.url>jdbc:hsqldb:file:${basedir}/target/testdb/db</jdbc.url>
+ </properties>
+ <build>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
+ <testResources>
+ <testResource>
+ <directory>src/test/resources</directory>
+ <filtering>true</filtering>
+ </testResource>
+ <testResource>
+ <directory>src/test/data</directory>
+ <filtering>true</filtering>
+ <targetPath>../</targetPath>
+ </testResource>
+ <testResource>
+ <directory>src/it/resources</directory>
+ <filtering>true</filtering>
+ </testResource>
+ </testResources>
+ </build>
</project>
\ No newline at end of file
Added: trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcConnection.java
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcConnection.java (rev 0)
+++ trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcConnection.java 2008-12-18 11:53:51 UTC (rev 691)
@@ -0,0 +1,176 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.jdbc;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.UUID;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.TimeUnit;
+import javax.transaction.xa.XAResource;
+import javax.sql.XAConnection;
+
+import org.jboss.dna.common.util.Logger;
+import org.jboss.dna.graph.ExecutionContext;
+import org.jboss.dna.graph.cache.CachePolicy;
+import org.jboss.dna.graph.connectors.RepositoryConnection;
+import org.jboss.dna.graph.connectors.RepositorySourceException;
+import org.jboss.dna.graph.connectors.RepositorySourceListener;
+import org.jboss.dna.graph.requests.Request;
+import org.jboss.dna.graph.requests.processor.RequestProcessor;
+
+/**
+ * JDBC connection wrapper
+ *
+ * @author <a href="mailto:litsenko_sergey@yahoo.com">Sergiy Litsenko</a>
+ */
+public class JdbcConnection implements RepositoryConnection {
+ /**
+ * Logging for this instance
+ */
+ protected Logger log = Logger.getLogger(getClass());
+
+ private final String name;
+ private final CachePolicy cachePolicy;
+ private final CopyOnWriteArrayList<RepositorySourceListener> listeners = new CopyOnWriteArrayList<RepositorySourceListener>();
+ private final Connection connection;
+ private final UUID rootNodeUuid;
+
+ /*package*/JdbcConnection( String sourceName,
+ CachePolicy cachePolicy,
+ Connection connection,
+ UUID rootNodeUuid) {
+ assert sourceName != null;
+ assert connection != null;
+ assert rootNodeUuid != null;
+ this.name = sourceName;
+ this.cachePolicy = cachePolicy; // may be null
+ this.connection = connection;
+ this.rootNodeUuid = rootNodeUuid;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.connectors.RepositoryConnection#getSourceName()
+ */
+ public String getSourceName() {
+ return name;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.connectors.RepositoryConnection#setListener(org.jboss.dna.graph.connectors.RepositorySourceListener)
+ */
+ public void setListener( RepositorySourceListener listener ) {
+ if (listener != null) {
+ listeners.addIfAbsent(listener);
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.connectors.RepositoryConnection#getDefaultCachePolicy()
+ */
+ public CachePolicy getDefaultCachePolicy() {
+ return cachePolicy;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.connectors.RepositoryConnection#getXAResource()
+ */
+ public XAResource getXAResource() {
+ // if implemented by JDBC driver
+ if (connection instanceof XAConnection) {
+ try {
+ return ((XAConnection)connection).getXAResource();
+ } catch (SQLException e) {
+ // handle an exception silently so far and write it to the log
+ log.error(e, JdbcMetadataI18n.unableToGetXAResource, getSourceName());
+ return null;
+ }
+ }
+ // default
+ return null;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.connectors.RepositoryConnection#ping(long, java.util.concurrent.TimeUnit)
+ */
+ public boolean ping( long time,
+ TimeUnit unit ) {
+ try {
+ // JDBC 4 has a method to check validity of a connection (connection.isValid(timeout))
+ // but many drivers didn't get updated with latest spec
+ return connection != null && ! connection.isClosed();
+ } catch (SQLException e) {
+ // debug
+ if (log.isDebugEnabled()) {
+ log.debug(e, "{0}: Unable to check database connection due to error.", getSourceName());
+ }
+ return false;
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.connectors.RepositoryConnection#execute(org.jboss.dna.graph.ExecutionContext,
+ * org.jboss.dna.graph.requests.Request)
+ */
+ public void execute( ExecutionContext context,
+ Request request ) throws RepositorySourceException {
+ // create processor and delegate handling
+ RequestProcessor proc = new JdbcRequestProcesor(getSourceName(),context, connection);
+ try {
+ proc.process(request);
+ } finally {
+ proc.close();
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.connectors.RepositoryConnection#close()
+ */
+ public void close() {
+ try {
+ // release the JDBC connection resource
+ if (connection != null && ! connection.isClosed()) {
+ connection.close();
+ }
+ } catch (Exception e) {
+ // handle exception silently so far
+ if (log.isDebugEnabled()) {
+ log.debug(e, "{0}: Unable to close database connection due to error.", getSourceName());
+ }
+ }
+ }
+
+}
Property changes on: trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcConnection.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Modified: trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcMetadataI18n.java
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcMetadataI18n.java 2008-12-17 15:54:36 UTC (rev 690)
+++ trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcMetadataI18n.java 2008-12-18 11:53:51 UTC (rev 691)
@@ -34,9 +34,13 @@
public static I18n nodeDoesNotExist;
public static I18n nodeTypeIsNotSupported;
public static I18n propertyIsRequired;
+ public static I18n oneOfPropertiesIsRequired;
public static I18n errorSerializingCachePolicyInSource;
public static I18n locationInRequestMustHavePath;
public static I18n sourceIsReadOnly;
+ public static I18n unableToGetConnectionUsingDriver;
+ public static I18n unableToGetConnectionUsingDataSource;
+ public static I18n unableToGetXAResource;
static {
try {
Modified: trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcRepositorySource.java
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcRepositorySource.java 2008-12-17 15:54:36 UTC (rev 690)
+++ trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcRepositorySource.java 2008-12-18 11:53:51 UTC (rev 691)
@@ -30,6 +30,7 @@
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
+import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import javax.naming.BinaryRefAddr;
@@ -39,8 +40,13 @@
import javax.naming.Reference;
import javax.naming.StringRefAddr;
import javax.naming.spi.ObjectFactory;
+
import net.jcip.annotations.ThreadSafe;
import org.jboss.dna.common.i18n.I18n;
+import org.jboss.dna.common.jdbc.provider.DataSourceDatabaseMetadataProvider;
+import org.jboss.dna.common.jdbc.provider.DefaultDataSourceDatabaseMetadataProvider;
+import org.jboss.dna.common.jdbc.provider.DefaultDriverDatabaseMetadataProvider;
+import org.jboss.dna.common.jdbc.provider.DriverDatabaseMetadataProvider;
import org.jboss.dna.graph.cache.CachePolicy;
import org.jboss.dna.graph.connectors.RepositoryConnection;
import org.jboss.dna.graph.connectors.RepositoryContext;
@@ -54,7 +60,8 @@
* @author <a href="mailto:litsenko_sergey@yahoo.com">Sergiy Litsenko</a>
*/
public class JdbcRepositorySource implements RepositorySource, ObjectFactory {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 3380130639143030018L;
+
/**
* The default limit is {@value} for retrying {@link RepositoryConnection connection} calls to the underlying source.
*/
@@ -65,6 +72,10 @@
* read-only or updateable}.
*/
public static final boolean DEFAULT_SUPPORTS_UPDATES = true;
+ /**
+ * The default UUID that is used for root nodes in a JDBC connector.
+ */
+ public static final String DEFAULT_ROOT_NODE_UUID = "9f9a52c8-0a4d-40d0-ac58-7c77b24b3155";
/**
* This source supports events.
@@ -80,14 +91,47 @@
protected final Capabilities capabilities = new Capabilities();
protected transient RepositoryContext repositoryContext;
protected CachePolicy defaultCachePolicy;
-
+ protected transient DriverDatabaseMetadataProvider driverProvider;
+ protected transient DataSourceDatabaseMetadataProvider dataSourceProvider;
+ protected transient UUID rootUuid = UUID.fromString(DEFAULT_ROOT_NODE_UUID);
+
protected static final String SOURCE_NAME = "sourceName";
+ protected static final String ROOT_NODE_UUID = "rootNodeUuid";
protected static final String DEFAULT_CACHE_POLICY = "defaultCachePolicy";
- protected static final String REPO_JNDI_NAME = "jndiName";
- protected static final String REPO_FACTORY_JNDI_NAME = "factoryJndiName";
+ protected static final String DATA_SOURCE_JNDI_NAME = "dataSourceJndiName";
+ protected static final String USERNAME = "username";
+ protected static final String PASSWORD = "password";
+ protected static final String URL = "url";
+ protected static final String DRIVER_CLASS_NAME = "driverClassName";
protected static final String RETRY_LIMIT = "retryLimit";
/**
+ * Get and optionally create driver based provider
+ * @param create create provider
+ * @return driverProvider
+ */
+ protected DriverDatabaseMetadataProvider getDriverProvider(boolean create) {
+ // lazy creation
+ if (driverProvider == null && create) {
+ driverProvider = new DefaultDriverDatabaseMetadataProvider();
+ }
+ return driverProvider;
+ }
+
+ /**
+ * Get and optionally create data source based provider
+ * @param create create provider
+ * @return dataSourceProvider
+ */
+ protected DataSourceDatabaseMetadataProvider getDataSourceProvider(boolean create) {
+ // lazy creation
+ if (dataSourceProvider == null && create) {
+ dataSourceProvider = new DefaultDataSourceDatabaseMetadataProvider();
+ }
+ return dataSourceProvider;
+ }
+
+ /**
* default constructor
*/
public JdbcRepositorySource() {
@@ -108,8 +152,44 @@
* @see org.jboss.dna.graph.connectors.RepositorySource#getConnection()
*/
public RepositoryConnection getConnection() throws RepositorySourceException {
- // TODO create Jdbc connection
- return null;
+ String errMsg = null;
+ // check name
+ if (getName() == null) {
+ errMsg = JdbcMetadataI18n.propertyIsRequired.text("name");
+ throw new RepositorySourceException(errMsg);
+ }
+
+ // create Jdbc connection using data source first
+ try {
+ if (dataSourceProvider != null) {
+ // create wrapper for Jdbc connection
+ return new JdbcConnection(getName(),
+ getDefaultCachePolicy(),
+ dataSourceProvider.getConnection(),
+ rootUuid);
+ }
+ } catch (Exception e) {
+            errMsg = JdbcMetadataI18n.unableToGetConnectionUsingDataSource.text(getName(), getDataSourceName());
+ throw new RepositorySourceException(errMsg, e);
+ }
+
+ // create Jdbc connection using driver and database URL
+ try {
+ if (driverProvider != null) {
+ // create wrapper for Jdbc connection
+ return new JdbcConnection(getName(),
+ getDefaultCachePolicy(),
+ driverProvider.getConnection(),
+ rootUuid);
+ }
+ } catch (Exception e) {
+            errMsg = JdbcMetadataI18n.unableToGetConnectionUsingDriver.text(getName(), getDriverClassName(), getDatabaseUrl());
+ throw new RepositorySourceException(errMsg, e);
+ }
+
+ // Either data source name or JDBC driver connection properties must be defined
+ errMsg = JdbcMetadataI18n.oneOfPropertiesIsRequired.text(getName());
+ throw new RepositorySourceException(errMsg);
}
/**
@@ -193,6 +273,22 @@
}
/**
+ * @return rootNodeUuid
+ */
+ public String getRootNodeUuid() {
+ return rootUuid != null? rootUuid.toString() : null;
+ }
+
+ /**
+ * @param rootNodeUuid Sets rootNodeUuid to the specified value.
+ * @throws IllegalArgumentException if the string value cannot be converted to UUID
+ */
+ public void setRootNodeUuid( String rootNodeUuid ) {
+        if (rootNodeUuid == null || rootNodeUuid.trim().length() == 0) rootNodeUuid = DEFAULT_ROOT_NODE_UUID;
+ this.rootUuid = UUID.fromString(rootNodeUuid);
+ }
+
+ /**
* {@inheritDoc}
*/
@Override
@@ -211,6 +307,140 @@
}
/**
+ * Gets JDBC driver class name
+ *
+ * @return the JDBC driver class name if any
+ */
+ public String getDriverClassName() {
+ // get provider
+ DriverDatabaseMetadataProvider provider = getDriverProvider(false);
+ // return
+ return (provider != null)? provider.getDriverClassName() : null;
+ }
+
+ /**
+ * Sets JDBC driver class name
+ *
+ * @param driverClassName the JDBC driver class name
+ */
+ public void setDriverClassName( String driverClassName ) {
+ if (driverClassName == null) {
+ driverProvider = null;
+ } else {
+ // get/create provider
+ DriverDatabaseMetadataProvider provider = getDriverProvider(true);
+ // set
+ provider.setDriverClassName(driverClassName);
+ }
+ }
+
+ /**
+ * Gets database URL as string
+ *
+ * @return database URL as string
+ */
+ public String getDatabaseUrl() {
+ // get provider
+ DriverDatabaseMetadataProvider provider = getDriverProvider(false);
+ // return
+ return (provider != null)? provider.getDatabaseUrl() : null;
+ }
+
+ /**
+ * Sets the database URL as string
+ *
+ * @param databaseUrl the database URL as string
+ */
+ public void setDatabaseUrl( String databaseUrl ) {
+ if (databaseUrl == null) {
+ driverProvider = null;
+ } else {
+ // get/create provider
+ DriverDatabaseMetadataProvider provider = getDriverProvider(true);
+ // set
+ provider.setDatabaseUrl(databaseUrl);
+ }
+ }
+
+ /**
+ * Gets the user name
+ *
+ * @return the user name
+ */
+ public String getUserName() {
+ // get provider
+ DriverDatabaseMetadataProvider provider = getDriverProvider(false);
+ return (provider != null)? provider.getUserName() : null;
+ }
+
+ /**
+ * Sets the user name
+ *
+ * @param userName the user name
+ */
+ public void setUserName( String userName ) {
+ if (userName == null) {
+ driverProvider = null;
+ } else {
+ // get provider
+ DriverDatabaseMetadataProvider provider = getDriverProvider(true);
+ provider.setUserName(userName);
+ }
+ }
+
+ /**
+ * Get user's password
+ *
+ * @return user's password
+ */
+ public String getPassword() {
+ // get provider
+ DriverDatabaseMetadataProvider provider = getDriverProvider(false);
+ return (provider != null)? provider.getPassword() : null;
+ }
+
+ /**
+ * Sets the user's password
+ *
+ * @param password the user's password
+ */
+ public void setPassword( String password ) {
+ if (password == null) {
+ driverProvider = null;
+ } else {
+ // get provider
+ DriverDatabaseMetadataProvider provider = getDriverProvider(true);
+ provider.setPassword(password);
+ }
+ }
+
+    /**
+     * Gets the data source JNDI name
+     *
+     * @return data source JNDI name
+     */
+ public String getDataSourceName() {
+ // get provider
+ DataSourceDatabaseMetadataProvider provider = getDataSourceProvider(false);
+ return (provider != null)? provider.getDataSourceName() : null;
+ }
+
+ /**
+ * Sets data source JNDI name
+ *
+ * @param dataSourceName the data source JNDI name
+ */
+ public void setDataSourceName( String dataSourceName ) {
+ if (dataSourceName == null) {
+ dataSourceProvider = null;
+ } else {
+ // get provider
+ DataSourceDatabaseMetadataProvider provider = getDataSourceProvider(true);
+ provider.setDataSourceName(dataSourceName);
+ }
+ }
+
+ /**
* {@inheritDoc}
*
* @see javax.naming.Referenceable#getReference()
@@ -223,6 +453,29 @@
if (getName() != null) {
ref.add(new StringRefAddr(SOURCE_NAME, getName()));
}
+
+ if (getRootNodeUuid() != null) {
+ ref.add(new StringRefAddr(ROOT_NODE_UUID, getRootNodeUuid()));
+ }
+ if (getDataSourceName() != null) {
+ ref.add(new StringRefAddr(DATA_SOURCE_JNDI_NAME, getDataSourceName()));
+ }
+
+ if (getUserName() != null) {
+ ref.add(new StringRefAddr(USERNAME, getUserName()));
+ }
+
+ if (getPassword() != null) {
+ ref.add(new StringRefAddr(PASSWORD, getPassword()));
+ }
+
+ if (getDatabaseUrl() != null) {
+ ref.add(new StringRefAddr(URL, getDatabaseUrl()));
+ }
+ if (getDriverClassName() != null) {
+ ref.add(new StringRefAddr(DRIVER_CLASS_NAME, getDriverClassName()));
+ }
+
if (getDefaultCachePolicy() != null) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
CachePolicy policy = getDefaultCachePolicy();
@@ -272,13 +525,27 @@
}
}
}
+ // get individual properties
String sourceName = (String)values.get(SOURCE_NAME);
+ String rootNodeUuid = (String)values.get(ROOT_NODE_UUID);
+ String dataSourceJndiName = (String)values.get(DATA_SOURCE_JNDI_NAME);
+ String userName = (String)values.get(USERNAME);
+ String password = (String)values.get(PASSWORD);
+ String url = (String)values.get(URL);
+ String driverClassName = (String)values.get(DRIVER_CLASS_NAME);
+
Object defaultCachePolicy = values.get(DEFAULT_CACHE_POLICY);
String retryLimit = (String)values.get(RETRY_LIMIT);
// Create the source instance ...
JdbcRepositorySource source = new JdbcRepositorySource();
if (sourceName != null) source.setName(sourceName);
+ if (rootNodeUuid != null) source.setRootNodeUuid(rootNodeUuid);
+ if (dataSourceJndiName != null) source.setDataSourceName(dataSourceJndiName);
+ if (userName != null) source.setUserName(userName);
+ if (password != null) source.setPassword(password);
+ if (url != null) source.setDatabaseUrl(url);
+ if (driverClassName != null) source.setDriverClassName(driverClassName);
if (defaultCachePolicy instanceof CachePolicy) {
source.setDefaultCachePolicy((CachePolicy)defaultCachePolicy);
}
Added: trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcRequestProcesor.java
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcRequestProcesor.java (rev 0)
+++ trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcRequestProcesor.java 2008-12-18 11:53:51 UTC (rev 691)
@@ -0,0 +1,139 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.jdbc;
+
+import java.sql.Connection;
+import org.jboss.dna.common.util.Logger;
+import org.jboss.dna.graph.ExecutionContext;
+import org.jboss.dna.graph.properties.DateTime;
+import org.jboss.dna.graph.requests.CopyBranchRequest;
+import org.jboss.dna.graph.requests.CreateNodeRequest;
+import org.jboss.dna.graph.requests.DeleteBranchRequest;
+import org.jboss.dna.graph.requests.MoveBranchRequest;
+import org.jboss.dna.graph.requests.ReadAllChildrenRequest;
+import org.jboss.dna.graph.requests.ReadAllPropertiesRequest;
+import org.jboss.dna.graph.requests.UpdatePropertiesRequest;
+import org.jboss.dna.graph.requests.processor.RequestProcessor;
+
+/**
+ * JDBC request processor
+ *
+ * @author <a href="mailto:litsenko_sergey@yahoo.com">Sergiy Litsenko</a>
+ *
+ */
+public class JdbcRequestProcesor extends RequestProcessor {
+ protected Connection connection;
+
+ /**
+ * Logging for this instance
+ */
+ protected Logger log = Logger.getLogger(getClass());
+
+ /**
+ * @param sourceName
+ * @param context
+ * @param connection
+ */
+ public JdbcRequestProcesor( String sourceName,
+ ExecutionContext context,
+ Connection connection) {
+ super(sourceName, context);
+ this.connection = connection;
+ }
+
+ /**
+ * @param sourceName
+ * @param context
+ * @param connection
+ * @param now
+ */
+ public JdbcRequestProcesor( String sourceName,
+ ExecutionContext context,
+ Connection connection,
+ DateTime now ) {
+ super(sourceName, context, now);
+ this.connection = connection;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.CopyBranchRequest)
+ */
+ @Override
+ public void process( CopyBranchRequest request ) {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.CreateNodeRequest)
+ */
+ @Override
+ public void process( CreateNodeRequest request ) {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.DeleteBranchRequest)
+ */
+ @Override
+ public void process( DeleteBranchRequest request ) {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.MoveBranchRequest)
+ */
+ @Override
+ public void process( MoveBranchRequest request ) {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.ReadAllChildrenRequest)
+ */
+ @Override
+ public void process( ReadAllChildrenRequest request ) {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.ReadAllPropertiesRequest)
+ */
+ @Override
+ public void process( ReadAllPropertiesRequest request ) {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.UpdatePropertiesRequest)
+ */
+ @Override
+ public void process( UpdatePropertiesRequest request ) {
+ }
+}
Property changes on: trunk/extensions/dna-connector-jdbc-metadata/src/main/java/org/jboss/dna/connector/jdbc/JdbcRequestProcesor.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Modified: trunk/extensions/dna-connector-jdbc-metadata/src/main/resources/org/jboss/dna/connector/jdbc/JdbcMetadataI18n.properties
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/src/main/resources/org/jboss/dna/connector/jdbc/JdbcMetadataI18n.properties 2008-12-17 15:54:36 UTC (rev 690)
+++ trunk/extensions/dna-connector-jdbc-metadata/src/main/resources/org/jboss/dna/connector/jdbc/JdbcMetadataI18n.properties 2008-12-18 11:53:51 UTC (rev 691)
@@ -25,5 +25,9 @@
nodeTypeIsNotSupported= The {0} node type is not supported
locationInRequestMustHavePath = Location must have a path {0}
propertyIsRequired = The {0} property is required but has no value
+oneOfPropertiesIsRequired={0}: Either data source name or JDBC driver connection properties must be defined
errorSerializingCachePolicyInSource = Error serializing a {0} instance owned by the {1} JdbcRepositorySource
-sourceIsReadOnly = {0} is a read-only source; no updates are allowed
\ No newline at end of file
+sourceIsReadOnly = {0} is a read-only source; no updates are allowed
+unableToGetConnectionUsingDriver = {0}: Unable to get connection using following driver: {1} ; and URL: {2}
+unableToGetConnectionUsingDataSource = {0}: Unable to get connection using following data source: {1}
+unableToGetXAResource = {0}: Unable to get XA Resource for JDBC connection
\ No newline at end of file
Added: trunk/extensions/dna-connector-jdbc-metadata/src/test/data/insert.xml
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/src/test/data/insert.xml (rev 0)
+++ trunk/extensions/dna-connector-jdbc-metadata/src/test/data/insert.xml 2008-12-18 11:53:51 UTC (rev 691)
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<dataset>
+ <TEST ID="1" NAME="1(a)hotmail.com" DESCRIPTION="This is 1"/>
+ <TEST ID="2" NAME="2(a)hotmail.com" DESCRIPTION="This is 2"/>
+ <TEST ID="3" NAME="3(a)hotmail.com" DESCRIPTION="This is 3"/>
+ <TEST ID="4" NAME="4(a)hotmail.com" DESCRIPTION="This is 4"/>
+ <TEST ID="5" NAME="5(a)hotmail.com" DESCRIPTION="This is 5"/>
+ <TEST ID="6" NAME="6(a)hotmail.com" DESCRIPTION="This is 6"/>
+ <TEST ID="7" NAME="7(a)hotmail.com" DESCRIPTION="This is 7"/>
+ <TEST ID="8" NAME="8(a)hotmail.com" DESCRIPTION="This is 8"/>
+ <TEST ID="9" NAME="9(a)hotmail.com" DESCRIPTION="This is 9"/>
+ <TEST ID="10" NAME="10(a)hotmail.com" DESCRIPTION="This is 10"/>
+
+ <SALGRADE GRADE="1" LOSAL="700" HISAL="1200"/>
+ <SALGRADE GRADE="2" LOSAL="1201" HISAL="1400"/>
+ <SALGRADE GRADE="3" LOSAL="1401" HISAL="2000"/>
+ <SALGRADE GRADE="4" LOSAL="2001" HISAL="3000"/>
+ <SALGRADE GRADE="5" LOSAL="3001" HISAL="9999"/>
+
+ <DEPT DEPTNO="10" DNAME="ACCOUNTING" LOC="NEW YORK"/>
+ <DEPT DEPTNO="20" DNAME="RESEARCH" LOC="DALLAS"/>
+ <DEPT DEPTNO="30" DNAME="SALES" LOC="CHICAGO"/>
+ <DEPT DEPTNO="40" DNAME="OPERATIONS" LOC="BOSTON"/>
+
+ <EMP EMPNO="7839" ENAME="KING" JOB="PRESIDENT" HIREDATE="1981-11-17 00:00:00" SAL="5000" DEPTNO="10"/>
+ <EMP EMPNO="7566" ENAME="JONES" JOB="MANAGER" MGR="7839" HIREDATE="1981-04-02 00:00:00" SAL="2975" DEPTNO="20"/>
+ <EMP EMPNO="7902" ENAME="FORD" JOB="ANALYST" MGR="7566" HIREDATE="1981-12-03 00:00:00" SAL="3000" DEPTNO="20"/>
+ <EMP EMPNO="7369" ENAME="SMITH" JOB="CLERK" MGR="7902" HIREDATE="1980-12-17 00:00:00" SAL="800" DEPTNO="20"/>
+ <EMP EMPNO="7698" ENAME="BLAKE" JOB="MANAGER" MGR="7839" HIREDATE="1981-05-01 00:00:00" SAL="2850" DEPTNO="30"/>
+ <EMP EMPNO="7499" ENAME="ALLEN" JOB="SALESMAN" MGR="7698" HIREDATE="1981-02-20 00:00:00" SAL="1600" DEPTNO="30" COMM="300" />
+ <EMP EMPNO="7521" ENAME="WARD" JOB="SALESMAN" MGR="7698" HIREDATE="1981-02-22 00:00:00" SAL="1250" DEPTNO="30" COMM="500" />
+ <EMP EMPNO="7654" ENAME="MARTIN" JOB="SALESMAN" MGR="7698" HIREDATE="1981-09-28 00:00:00" SAL="1250" DEPTNO="30" COMM="1400"/>
+ <EMP EMPNO="7782" ENAME="CLARK" JOB="MANAGER" MGR="7839" HIREDATE="1981-06-09 00:00:00" SAL="2450" DEPTNO="10"/>
+ <EMP EMPNO="7788" ENAME="SCOTT" JOB="ANALYST" MGR="7566" HIREDATE="1987-04-19 00:00:00" SAL="3000" DEPTNO="20"/>
+ <EMP EMPNO="7844" ENAME="TURNER" JOB="SALESMAN" MGR="7698" HIREDATE="1981-09-08 00:00:00" SAL="1500" DEPTNO="30" COMM="0"/>
+ <EMP EMPNO="7876" ENAME="ADAMS" JOB="CLERK" MGR="7788" HIREDATE="1987-05-23 00:00:00" SAL="1100" DEPTNO="20"/>
+ <EMP EMPNO="7900" ENAME="JAMES" JOB="CLERK" MGR="7698" HIREDATE="1981-12-03 00:00:00" SAL="950" DEPTNO="30"/>
+ <EMP EMPNO="7934" ENAME="MILLER" JOB="CLERK" MGR="7782" HIREDATE="1982-01-23 00:00:00" SAL="1300" DEPTNO="10"/>
+</dataset>
Property changes on: trunk/extensions/dna-connector-jdbc-metadata/src/test/data/insert.xml
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Added: trunk/extensions/dna-connector-jdbc-metadata/src/test/data/testdb/db.properties
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/src/test/data/testdb/db.properties (rev 0)
+++ trunk/extensions/dna-connector-jdbc-metadata/src/test/data/testdb/db.properties 2008-12-18 11:53:51 UTC (rev 691)
@@ -0,0 +1,15 @@
+hsqldb.script_format=0
+runtime.gc_interval=0
+sql.enforce_strict_size=false
+hsqldb.cache_size_scale=8
+readonly=false
+hsqldb.nio_data_file=true
+hsqldb.cache_scale=14
+version=1.8.0
+hsqldb.default_table_type=memory
+hsqldb.cache_file_scale=1
+hsqldb.log_size=200
+modified=yes
+hsqldb.cache_version=1.7.0
+hsqldb.original_version=1.8.0
+hsqldb.compatible_version=1.8.0
Property changes on: trunk/extensions/dna-connector-jdbc-metadata/src/test/data/testdb/db.properties
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Added: trunk/extensions/dna-connector-jdbc-metadata/src/test/data/testdb/db.script
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/src/test/data/testdb/db.script (rev 0)
+++ trunk/extensions/dna-connector-jdbc-metadata/src/test/data/testdb/db.script 2008-12-18 11:53:51 UTC (rev 691)
@@ -0,0 +1,17 @@
+CREATE SCHEMA PUBLIC AUTHORIZATION DBA
+CREATE USER SA PASSWORD ""
+GRANT DBA TO SA
+SET WRITE_DELAY 10
+CREATE TABLE TEST (ID NUMERIC NOT NULL PRIMARY KEY, NAME VARCHAR(30) NOT NULL, DESCRIPTION VARCHAR(255))
+CREATE TABLE BONUS (ENAME VARCHAR(10) NULL, JOB VARCHAR(9) NULL, SAL NUMERIC NULL, COMM NUMERIC NULL)
+CREATE TABLE DEPT (DEPTNO NUMERIC(2,0) NOT NULL, DNAME VARCHAR(14) NULL, LOC VARCHAR(13) NULL)
+ALTER TABLE DEPT ADD CONSTRAINT PK_DEPT PRIMARY KEY (DEPTNO)
+CREATE TABLE EMP (EMPNO NUMERIC(4,0) NOT NULL, ENAME VARCHAR(10) NULL, JOB VARCHAR(9) NULL, MGR NUMERIC(4,0) NULL, HIREDATE DATE NULL, SAL NUMERIC(7,2) NULL, COMM NUMERIC(7,2) NULL, DEPTNO NUMERIC(2,0) NULL)
+ALTER TABLE EMP ADD CONSTRAINT PK_EMP PRIMARY KEY (EMPNO)
+ALTER TABLE EMP ADD CONSTRAINT FK_DEPTNO FOREIGN KEY(DEPTNO) REFERENCES DEPT(DEPTNO) ON DELETE CASCADE
+CREATE TABLE SALGRADE (GRADE NUMERIC NULL, LOSAL NUMERIC NULL, HISAL NUMERIC NULL)
+CREATE UNIQUE INDEX PK_DEPT ON DEPT(DEPTNO)
+CREATE UNIQUE INDEX PK_EMP ON EMP(EMPNO);
+
+
+
Added: trunk/extensions/dna-connector-jdbc-metadata/src/test/java/org/jboss/dna/connector/jdbc/DatabaseBasicTest.java
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/src/test/java/org/jboss/dna/connector/jdbc/DatabaseBasicTest.java (rev 0)
+++ trunk/extensions/dna-connector-jdbc-metadata/src/test/java/org/jboss/dna/connector/jdbc/DatabaseBasicTest.java 2008-12-18 11:53:51 UTC (rev 691)
@@ -0,0 +1,141 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.jdbc;
+
+import java.io.File;
+import java.io.IOException;
+import java.math.BigDecimal;
+import org.dbunit.IDatabaseTester;
+import org.dbunit.Assertion;
+import org.dbunit.JdbcDatabaseTester;
+import org.dbunit.dataset.DataSetException;
+import org.dbunit.dataset.IDataSet;
+import org.dbunit.dataset.ITable;
+import org.dbunit.dataset.xml.FlatXmlDataSet;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Basic test of the HSQLDB database with simple schema
+ *
+ * @author <a href="mailto:litsenko_sergey@yahoo.com">Sergiy Litsenko</a>
+ */
+public class DatabaseBasicTest {
+ private IDatabaseTester dbTester;
+
+ private IDataSet getDataSet() throws IOException, DataSetException {
+ return new FlatXmlDataSet(new File("src/test/data/insert.xml"));
+ }
+
+ @Before
+ public void beforeEach() throws Exception {
+ dbTester = new JdbcDatabaseTester("org.hsqldb.jdbcDriver", "jdbc:hsqldb:file:target/testdb/db", "sa", "");
+ dbTester.setDataSet(getDataSet());
+ dbTester.onSetup();
+ }
+
+ @After
+ public void afterEach() throws Exception {
+ dbTester.onTearDown();
+ }
+
+ @Test
+ public void testTableShallBeLoaded() throws Exception {
+ // Fetch live database data
+ IDataSet databaseDataSet = dbTester.getConnection().createDataSet();
+ ITable actualTable = databaseDataSet.getTable("TEST");
+ ITable expectedTable = getDataSet().getTable("TEST");
+ Assertion.assertEquals(expectedTable, actualTable);
+ }
+
+ @Test
+ public void testTableRecordsLoaded() throws Exception {
+ // Fetch live database data
+ Assert.assertEquals(10, dbTester.getConnection().getRowCount("TEST"));
+ }
+
+ @Test
+ public void testTableRandomRowColumnId() throws Exception {
+ // Fetch live database data
+ IDataSet databaseDataSet = dbTester.getConnection().createDataSet();
+ ITable actualTable = databaseDataSet.getTable("TEST");
+ // get 5th row, column: id
+ Object actualId = actualTable.getValue(4, "ID");
+ Assert.assertNotNull(actualId);
+
+ BigDecimal expectedId = new BigDecimal(5);
+ Assert.assertTrue(expectedId.equals(actualId));
+ }
+
+ @Test
+ public void testTableRandomRowColumnName() throws Exception {
+ // Fetch live database data
+ IDataSet databaseDataSet = dbTester.getConnection().createDataSet();
+ ITable actualTable = databaseDataSet.getTable("TEST");
+ // get 6th row, column: name
+ Assert.assertEquals("6(a)hotmail.com", actualTable.getValue(5, "NAME"));
+ }
+
+ @Test
+ public void testTableRandomRowColumnDescription() throws Exception {
+ // Fetch live database data
+ IDataSet databaseDataSet = dbTester.getConnection().createDataSet();
+ ITable actualTable = databaseDataSet.getTable("TEST");
+ // get 7th row, column: description
+ Assert.assertEquals("This is 7", actualTable.getValue(6, "DESCRIPTION"));
+ }
+
+ @Test
+ public void testDefaultDatabaseSchemaNameIsEmpty() throws Exception {
+ Assert.assertNull(dbTester.getConnection().getSchema());
+ }
+
+ @Test
+ public void salGradeTableShallBeLoaded() throws Exception {
+ // Fetch live database data
+ IDataSet databaseDataSet = dbTester.getConnection().createDataSet();
+ ITable actualTable = databaseDataSet.getTable("SALGRADE");
+ ITable expectedTable = getDataSet().getTable("SALGRADE");
+ Assertion.assertEquals(expectedTable, actualTable);
+ }
+
+ @Test
+ public void deptTableShallBeLoaded() throws Exception {
+ // Fetch live database data
+ IDataSet databaseDataSet = dbTester.getConnection().createDataSet();
+ ITable actualTable = databaseDataSet.getTable("DEPT");
+ ITable expectedTable = getDataSet().getTable("DEPT");
+ Assertion.assertEquals(expectedTable, actualTable);
+ }
+
+ @Test
+ public void empTableShallBeLoaded() throws Exception {
+ // Fetch live database data
+ IDataSet databaseDataSet = dbTester.getConnection().createDataSet();
+ ITable actualTable = databaseDataSet.getTable("EMP");
+ ITable expectedTable = getDataSet().getTable("EMP");
+ // Assertion.assertEquals(expectedTable, actualTable);
+ Assert.assertEquals(actualTable.getRowCount(), expectedTable.getRowCount());
+ }
+}
Property changes on: trunk/extensions/dna-connector-jdbc-metadata/src/test/java/org/jboss/dna/connector/jdbc/DatabaseBasicTest.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Added: trunk/extensions/dna-connector-jdbc-metadata/src/test/resources/log4j.xml
===================================================================
--- trunk/extensions/dna-connector-jdbc-metadata/src/test/resources/log4j.xml (rev 0)
+++ trunk/extensions/dna-connector-jdbc-metadata/src/test/resources/log4j.xml 2008-12-18 11:53:51 UTC (rev 691)
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/" debug="false">
+ <appender name="CONSOLE" class="org.apache.log4j.ConsoleAppender">
+ <param name="Target" value="System.out"/>
+ <param name="Threshold" value="INFO"/>
+
+ <layout class="org.apache.log4j.PatternLayout">
+ <param name="ConversionPattern" value="%d{HH:mm:ss,SSS} %-5p %t [%c{1}] %m%n"/>
+ </layout>
+ </appender>
+
+ <appender name="FILE" class="org.apache.log4j.FileAppender">
+ <param name="Threshold" value="TRACE"/>
+ <param name="Append" value="false"/>
+ <param name="File" value="target/test.log"/>
+
+ <layout class="org.apache.log4j.PatternLayout">
+ <param name="ConversionPattern" value="%d{HH:mm:ss,SSS} %-5p %t [%c, %L] %m%n"/>
+ </layout>
+ </appender>
+
+
+ <category name="org.jboss.dna.connector.jdbc">
+ <priority value="TRACE"/>
+ </category>
+
+ <category name="org.dbunit">
+ <priority value="INFO"/>
+ </category>
+
+ <root>
+ <priority value="INFO"/>
+ <appender-ref ref="FILE"/>
+ </root>
+</log4j:configuration>
Property changes on: trunk/extensions/dna-connector-jdbc-metadata/src/test/resources/log4j.xml
___________________________________________________________________
Name: svn:mime-type
+ text/plain
15 years, 3 months
DNA SVN: r690 - in trunk: dna-graph/src/main/java/org/jboss/dna/graph/requests and 5 other directories.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2008-12-17 10:54:36 -0500 (Wed, 17 Dec 2008)
New Revision: 690
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/CopyBranchRequest.java
trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties
trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
Log:
DNA-40
Completed the connector's method to process branch copy requests, and added more unit tests. Fixed a bug in the CopyBranchRequest class - when setting the actual locations, the request was incorrectly checking that the actual location matched that of the "into" location (which is the parent of the new copy), rather than the actual location of the new copy.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java 2008-12-17 15:51:44 UTC (rev 689)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java 2008-12-17 15:54:36 UTC (rev 690)
@@ -67,6 +67,7 @@
public static I18n multipleErrorsWhileExecutingRequests;
public static I18n unableToAddMoreRequestsToAlreadyExecutedBatch;
public static I18n actualLocationIsNotSameAsInputLocation;
+ public static I18n actualLocationIsNotChildOfInputLocation;
public static I18n actualLocationMustHavePath;
public static I18n actualNewLocationIsNotSameAsInputLocation;
public static I18n actualNewLocationMustHavePath;
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/CopyBranchRequest.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/CopyBranchRequest.java 2008-12-17 15:51:44 UTC (rev 689)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/CopyBranchRequest.java 2008-12-17 15:54:36 UTC (rev 690)
@@ -118,7 +118,7 @@
* processing the request, and the actual location must have a {@link Location#getPath() path}.
*
* @param oldLocation the actual location of the node before being renamed
- * @param newLocation the actual location of the node after being renamed
+ * @param newLocation the actual location of the new copy of the node
* @throws IllegalArgumentException if the either location is null, if the old location does not represent the
* {@link Location#isSame(Location) same location} as the {@link #from() from location}, if the new location does not
* represent the {@link Location#isSame(Location) same location} as the {@link #into() into location}, or if the
@@ -129,9 +129,7 @@
if (!from.isSame(oldLocation)) { // not same if actual is null
throw new IllegalArgumentException(GraphI18n.actualLocationIsNotSameAsInputLocation.text(oldLocation, from));
}
- if (!into.isSame(newLocation, false)) { // not same if actual is null
- throw new IllegalArgumentException(GraphI18n.actualLocationIsNotSameAsInputLocation.text(newLocation, into));
- }
+ CheckArg.isNotNull(newLocation, "newLocation");
assert oldLocation != null;
assert newLocation != null;
if (!oldLocation.hasPath()) {
@@ -140,6 +138,10 @@
if (!newLocation.hasPath()) {
throw new IllegalArgumentException(GraphI18n.actualNewLocationMustHavePath.text(newLocation));
}
+ // The 'into' should be the parent of the 'newLocation' ...
+ if (into.hasPath() && !newLocation.getPath().getParent().equals(into.getPath())) {
+ throw new IllegalArgumentException(GraphI18n.actualLocationIsNotChildOfInputLocation.text(newLocation, into));
+ }
this.actualNewLocation = newLocation;
}
Modified: trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties
===================================================================
--- trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties 2008-12-17 15:51:44 UTC (rev 689)
+++ trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties 2008-12-17 15:54:36 UTC (rev 690)
@@ -54,6 +54,7 @@
multipleErrorsWhileExecutingRequests = {0} of the {1} requests resulted in errors
unableToAddMoreRequestsToAlreadyExecutedBatch = Unable to add more requests to a batch of graph requests that has already been executed
actualLocationIsNotSameAsInputLocation = The actual location of {0} is not the same as the current location of {1}
+actualLocationIsNotChildOfInputLocation = The actual location of {0} is not a child of the specified location {1}
actualLocationMustHavePath = The actual location of {0} must have a path
actualNewLocationIsNotSameAsInputLocation = The actual new location of {0} is not the same as the input location of {1}
actualNewLocationMustHavePath = The actual new location of {0} must have a path
Modified: trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java
===================================================================
--- trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java 2008-12-17 15:51:44 UTC (rev 689)
+++ trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java 2008-12-17 15:54:36 UTC (rev 690)
@@ -777,8 +777,16 @@
@Override
public void process( CopyBranchRequest request ) {
- // Just update the actual location
- request.setActualLocations(actualLocationOf(request.from()), actualLocationOf(request.into()));
+ // Create a child under the new parent ...
+ if (request.into().hasPath()) {
+ Path childPath = context.getValueFactories().getPathFactory().create(request.into().getPath(), "child");
+ Location newChild = actualLocationOf(new Location(childPath));
+ // Just update the actual location
+ request.setActualLocations(actualLocationOf(request.from()), newChild);
+ } else {
+ // Just update the actual location
+ request.setActualLocations(actualLocationOf(request.from()), actualLocationOf(request.into()));
+ }
}
@Override
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-17 15:51:44 UTC (rev 689)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-17 15:54:36 UTC (rev 690)
@@ -34,6 +34,7 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
@@ -185,71 +186,14 @@
String childNsUri = childName.getNamespaceUri();
NamespaceEntity ns = namespaces.get(childNsUri, true);
assert ns != null;
-
- // Figure out the next SNS index and index-in-parent for this new child ...
- int nextSnsIndex = 1; // SNS index is 1-based
- int nextIndexInParent = 0; // index-in-parent is 0-based
final Path parentPath = actual.location.getPath();
assert parentPath != null;
- // Look in the cache for the children of the parent node.
- LinkedList<Location> childrenOfParent = cache.getAllChildren(parentPath);
- if (childrenOfParent != null) {
- // The cache had the complete list of children for the parent node, which means
- // we know about all of the children and can walk the children to figure out the next indexes.
- nextIndexInParent = childrenOfParent.size();
- if (nextIndexInParent > 1) {
- // Since we want the last indexes, process the list backwards ...
- ListIterator<Location> iter = childrenOfParent.listIterator(childrenOfParent.size());
- while (iter.hasPrevious()) {
- Location existing = iter.previous();
- Path.Segment segment = existing.getPath().getLastSegment();
- if (!segment.getName().equals(childName)) continue;
- // Otherwise the name matched, so get the indexes ...
- nextSnsIndex = segment.getIndex() + 1;
- }
- }
- } else {
- // The cache did not have the complete list of children for the parent node,
- // so we need to look the values up by querying the database ...
- // Find the largest SNS index in the existing ChildEntity objects with the same name ...
- String childLocalName = childName.getLocalName();
- Query query = entities.createNamedQuery("ChildEntity.findMaximumSnsIndex");
- query.setParameter("parentUuid", parentUuidString);
- query.setParameter("ns", ns.getId());
- query.setParameter("childName", childLocalName);
- try {
- Integer result = (Integer)query.getSingleResult();
- nextSnsIndex = result != null ? result + 1 : 1; // SNS index is 1-based
- } catch (NoResultException e) {
- }
+ // Figure out the next SNS index and index-in-parent for this new child ...
+ actualLocation = addNewChild(actual, uuidString, childName);
- // Find the largest child index in the existing ChildEntity objects ...
- query = entities.createNamedQuery("ChildEntity.findMaximumChildIndex");
- query.setParameter("parentUuid", parentUuidString);
- try {
- Integer result = (Integer)query.getSingleResult();
- nextIndexInParent = result != null ? result + 1 : 0; // index-in-parent is 0-based
- } catch (NoResultException e) {
- }
- }
-
- // Create the new ChildEntity ...
- ChildId id = new ChildId(parentUuidString, uuidString);
- ChildEntity entity = new ChildEntity(id, nextIndexInParent, ns, childName.getLocalName(), nextSnsIndex);
- entities.persist(entity);
-
- // Set the actual path, regardless of the supplied path...
- Path path = pathFactory.create(parentPath, childName, nextSnsIndex);
- actualLocation = new Location(path, UUID.fromString(uuidString));
-
- // Finally, update the cache with the information we know ...
- if (childrenOfParent != null) {
- // Add to the cached list of children ...
- childrenOfParent.add(actualLocation);
- }
// Since we've just created this node, we know about all the children (actually, there are none).
- cache.setAllChildren(path, new LinkedList<Location>());
+ cache.setAllChildren(actualLocation.getPath(), new LinkedList<Location>());
// Flush the entities ...
// entities.flush();
@@ -262,6 +206,89 @@
request.setActualLocationOfNode(actualLocation);
}
+ protected Location addNewChild( ActualLocation parent,
+ String childUuid,
+ Name childName ) {
+ int nextSnsIndex = 1; // SNS index is 1-based
+ int nextIndexInParent = 0; // index-in-parent is 0-based
+ String childNsUri = childName.getNamespaceUri();
+ NamespaceEntity ns = namespaces.get(childNsUri, true);
+ assert ns != null;
+
+ final Path parentPath = parent.location.getPath();
+ assert parentPath != null;
+
+ // Look in the cache for the children of the parent node.
+ LinkedList<Location> childrenOfParent = cache.getAllChildren(parentPath);
+ if (childrenOfParent != null) {
+ // The cache had the complete list of children for the parent node, which means
+ // we know about all of the children and can walk the children to figure out the next indexes.
+ nextIndexInParent = childrenOfParent.size();
+ if (nextIndexInParent > 1) {
+ // Since we want the last indexes, process the list backwards ...
+ ListIterator<Location> iter = childrenOfParent.listIterator(childrenOfParent.size());
+ while (iter.hasPrevious()) {
+ Location existing = iter.previous();
+ Path.Segment segment = existing.getPath().getLastSegment();
+ if (!segment.getName().equals(childName)) continue;
+ // Otherwise the name matched, so get the indexes ...
+ nextSnsIndex = segment.getIndex() + 1;
+ }
+ }
+ } else {
+ // The cache did not have the complete list of children for the parent node,
+ // so we need to look the values up by querying the database ...
+
+ // Find the largest SNS index in the existing ChildEntity objects with the same name ...
+ String childLocalName = childName.getLocalName();
+ Query query = entities.createNamedQuery("ChildEntity.findMaximumSnsIndex");
+ query.setParameter("parentUuid", parent.uuid);
+ query.setParameter("ns", ns.getId());
+ query.setParameter("childName", childLocalName);
+ try {
+ Integer result = (Integer)query.getSingleResult();
+ nextSnsIndex = result != null ? result + 1 : 1; // SNS index is 1-based
+ } catch (NoResultException e) {
+ }
+
+ // Find the largest child index in the existing ChildEntity objects ...
+ query = entities.createNamedQuery("ChildEntity.findMaximumChildIndex");
+ query.setParameter("parentUuid", parent.uuid);
+ try {
+ Integer result = (Integer)query.getSingleResult();
+ nextIndexInParent = result != null ? result + 1 : 0; // index-in-parent is 0-based
+ } catch (NoResultException e) {
+ }
+ }
+
+ // Create the new ChildEntity ...
+ ChildId id = new ChildId(parent.uuid, childUuid);
+ ChildEntity entity = new ChildEntity(id, nextIndexInParent, ns, childName.getLocalName(), nextSnsIndex);
+ entities.persist(entity);
+
+ // Set the actual path, regardless of the supplied path...
+ Path path = pathFactory.create(parentPath, childName, nextSnsIndex);
+ Location actualLocation = new Location(path, UUID.fromString(childUuid));
+
+ // Finally, update the cache with the information we know ...
+ if (childrenOfParent != null) {
+ // Add to the cached list of children ...
+ childrenOfParent.add(actualLocation);
+ }
+ return actualLocation;
+ }
+
+ protected class NextChildIndexes {
+ protected final int nextIndexInParent;
+ protected final int nextSnsIndex;
+
+ protected NextChildIndexes( int nextIndexInParent,
+ int nextSnsIndex ) {
+ this.nextIndexInParent = nextIndexInParent;
+ this.nextSnsIndex = nextSnsIndex;
+ }
+ }
+
/**
* {@inheritDoc}
*
@@ -875,6 +902,85 @@
@Override
public void process( CopyBranchRequest request ) {
logger.trace(request.toString());
+ Location actualFromLocation = null;
+ Location actualToLocation = null;
+ try {
+ Location fromLocation = request.from();
+ ActualLocation actualFrom = getActualLocation(fromLocation);
+ actualFromLocation = actualFrom.location;
+ Path fromPath = actualFromLocation.getPath();
+
+ Location newParentLocation = request.into();
+ ActualLocation actualNewParent = getActualLocation(newParentLocation);
+ assert actualNewParent != null;
+
+ // Create a map that we'll use to record the new UUID for each of the original nodes ...
+ Map<String, String> originalToNewUuid = new HashMap<String, String>();
+
+ // Compute the subgraph, including the top node in the subgraph ...
+ SubgraphQuery query = SubgraphQuery.create(getExecutionContext(), entities, actualFromLocation.getUuid(), fromPath, 0);
+ try {
+ // Walk through the original nodes, creating new ChildEntity object (i.e., copy) for each original ...
+ List<ChildEntity> originalNodes = query.getNodes(true, true);
+ Iterator<ChildEntity> originalIter = originalNodes.iterator();
+
+ // Start with the original (top-level) node first, since we need to add it to the list of children ...
+ if (originalIter.hasNext()) {
+ ChildEntity original = originalIter.next();
+
+ // Create a new UUID for the copy ...
+ String copyUuid = UUID.randomUUID().toString();
+ originalToNewUuid.put(original.getId().getChildUuidString(), copyUuid);
+
+ // Now add the new copy of the original ...
+ Name childName = fromPath.getLastSegment().getName();
+ actualToLocation = addNewChild(actualNewParent, copyUuid, childName);
+ }
+
+ // Now process the children in the subgraph ...
+ while (originalIter.hasNext()) {
+ ChildEntity original = originalIter.next();
+ String newParentUuidOfCopy = originalToNewUuid.get(original.getId().getParentUuidString());
+ assert newParentUuidOfCopy != null;
+
+ // Create a new UUID for the copy ...
+ String copyUuid = UUID.randomUUID().toString();
+ originalToNewUuid.put(original.getId().getChildUuidString(), copyUuid);
+
+ // Create the copy ...
+ ChildEntity copy = new ChildEntity(new ChildId(newParentUuidOfCopy, copyUuid), original.getIndexInParent(),
+ original.getChildNamespace(), original.getChildName(),
+ original.getSameNameSiblingIndex());
+ entities.persist(copy);
+ }
+ entities.flush();
+
+ // Now process the properties, creating a copy (note references are not changed) ...
+ for (PropertiesEntity original : query.getProperties(true, true)) {
+ // Find the UUID of the copy ...
+ String copyUuid = originalToNewUuid.get(original.getId().getUuidString());
+ assert copyUuid != null;
+
+ // Create the copy ...
+ PropertiesEntity copy = new PropertiesEntity(new NodeId(copyUuid));
+ copy.setCompressed(original.isCompressed());
+ copy.setData(original.getData());
+ copy.setPropertyCount(original.getPropertyCount());
+ copy.setReferentialIntegrityEnforced(original.isReferentialIntegrityEnforced());
+ entities.persist(copy);
+ }
+ entities.flush();
+
+ } finally {
+ // Close and release the temporary data used for this operation ...
+ query.close();
+ }
+
+ } catch (Throwable e) { // Includes PathNotFoundException
+ request.setError(e);
+ return;
+ }
+ request.setActualLocations(actualFromLocation, actualToLocation);
}
/**
@@ -892,7 +998,7 @@
actualLocation = actual.location;
Path path = actualLocation.getPath();
- // Compute the subgraph, including the root ...
+ // Compute the subgraph, including the top node in the subgraph ...
SubgraphQuery query = SubgraphQuery.create(getExecutionContext(), entities, actualLocation.getUuid(), path, 0);
try {
ChildEntity deleted = query.getNode();
@@ -905,13 +1011,30 @@
List<Location> deletedLocations = query.getNodeLocations(true, true);
// Now delete the subgraph ...
- SubgraphQuery.Resolver resolver = new SubgraphQuery.Resolver() {
- public Location getLocationFor( UUID uuid ) {
- ActualLocation actual = getActualLocation(new Location(uuid));
- return (actual != null) ? actual.location : null;
+ query.deleteSubgraph(true);
+
+ // Verify referential integrity: that none of the deleted nodes are referenced by nodes not being deleted.
+ List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ if (invalidReferences.size() > 0) {
+ // Some of the references that remain will be invalid, since they point to nodes that
+ // have just been deleted. Build up the information necessary to produce a useful exception ...
+ ValueFactory<Reference> refFactory = getExecutionContext().getValueFactories().getReferenceFactory();
+ Map<Location, List<Reference>> invalidRefs = new HashMap<Location, List<Reference>>();
+ for (ReferenceEntity entity : invalidReferences) {
+ UUID fromUuid = UUID.fromString(entity.getId().getFromUuidString());
+ ActualLocation actualFromLocation = getActualLocation(new Location(fromUuid));
+ Location fromLocation = actualFromLocation.location;
+ List<Reference> refs = invalidRefs.get(fromLocation);
+ if (refs == null) {
+ refs = new ArrayList<Reference>();
+ invalidRefs.put(fromLocation, refs);
+ }
+ UUID toUuid = UUID.fromString(entity.getId().getToUuidString());
+ refs.add(refFactory.create(toUuid));
}
- };
- query.deleteSubgraph(true, resolver);
+ String msg = JpaConnectorI18n.unableToDeleteBecauseOfReferences.text();
+ throw new ReferentialIntegrityException(invalidRefs, msg);
+ }
// And adjust the SNS index and indexes ...
ChildEntity.adjustSnsIndexesAndIndexesAfterRemoving(entities, parentUuidString, childName, nsId, indexInParent);
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-17 15:51:44 UTC (rev 689)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-17 15:54:36 UTC (rev 690)
@@ -48,8 +48,9 @@
@NamedQuery( name = "SubgraphNodeEntity.getPropertiesEntitiesWithLargeValues", query = "select props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and size(props.largeValues) > 0" ),
@NamedQuery( name = "SubgraphNodeEntity.getChildEntities", query = "select child from ChildEntity child, SubgraphNodeEntity node where child.id.childUuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and node.depth <= :maxDepth order by node.depth, node.parentIndexInParent, node.indexInParent" ),
@NamedQuery( name = "SubgraphNodeEntity.getReferenceThatWillBeInvalid", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and ref.id.fromUuidString not in (select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId)" ),
- @NamedQuery( name = "SubgraphNodeEntity.deletePropertiesEntities", query = "delete PropertiesEntity props where props.id.uuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId and node.depth >= :depth )" ),
- @NamedQuery( name = "SubgraphNodeEntity.deleteChildEntities", query = "delete ChildEntity child where child.id.childUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId and node.depth >= :depth )" ),
+ @NamedQuery( name = "SubgraphNodeEntity.deletePropertiesEntities", query = "delete PropertiesEntity props where props.id.uuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId )" ),
+ @NamedQuery( name = "SubgraphNodeEntity.deleteChildEntities", query = "delete ChildEntity child where child.id.childUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId )" ),
+ @NamedQuery( name = "SubgraphNodeEntity.deleteReferences", query = "delete ReferenceEntity as ref where ref.id.fromUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId )" ),
@NamedQuery( name = "SubgraphNodeEntity.deleteByQueryId", query = "delete SubgraphNodeEntity where queryId = :queryId" )} )
public class SubgraphNodeEntity {
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-17 15:51:44 UTC (rev 689)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-17 15:54:36 UTC (rev 690)
@@ -21,7 +21,6 @@
*/
package org.jboss.dna.connector.store.jpa.models.basic;
-import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -30,16 +29,12 @@
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
-import org.jboss.dna.connector.store.jpa.JpaConnectorI18n;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.Location;
import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.NameFactory;
import org.jboss.dna.graph.properties.Path;
import org.jboss.dna.graph.properties.PathFactory;
-import org.jboss.dna.graph.properties.Reference;
-import org.jboss.dna.graph.properties.ReferentialIntegrityException;
-import org.jboss.dna.graph.properties.ValueFactory;
/**
* Represents a temporary working area for a query that efficiently retrieves the nodes in a subgraph. This class uses the
@@ -55,10 +50,6 @@
*/
public class SubgraphQuery {
- public interface Resolver {
- Location getLocationFor( UUID uuid );
- }
-
/**
* Create a query that returns a subgraph at and below the node with the supplied path and the supplied UUID.
*
@@ -293,40 +284,28 @@
}
/**
- * Delete the nodes in the subgraph. This method first checks for
+ * Determine whether there are any invalid references (typically called after {@link #deleteSubgraph(boolean)}).
*
- * @param includeRoot true if the root node should also be deleted
- * @param resolver the resolver that should be used to resolve UUIDs to the corresponding paths; may not be null
- * @throws ReferentialIntegrityException if the repository's references after the delete would be invalid because they would
- * reference nodes that are to be deleted
+ * @return the list of references that are no longer valid
*/
@SuppressWarnings( "unchecked" )
- public void deleteSubgraph( boolean includeRoot,
- Resolver resolver ) throws ReferentialIntegrityException {
- if (query == null) throw new IllegalStateException();
-
+ public List<ReferenceEntity> getInvalidReferences() {
// Verify referential integrity: that none of the deleted nodes are referenced by nodes not being deleted.
Query references = manager.createNamedQuery("SubgraphNodeEntity.getReferenceThatWillBeInvalid");
references.setParameter("queryId", query.getId());
- List<ReferenceEntity> invalidReferences = references.getResultList();
- if (invalidReferences.size() > 0) {
- ValueFactory<Reference> refFactory = context.getValueFactories().getReferenceFactory();
- Map<Location, List<Reference>> invalidRefs = new HashMap<Location, List<Reference>>();
- for (ReferenceEntity entity : invalidReferences) {
- UUID fromUuid = UUID.fromString(entity.getId().getFromUuidString());
- Location location = resolver.getLocationFor(fromUuid);
- List<Reference> refs = invalidRefs.get(location);
- if (refs == null) {
- refs = new ArrayList<Reference>();
- invalidRefs.put(location, refs);
- }
- UUID toUuid = UUID.fromString(entity.getId().getToUuidString());
- refs.add(refFactory.create(toUuid));
- }
- String msg = JpaConnectorI18n.unableToDeleteBecauseOfReferences.text();
- throw new ReferentialIntegrityException(invalidRefs, msg);
- }
+ return references.getResultList();
+ }
+ /**
+ * Delete the nodes in the subgraph. This method does not check for referential integrity (see
+ * {@link #getInvalidReferences()}).
+ *
+ * @param includeRoot true if the root node should also be deleted
+ */
+ @SuppressWarnings( "unchecked" )
+ public void deleteSubgraph( boolean includeRoot ) {
+ if (query == null) throw new IllegalStateException();
+
// Delete the PropertiesEntities ...
//
// Right now, Hibernate is not able to support deleting PropertiesEntity in bulk because of the
@@ -349,15 +328,18 @@
// Delete the PropertiesEntities, none of which will have large values ...
Query delete = manager.createNamedQuery("SubgraphNodeEntity.deletePropertiesEntities");
delete.setParameter("queryId", query.getId());
- delete.setParameter("depth", includeRoot ? 0 : 1);
delete.executeUpdate();
// Delete the ChildEntities ...
delete = manager.createNamedQuery("SubgraphNodeEntity.deleteChildEntities");
delete.setParameter("queryId", query.getId());
- delete.setParameter("depth", includeRoot ? 0 : 1);
delete.executeUpdate();
+ // Delete references ...
+ delete = manager.createNamedQuery("SubgraphNodeEntity.deleteReferences");
+ delete.setParameter("queryId", query.getId());
+ delete.executeUpdate();
+
// Delete unused large values ...
LargeValueEntity.deleteUnused(manager);
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-17 15:51:44 UTC (rev 689)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-17 15:54:36 UTC (rev 690)
@@ -692,6 +692,188 @@
}
@Test
+ public void shouldCopyNodeWithChildren() {
+ // Create the tree (a total of 40 nodes, plus the extra 6 added later)...
+ // /
+ // /node1
+ // /node1/node1
+ // /node1/node1/node1
+ // /node1/node1/node2
+ // /node1/node1/node3
+ // /node1/node2
+ // /node1/node2/node1
+ // /node1/node2/node2
+ // /node1/node2/node3
+ // /node1/node3
+ // /node1/node3/node1
+ // /node1/node3/node2
+ // /node1/node3/node3
+ // /node2
+ // /node2/node1
+ // /node2/node1/node1
+ // /node2/node1/node2
+ // /node2/node1/node3
+ // /node2/node2
+ // /node2/node2/node1
+ // /node2/node2/node2
+ // /node2/node2/node3
+ // /node2/node3
+ // /node2/node3/node1
+ // /node2/node3/node2
+ // /node2/node3/node3
+ // /node3
+ // /node3/node1
+ // /node3/node1/node1
+ // /node3/node1/node2
+ // /node3/node1/node3
+ // /node3/node2
+ // /node3/node2/node1
+ // /node3/node2/node2
+ // /node3/node2/node3
+ // /node3/node3
+ // /node3/node3/node1
+ // /node3/node3/node2
+ // /node3/node3/node3
+ // /secondBranch1
+ // /secondBranch1/secondBranch1
+ // /secondBranch1/secondBranch2
+ // /secondBranch2
+ // /secondBranch2/secondBranch1
+ // /secondBranch2/secondBranch2
+
+ numPropsOnEach = 3;
+ createTree("", 3, 3, numPropsOnEach, null, true, false);
+
+ // Copy a branch ...
+ graph.copy("/node2").into("/node3");
+
+ assertThat(graph.getChildren().of("/node1"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node1/node1"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node1/node2"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node1/node3"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node1/node3/node1"), hasChildren());
+
+ // The original of the copy should still exist ...
+ assertThat(graph.getChildren().of("/node2"), hasChildren(child("node1"), child("node3")));
+ assertThat(graph.getChildren().of("/node2/node1"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node2/node3"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node2/node3/node1"), hasChildren());
+
+ assertThat(graph.getChildren().of("/node3"), hasChildren(child("node2[1]"), child("node3"), child("node2[2]")));
+ assertThat(graph.getChildren().of("/node3/node2[1]"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node3/node3"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node3/node3/node1"), hasChildren());
+ assertThat(graph.getChildren().of("/node3/node2[2]"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node3/node2[2]/node1"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node3/node2[2]/node2"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node3/node2[2]/node3"), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(graph.getChildren().of("/node3/node2[2]/node1/node1"), hasChildren());
+
+ Subgraph subgraph = graph.getSubgraphOfDepth(4).at("/node3");
+ assertThat(subgraph, is(notNullValue()));
+ assertThat(subgraph.getNode(".").getChildren(), hasChildren(child("node2"), child("node3")));
+ assertThat(subgraph.getNode("."), hasProperty("property1", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("."), hasProperty("property2", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("."), hasProperty("property3", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[1]").getChildren(), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(subgraph.getNode("node2[1]"), hasProperty("property1", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[1]"), hasProperty("property2", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[1]"), hasProperty("property3", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node3").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node3"), hasProperty("property1", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node3"), hasProperty("property2", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node3"), hasProperty("property3", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]").getChildren(), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(subgraph.getNode("node2[2]"), hasProperty("property1", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]"), hasProperty("property2", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]"), hasProperty("property3", "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1").getChildren(), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(subgraph.getNode("node2[2]/node1"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node1").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node1/node1"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node1"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node1"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node2").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node1/node2"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node2"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node2"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node3").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node1/node3"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node3"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node1/node3"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2").getChildren(), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(subgraph.getNode("node2[2]/node2"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node1").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node2/node1"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node1"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node1"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node2").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node2/node2"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node2"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node2"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node3").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node2/node3"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node3"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node2/node3"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3").getChildren(), hasChildren(child("node1"), child("node2"), child("node3")));
+ assertThat(subgraph.getNode("node2[2]/node3"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node1").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node3/node1"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node1"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node1"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node2").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node3/node2"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node2"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node2"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node3").getChildren(), isEmpty());
+ assertThat(subgraph.getNode("node2[2]/node3/node3"), hasProperty("property1",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node3"), hasProperty("property2",
+ "The quick brown fox jumped over the moon. What? "));
+ assertThat(subgraph.getNode("node2[2]/node3/node3"), hasProperty("property3",
+ "The quick brown fox jumped over the moon. What? "));
+ }
+
+ @Test
public void shouldReadRangeOfChildren() {
// Create a shallow tree with many children under one node ...
// /
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-17 15:51:44 UTC (rev 689)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-17 15:54:36 UTC (rev 690)
@@ -27,6 +27,7 @@
import static org.junit.matchers.IsCollectionContaining.hasItems;
import java.io.UnsupportedEncodingException;
import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -67,7 +68,6 @@
private List<Location> locations;
private String[] validLargeValues;
private SubgraphQuery query;
- private SubgraphQuery.Resolver resolver;
@BeforeClass
public static void beforeAll() throws Exception {
@@ -98,11 +98,6 @@
factory = configurator.buildEntityManagerFactory();
manager = factory.createEntityManager();
namespaces = new Namespaces(manager);
- resolver = new SubgraphQuery.Resolver() {
- public Location getLocationFor( UUID uuid ) {
- return new Location(uuid);
- }
- };
manager.getTransaction().begin();
@@ -189,6 +184,23 @@
uuidByPath.put(path, childUuid);
}
+ protected ReferenceEntity createReferenceBetween( String fromPathStr,
+ String toPathStr ) {
+ Path fromPath = path(fromPathStr);
+ Path toPath = path(toPathStr);
+
+ // Look up the UUIDs ...
+ UUID fromUuid = uuidByPath.get(fromPath);
+ UUID toUuid = uuidByPath.get(toPath);
+ assert fromUuid != null;
+ assert toUuid != null;
+
+ // Now create a reference entity ...
+ ReferenceEntity entity = new ReferenceEntity(new ReferenceId(fromUuid.toString(), toUuid.toString()));
+ manager.persist(entity);
+ return entity;
+ }
+
protected UUID uuidForPath( String pathStr ) {
Path path = path(pathStr);
return uuidByPath.get(path);
@@ -446,7 +458,8 @@
verifyNextLocationIs("/a/a1/a2");
verifyNextLocationIs("/a/a1/a3");
verifyNoMoreLocations();
- query.deleteSubgraph(true, resolver);
+ query.deleteSubgraph(true);
+ assertThat(query.getInvalidReferences().isEmpty(), is(true));
query.close();
// Commit the transaction, and start another ...
@@ -494,8 +507,92 @@
// Now, load the one node remaining with
}
- // @Test
- // public void shouldCreateMultipleSubgraphQueriesInDatabase() {
- // }
+ @Test
+ public void shouldNotDeleteSubgraphThatHasNodesReferencedByOtherNodesNotBeingDeleted() throws Exception {
+ // Verify that all the nodes with large values do indeed have them ...
+ verifyNodesHaveLargeValues("/a/a1", "/a/a2", "/a/a2/a1");
+ // Count the number of objects ...
+ assertThat((Long)manager.createQuery("select count(*) from LargeValueEntity").getSingleResult(), is(3L));
+ assertThat((Long)manager.createQuery("select count(*) from PropertiesEntity").getSingleResult(), is(14L));
+ assertThat((Long)manager.createQuery("select count(*) from ChildEntity").getSingleResult(), is(14L));
+
+ // Create references from the "/a/a2" (not being deleted) branch, to the branch being deleted...
+ List<ReferenceEntity> expectedInvalidRefs = new ArrayList<ReferenceEntity>();
+ expectedInvalidRefs.add(createReferenceBetween("/a/a2", "/a/a1"));
+ expectedInvalidRefs.add(createReferenceBetween("/a/a2/a1", "/a/a1/a1"));
+ expectedInvalidRefs.add(createReferenceBetween("/a/a2/a2", "/a/a1/a2"));
+
+ // Create references between nodes in the branch being deleted (these shouldn't matter) ...
+ createReferenceBetween("/a/a1", "/a/a1/a1");
+ createReferenceBetween("/a/a1/a2", "/a/a1/a3");
+
+ // Delete "/a/a1". Note that "/a/a1" has a large value that is shared by "/a/a2", but it's also the only
+ // user of large value #1.
+ Path path = path("/a/a1");
+ UUID uuid = uuidByPath.get(path);
+
+ query = SubgraphQuery.create(context, manager, uuid, path, Integer.MAX_VALUE);
+ locations = query.getNodeLocations(true, true);
+ verifyNextLocationIs("/a/a1");
+ verifyNextLocationIs("/a/a1/a1");
+ verifyNextLocationIs("/a/a1/a2");
+ verifyNextLocationIs("/a/a1/a3");
+ verifyNoMoreLocations();
+ query.deleteSubgraph(true);
+
+ // Now there should be invalid references ...
+ List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ assertThat(invalidReferences.size(), is(3));
+ invalidReferences.removeAll(expectedInvalidRefs);
+ assertThat(invalidReferences.size(), is(0));
+ query.close();
+ }
+
+ @SuppressWarnings( "unchecked" )
+ @Test
+ public void shouldDeleteSubgraphThatHasInternalReferences() throws Exception {
+ // Verify that all the nodes with large values do indeed have them ...
+ verifyNodesHaveLargeValues("/a/a1", "/a/a2", "/a/a2/a1");
+
+ // Count the number of objects ...
+ assertThat((Long)manager.createQuery("select count(*) from LargeValueEntity").getSingleResult(), is(3L));
+ assertThat((Long)manager.createQuery("select count(*) from PropertiesEntity").getSingleResult(), is(14L));
+ assertThat((Long)manager.createQuery("select count(*) from ChildEntity").getSingleResult(), is(14L));
+
+ // Create references from the nodes that aren't being deleted (these won't matter, but will remain)...
+ List<ReferenceEntity> expectedValidRefs = new ArrayList<ReferenceEntity>();
+ expectedValidRefs.add(createReferenceBetween("/a/a2", "/a/a2/a1"));
+
+ // Create references between nodes in the branch being deleted (these shouldn't matter) ...
+ createReferenceBetween("/a/a1", "/a/a1/a1");
+ createReferenceBetween("/a/a1/a2", "/a/a1/a3");
+
+ // Delete "/a/a1". Note that "/a/a1" has a large value that is shared by "/a/a2", but it's also the only
+ // user of large value #1.
+ Path path = path("/a/a1");
+ UUID uuid = uuidByPath.get(path);
+
+ query = SubgraphQuery.create(context, manager, uuid, path, Integer.MAX_VALUE);
+ locations = query.getNodeLocations(true, true);
+ verifyNextLocationIs("/a/a1");
+ verifyNextLocationIs("/a/a1/a1");
+ verifyNextLocationIs("/a/a1/a2");
+ verifyNextLocationIs("/a/a1/a3");
+ verifyNoMoreLocations();
+ query.deleteSubgraph(true);
+
+ // Now there should be no invalid references, since all references into the subgraph were internal ...
+ List<ReferenceEntity> invalidReferences = query.getInvalidReferences();
+ assertThat(invalidReferences.size(), is(0));
+ query.close();
+
+ // Only the reference between nodes outside the deleted subgraph should remain ...
+ Query refQuery = manager.createQuery("select ref from ReferenceEntity as ref");
+ List<ReferenceEntity> remainingReferences = refQuery.getResultList();
+ assertThat(remainingReferences.size(), is(1));
+ remainingReferences.removeAll(expectedValidRefs);
+ assertThat(remainingReferences.size(), is(0));
+ }
+
}
15 years, 3 months
DNA SVN: r689 - trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/basic.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2008-12-17 10:51:44 -0500 (Wed, 17 Dec 2008)
New Revision: 689
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/basic/PathValueFactory.java
Log:
Added null argument check.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/basic/PathValueFactory.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/basic/PathValueFactory.java 2008-12-16 20:11:51 UTC (rev 688)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/basic/PathValueFactory.java 2008-12-17 15:51:44 UTC (rev 689)
@@ -507,6 +507,7 @@
*/
public Path create( Path parentPath,
String subpath ) {
+ CheckArg.isNotNull(parentPath, "parentPath");
CheckArg.isNotNull(subpath, "subpath");
subpath = subpath.trim();
boolean singleChild = subpath.indexOf(Path.DELIMITER) == -1;
15 years, 3 months
DNA SVN: r688 - in trunk/extensions/dna-connector-svn/src: test/java/org/jboss/dna/connector/svn and 1 other directory.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2008-12-16 15:11:51 -0500 (Tue, 16 Dec 2008)
New Revision: 688
Modified:
trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryRequestProcessor.java
trunk/extensions/dna-connector-svn/src/test/java/org/jboss/dna/connector/svn/SVNRepositoryConnectionTest.java
Log:
Fixed compiler warnings by removing unused imports and unused 'suppress-warnings' annotation
Modified: trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryRequestProcessor.java 2008-12-16 17:55:11 UTC (rev 687)
+++ trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryRequestProcessor.java 2008-12-16 20:11:51 UTC (rev 688)
@@ -23,10 +23,8 @@
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
-import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
-import java.util.List;
import org.jboss.dna.common.i18n.I18n;
import org.jboss.dna.common.util.Logger;
import org.jboss.dna.graph.ExecutionContext;
@@ -44,8 +42,6 @@
import org.jboss.dna.graph.properties.Property;
import org.jboss.dna.graph.properties.PropertyFactory;
import org.jboss.dna.graph.properties.ValueFactory;
-import org.jboss.dna.graph.properties.basic.BasicMultiValueProperty;
-import org.jboss.dna.graph.properties.basic.InMemoryBinary;
import org.jboss.dna.graph.requests.CopyBranchRequest;
import org.jboss.dna.graph.requests.CreateNodeRequest;
import org.jboss.dna.graph.requests.DeleteBranchRequest;
@@ -239,8 +235,7 @@
request.addProperty(jcrLastModifiedProperty);
}
if (os.toByteArray().length > 0) {
- Property jcrDataProperty = propertyFactory().create(JcrLexicon.DATA,
- binaryFactory().create(os.toByteArray()));
+ Property jcrDataProperty = propertyFactory().create(JcrLexicon.DATA, binaryFactory().create(os.toByteArray()));
request.addProperty(jcrDataProperty);
}
} else {
Modified: trunk/extensions/dna-connector-svn/src/test/java/org/jboss/dna/connector/svn/SVNRepositoryConnectionTest.java
===================================================================
--- trunk/extensions/dna-connector-svn/src/test/java/org/jboss/dna/connector/svn/SVNRepositoryConnectionTest.java 2008-12-16 17:55:11 UTC (rev 687)
+++ trunk/extensions/dna-connector-svn/src/test/java/org/jboss/dna/connector/svn/SVNRepositoryConnectionTest.java 2008-12-16 20:11:51 UTC (rev 688)
@@ -26,13 +26,10 @@
import static org.hamcrest.core.IsSame.sameInstance;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.stub;
-import static org.mockito.Mockito.verify;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import java.util.concurrent.TimeUnit;
import org.jboss.dna.common.text.UrlEncoder;
import org.jboss.dna.common.util.FileUtil;
import org.jboss.dna.graph.BasicExecutionContext;
@@ -78,7 +75,6 @@
@Mock
private ReadAllChildrenRequest request;
- @SuppressWarnings( "deprecation" )
@Before
public void beforeEach() throws Exception {
MockitoAnnotations.initMocks(this);
15 years, 3 months
DNA SVN: r687 - in trunk: dna-graph/src/main/java/org/jboss/dna/graph/requests/processor and 5 other directories.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2008-12-16 12:55:11 -0500 (Tue, 16 Dec 2008)
New Revision: 687
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/connectors/RepositoryConnection.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/RequestProcessorCacheTest.java
Log:
DNA-40
Added referential integrity checks to the delete process.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/connectors/RepositoryConnection.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/connectors/RepositoryConnection.java 2008-12-16 16:19:33 UTC (rev 686)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/connectors/RepositoryConnection.java 2008-12-16 17:55:11 UTC (rev 687)
@@ -26,6 +26,8 @@
import net.jcip.annotations.NotThreadSafe;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.cache.CachePolicy;
+import org.jboss.dna.graph.properties.PathNotFoundException;
+import org.jboss.dna.graph.properties.ReferentialIntegrityException;
import org.jboss.dna.graph.requests.Request;
/**
@@ -85,6 +87,9 @@
*
* @param context the environment in which the commands are being executed; never null
* @param request the request to be executed; never null
+ * @throws PathNotFoundException if the request(s) contain paths to nodes that do not exist
+ * @throws ReferentialIntegrityException if the request is or contains a delete operation, where the delete could not be
+ * performed because some references to deleted nodes would have remained after the delete operation completed
* @throws RepositorySourceException if there is a problem loading the node data
*/
void execute( ExecutionContext context,
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java 2008-12-16 16:19:33 UTC (rev 686)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java 2008-12-16 17:55:11 UTC (rev 687)
@@ -36,6 +36,7 @@
import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.Path;
import org.jboss.dna.graph.properties.Property;
+import org.jboss.dna.graph.properties.ReferentialIntegrityException;
import org.jboss.dna.graph.properties.basic.BasicEmptyProperty;
import org.jboss.dna.graph.requests.CompositeRequest;
import org.jboss.dna.graph.requests.CopyBranchRequest;
@@ -213,6 +214,8 @@
* </p>
*
* @param request the delete request
+ * @throws ReferentialIntegrityException if the delete could not be performed because some references to deleted nodes would
+ * have remained after the delete operation completed
*/
public abstract void process( DeleteBranchRequest request );
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java 2008-12-16 16:19:33 UTC (rev 686)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java 2008-12-16 17:55:11 UTC (rev 687)
@@ -42,6 +42,7 @@
public static I18n unableToMoveRootNode;
public static I18n locationShouldHavePathAndOrProperty;
public static I18n invalidReferences;
+ public static I18n unableToDeleteBecauseOfReferences;
public static I18n basicModelDescription;
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-16 16:19:33 UTC (rev 686)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-16 17:55:11 UTC (rev 687)
@@ -792,67 +792,72 @@
int maxDepth = request.maximumDepth();
SubgraphQuery query = SubgraphQuery.create(getExecutionContext(), entities, actualLocation.getUuid(), path, maxDepth);
- // Record all of the children ...
- Path parent = path;
- String parentUuid = actual.uuid;
- Location parentLocation = actualLocation;
- List<Location> children = new LinkedList<Location>();
- boolean includeChildrenOfNodesAtMaxDepth = true;
- for (ChildEntity child : query.getNodes(false, includeChildrenOfNodesAtMaxDepth)) {
- String namespaceUri = child.getChildNamespace().getUri();
- String localName = child.getChildName();
- Name childName = nameFactory.create(namespaceUri, localName);
- int sns = child.getSameNameSiblingIndex();
- // Figure out who the parent is ...
- String childParentUuid = child.getId().getParentUuidString();
- if (!parentUuid.equals(childParentUuid)) {
- // The parent isn't the last parent, so record the children found so far ...
+ try {
+ // Record all of the children ...
+ Path parent = path;
+ String parentUuid = actual.uuid;
+ Location parentLocation = actualLocation;
+ List<Location> children = new LinkedList<Location>();
+ boolean includeChildrenOfNodesAtMaxDepth = true;
+ for (ChildEntity child : query.getNodes(false, includeChildrenOfNodesAtMaxDepth)) {
+ String namespaceUri = child.getChildNamespace().getUri();
+ String localName = child.getChildName();
+ Name childName = nameFactory.create(namespaceUri, localName);
+ int sns = child.getSameNameSiblingIndex();
+ // Figure out who the parent is ...
+ String childParentUuid = child.getId().getParentUuidString();
+ if (!parentUuid.equals(childParentUuid)) {
+ // The parent isn't the last parent, so record the children found so far ...
+ request.setChildren(parentLocation, children);
+ // And find the correct parent ...
+ parentLocation = locationsByUuid.get(childParentUuid);
+ parent = parentLocation.getPath();
+ parentUuid = childParentUuid;
+ children = new LinkedList<Location>();
+ }
+ Path childPath = pathFactory.create(parent, childName, sns);
+ String childUuidString = child.getId().getChildUuidString();
+ Location childLocation = new Location(childPath, UUID.fromString(childUuidString));
+ locationsByUuid.put(childUuidString, childLocation);
+ children.add(childLocation);
+ }
+ if (!children.isEmpty()) {
request.setChildren(parentLocation, children);
- // And find the correct parent ...
- parentLocation = locationsByUuid.get(childParentUuid);
- parent = parentLocation.getPath();
- parentUuid = childParentUuid;
- children = new LinkedList<Location>();
}
- Path childPath = pathFactory.create(parent, childName, sns);
- String childUuidString = child.getId().getChildUuidString();
- Location childLocation = new Location(childPath, UUID.fromString(childUuidString));
- locationsByUuid.put(childUuidString, childLocation);
- children.add(childLocation);
- }
- if (!children.isEmpty()) {
- request.setChildren(parentLocation, children);
- }
- // Note that we've found children for nodes that are at the maximum depth. This is so that the nodes
- // in the subgraph all have the correct children. However, we don't want to store the properties for
- // any node whose depth is greater than the maximum depth. Therefore, only get the properties that
- // include nodes within the maximum depth...
- includeChildrenOfNodesAtMaxDepth = false;
+ // Note that we've found children for nodes that are at the maximum depth. This is so that the nodes
+ // in the subgraph all have the correct children. However, we don't want to store the properties for
+ // any node whose depth is greater than the maximum depth. Therefore, only get the properties that
+ // include nodes within the maximum depth...
+ includeChildrenOfNodesAtMaxDepth = false;
- // Now record all of the properties ...
- for (PropertiesEntity props : query.getProperties(true, includeChildrenOfNodesAtMaxDepth)) {
- boolean compressed = props.isCompressed();
- int propertyCount = props.getPropertyCount();
- Collection<Property> properties = new ArrayList<Property>(propertyCount);
- Location nodeLocation = locationsByUuid.get(props.getId().getUuidString());
- assert nodeLocation != null;
- // Record the UUID as a property, since it's not stored in the serialized properties...
- properties.add(actualLocation.getIdProperty(DnaLexicon.UUID));
- // Deserialize all the properties (except the UUID)...
- byte[] data = props.getData();
- if (data != null) {
- LargeValueSerializer largeValues = new LargeValueSerializer(props);
- ByteArrayInputStream bais = new ByteArrayInputStream(data);
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
- try {
- serializer.deserializeAllProperties(ois, properties, largeValues);
- request.setProperties(nodeLocation, properties);
- } finally {
- ois.close();
+ // Now record all of the properties ...
+ for (PropertiesEntity props : query.getProperties(true, includeChildrenOfNodesAtMaxDepth)) {
+ boolean compressed = props.isCompressed();
+ int propertyCount = props.getPropertyCount();
+ Collection<Property> properties = new ArrayList<Property>(propertyCount);
+ Location nodeLocation = locationsByUuid.get(props.getId().getUuidString());
+ assert nodeLocation != null;
+ // Record the UUID as a property, since it's not stored in the serialized properties...
+ properties.add(actualLocation.getIdProperty(DnaLexicon.UUID));
+ // Deserialize all the properties (except the UUID)...
+ byte[] data = props.getData();
+ if (data != null) {
+ LargeValueSerializer largeValues = new LargeValueSerializer(props);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.deserializeAllProperties(ois, properties, largeValues);
+ request.setProperties(nodeLocation, properties);
+ } finally {
+ ois.close();
+ }
}
}
+ } finally {
+ // Close and release the temporary data used for this operation ...
+ query.close();
}
} catch (Throwable e) { // Includes PathNotFoundException
@@ -889,24 +894,35 @@
// Compute the subgraph, including the root ...
SubgraphQuery query = SubgraphQuery.create(getExecutionContext(), entities, actualLocation.getUuid(), path, 0);
- ChildEntity deleted = query.getNode();
- String parentUuidString = deleted.getId().getParentUuidString();
- String childName = deleted.getChildName();
- long nsId = deleted.getChildNamespace().getId();
- int indexInParent = deleted.getIndexInParent();
+ try {
+ ChildEntity deleted = query.getNode();
+ String parentUuidString = deleted.getId().getParentUuidString();
+ String childName = deleted.getChildName();
+ long nsId = deleted.getChildNamespace().getId();
+ int indexInParent = deleted.getIndexInParent();
- // Get the locations of all deleted nodes, which will be required by events ...
- List<Location> deletedLocations = query.getNodeLocations(true, true);
+ // Get the locations of all deleted nodes, which will be required by events ...
+ List<Location> deletedLocations = query.getNodeLocations(true, true);
- // Now delete the subgraph ...
- query.deleteSubgraph(true);
+ // Now delete the subgraph ...
+ SubgraphQuery.Resolver resolver = new SubgraphQuery.Resolver() {
+ public Location getLocationFor( UUID uuid ) {
+ ActualLocation actual = getActualLocation(new Location(uuid));
+ return (actual != null) ? actual.location : null;
+ }
+ };
+ query.deleteSubgraph(true, resolver);
- // And adjust the SNS index and indexes ...
- ChildEntity.adjustSnsIndexesAndIndexesAfterRemoving(entities, parentUuidString, childName, nsId, indexInParent);
- entities.flush();
+ // And adjust the SNS index and indexes ...
+ ChildEntity.adjustSnsIndexesAndIndexesAfterRemoving(entities, parentUuidString, childName, nsId, indexInParent);
+ entities.flush();
- // Remove from the cache of children locations all entries for deleted nodes ...
- cache.removeBranch(deletedLocations);
+ // Remove from the cache of children locations all entries for deleted nodes ...
+ cache.removeBranch(deletedLocations);
+ } finally {
+ // Close and release the temporary data used for this operation ...
+ query.close();
+ }
} catch (Throwable e) { // Includes PathNotFoundException
request.setError(e);
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-16 16:19:33 UTC (rev 686)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java 2008-12-16 17:55:11 UTC (rev 687)
@@ -39,13 +39,15 @@
*/
@Entity
@Table( name = "DNA_SUBGRAPH_NODES" )
-@org.hibernate.annotations.Table( appliesTo = "DNA_SUBGRAPH_NODES", indexes = @Index( name = "QUERYID_INX", columnNames = {"QUERY_ID"} ) )
+@org.hibernate.annotations.Table( appliesTo = "DNA_SUBGRAPH_NODES", indexes = @Index( name = "QUERYID_INX", columnNames = {
+ "QUERY_ID", "UUID"} ) )
@NamedQueries( {
@NamedQuery( name = "SubgraphNodeEntity.insertChildren", query = "insert into SubgraphNodeEntity(queryId,nodeUuid,depth,parentIndexInParent,indexInParent) select parentNode.queryId, child.id.childUuidString, parentNode.depth+1, parentNode.indexInParent, child.indexInParent from ChildEntity child, SubgraphNodeEntity parentNode where child.id.parentUuidString = parentNode.nodeUuid and parentNode.queryId = :queryId and parentNode.depth = :parentDepth" ),
@NamedQuery( name = "SubgraphNodeEntity.getCount", query = "select count(*) from SubgraphNodeEntity where queryId = :queryId" ),
@NamedQuery( name = "SubgraphNodeEntity.getPropertiesEntities", query = "select props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and node.depth <= :maxDepth order by node.depth, node.parentIndexInParent, node.indexInParent" ),
@NamedQuery( name = "SubgraphNodeEntity.getPropertiesEntitiesWithLargeValues", query = "select props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and size(props.largeValues) > 0" ),
@NamedQuery( name = "SubgraphNodeEntity.getChildEntities", query = "select child from ChildEntity child, SubgraphNodeEntity node where child.id.childUuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and node.depth <= :maxDepth order by node.depth, node.parentIndexInParent, node.indexInParent" ),
+ @NamedQuery( name = "SubgraphNodeEntity.getReferenceThatWillBeInvalid", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId) and ref.id.fromUuidString not in (select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId)" ),
@NamedQuery( name = "SubgraphNodeEntity.deletePropertiesEntities", query = "delete PropertiesEntity props where props.id.uuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId and node.depth >= :depth )" ),
@NamedQuery( name = "SubgraphNodeEntity.deleteChildEntities", query = "delete ChildEntity child where child.id.childUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId and node.depth >= :depth )" ),
@NamedQuery( name = "SubgraphNodeEntity.deleteByQueryId", query = "delete SubgraphNodeEntity where queryId = :queryId" )} )
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-16 16:19:33 UTC (rev 686)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java 2008-12-16 17:55:11 UTC (rev 687)
@@ -21,6 +21,7 @@
*/
package org.jboss.dna.connector.store.jpa.models.basic;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -29,12 +30,16 @@
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
+import org.jboss.dna.connector.store.jpa.JpaConnectorI18n;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.Location;
import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.NameFactory;
import org.jboss.dna.graph.properties.Path;
import org.jboss.dna.graph.properties.PathFactory;
+import org.jboss.dna.graph.properties.Reference;
+import org.jboss.dna.graph.properties.ReferentialIntegrityException;
+import org.jboss.dna.graph.properties.ValueFactory;
/**
* Represents a temporary working area for a query that efficiently retrieves the nodes in a subgraph. This class uses the
@@ -50,6 +55,10 @@
*/
public class SubgraphQuery {
+ public interface Resolver {
+ Location getLocationFor( UUID uuid );
+ }
+
/**
* Create a query that returns a subgraph at and below the node with the supplied path and the supplied UUID.
*
@@ -283,10 +292,41 @@
return locations;
}
+ /**
+ * Delete the nodes in the subgraph. This method first checks for references to the deleted nodes from nodes outside the
+ * subgraph, and fails with a {@link ReferentialIntegrityException} if any such references would remain after the delete.
+ *
+ * @param includeRoot true if the root node should also be deleted
+ * @param resolver the resolver that should be used to resolve UUIDs to the corresponding paths; may not be null
+ * @throws ReferentialIntegrityException if the repository's references after the delete would be invalid because they would
+ * reference nodes that are to be deleted
+ */
@SuppressWarnings( "unchecked" )
- public void deleteSubgraph( boolean includeRoot ) {
+ public void deleteSubgraph( boolean includeRoot,
+ Resolver resolver ) throws ReferentialIntegrityException {
if (query == null) throw new IllegalStateException();
+ // Verify referential integrity: that none of the deleted nodes are referenced by nodes not being deleted.
+ Query references = manager.createNamedQuery("SubgraphNodeEntity.getReferenceThatWillBeInvalid");
+ references.setParameter("queryId", query.getId());
+ List<ReferenceEntity> invalidReferences = references.getResultList();
+ if (invalidReferences.size() > 0) {
+ ValueFactory<Reference> refFactory = context.getValueFactories().getReferenceFactory();
+ Map<Location, List<Reference>> invalidRefs = new HashMap<Location, List<Reference>>();
+ for (ReferenceEntity entity : invalidReferences) {
+ UUID fromUuid = UUID.fromString(entity.getId().getFromUuidString());
+ Location location = resolver.getLocationFor(fromUuid);
+ List<Reference> refs = invalidRefs.get(location);
+ if (refs == null) {
+ refs = new ArrayList<Reference>();
+ invalidRefs.put(location, refs);
+ }
+ UUID toUuid = UUID.fromString(entity.getId().getToUuidString());
+ refs.add(refFactory.create(toUuid));
+ }
+ String msg = JpaConnectorI18n.unableToDeleteBecauseOfReferences.text();
+ throw new ReferentialIntegrityException(invalidRefs, msg);
+ }
+
// Delete the PropertiesEntities ...
//
// Right now, Hibernate is not able to support deleting PropertiesEntity in bulk because of the
Modified: trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties 2008-12-16 16:19:33 UTC (rev 686)
+++ trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties 2008-12-16 17:55:11 UTC (rev 687)
@@ -32,5 +32,6 @@
unableToMoveRootNode = Unable to move the root node to another location in {0}
locationShouldHavePathAndOrProperty = The source {0} is unable to find a node without a path or a {1} property
invalidReferences = One or more references were invalid in {0}
+unableToDeleteBecauseOfReferences = At least one deleted node is referenced by a node that is not being deleted
basicModelDescription = Database model that stores node properties as opaque records and children as transparent records. Large property values are stored separately.
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-16 16:19:33 UTC (rev 686)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java 2008-12-16 17:55:11 UTC (rev 687)
@@ -67,6 +67,7 @@
private List<Location> locations;
private String[] validLargeValues;
private SubgraphQuery query;
+ private SubgraphQuery.Resolver resolver;
@BeforeClass
public static void beforeAll() throws Exception {
@@ -97,6 +98,11 @@
factory = configurator.buildEntityManagerFactory();
manager = factory.createEntityManager();
namespaces = new Namespaces(manager);
+ resolver = new SubgraphQuery.Resolver() {
+ public Location getLocationFor( UUID uuid ) {
+ return new Location(uuid);
+ }
+ };
manager.getTransaction().begin();
@@ -440,7 +446,7 @@
verifyNextLocationIs("/a/a1/a2");
verifyNextLocationIs("/a/a1/a3");
verifyNoMoreLocations();
- query.deleteSubgraph(true);
+ query.deleteSubgraph(true, resolver);
query.close();
// Commit the transaction, and start another ...
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/RequestProcessorCacheTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/RequestProcessorCacheTest.java 2008-12-16 16:19:33 UTC (rev 686)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/RequestProcessorCacheTest.java 2008-12-16 17:55:11 UTC (rev 687)
@@ -222,15 +222,15 @@
assertThat(cache.getLocationFor(children2[6].getPath()), is(children2[6]));
assertThat(cache.getLocationFor(children2[7].getPath()), is(children2[7]));
- System.out.println("Before:");
- System.out.println(cache.getString(namespaces));
+ // System.out.println("Before:");
+ // System.out.println(cache.getString(namespaces));
// Move the branch (without a known index) ...
assertThat(cache.moveNode(oldLocation, -1, newLocation), is(true));
- System.out.println("After moving " + oldLocation.getPath().getString(namespaces) + " to "
- + newLocation.getPath().getString(namespaces));
- System.out.println(cache.getString(namespaces));
+ // System.out.println("After moving " + oldLocation.getPath().getString(namespaces) + " to "
+ // + newLocation.getPath().getString(namespaces));
+ // System.out.println(cache.getString(namespaces));
// Check the cache content, which should no longer have any content below the old and new locations ...
LinkedList<Location> afterRemoval = cache.getAllChildren(location.getPath());
@@ -347,8 +347,8 @@
assertThat(cache.getLocationFor(children2[6].getPath()), is(children2[6]));
assertThat(cache.getLocationFor(children2[7].getPath()), is(children2[7]));
- System.out.println("Before:");
- System.out.println(cache.getString(namespaces));
+ // System.out.println("Before:");
+ // System.out.println(cache.getString(namespaces));
// Create the locations that in the branch to be removed ...
List<Location> locationsToRemove = new LinkedList<Location>();
@@ -363,8 +363,8 @@
// Remove the branch ...
assertThat(cache.removeBranch(locationsToRemove), is(true));
- System.out.println("After removing " + locationsToRemove.get(0).getString(namespaces));
- System.out.println(cache.getString(namespaces));
+ // System.out.println("After removing " + locationsToRemove.get(0).getString(namespaces));
+ // System.out.println(cache.getString(namespaces));
// Check the cache content, which should no longer have any content below the old and new locations ...
LinkedList<Location> afterRemoval = cache.getAllChildren(location.getPath());
15 years, 3 months
DNA SVN: r686 - in trunk: dna-graph/src/main/java/org/jboss/dna/graph/properties and 7 other directories.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2008-12-16 11:19:33 -0500 (Tue, 16 Dec 2008)
New Revision: 686
Added:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/ReferentialIntegrityException.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceId.java
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnection.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaSource.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/Model.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java
trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/ModelTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java
Log:
DNA-40
Added support for referential integrity (which is optional) to the JPA connector. References that are values in properties are now tracked in the database, where each reference record maps the UUID of the node containing the reference to the UUID of the node being referenced. Note that this table does not record which property contains the reference, since that is not required to manage dependencies.
Finally, these references are checked upon closing the BasicRequestProcessor, to catch any operations that remove nodes that are still referenced by other nodes.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -1825,7 +1825,7 @@
return new On<BatchConjunction>() {
public BatchConjunction on( Location location ) {
UpdatePropertiesRequest request = new UpdatePropertiesRequest(location, properties);
- queue().submit(request);
+ requestQueue.submit(request);
return nextRequests;
}
@@ -1902,7 +1902,7 @@
return new On<BatchConjunction>() {
public BatchConjunction on( Location location ) {
RemovePropertiesRequest request = new RemovePropertiesRequest(location, propertyNames);
- queue().submit(request);
+ requestQueue.submit(request);
return nextRequests;
}
@@ -1944,7 +1944,7 @@
return new On<BatchConjunction>() {
public BatchConjunction on( Location location ) {
RemovePropertiesRequest request = new RemovePropertiesRequest(location, names);
- queue().submit(request);
+ requestQueue.submit(request);
return nextRequests;
}
@@ -2115,7 +2115,7 @@
public BatchConjunction on( Location at ) {
ReadPropertyRequest request = new ReadPropertyRequest(at, name);
- queue().submit(request);
+ requestQueue.submit(request);
return Batch.this.nextRequests;
}
};
@@ -2136,7 +2136,7 @@
return new On<BatchConjunction>() {
public BatchConjunction on( Location location ) {
ReadAllPropertiesRequest request = new ReadAllPropertiesRequest(location);
- queue().submit(request);
+ requestQueue.submit(request);
return Batch.this.nextRequests;
}
@@ -2199,7 +2199,7 @@
public BatchConjunction of( Location at ) {
ReadAllChildrenRequest request = new ReadAllChildrenRequest(at);
- queue().submit(request);
+ requestQueue.submit(request);
return Batch.this.nextRequests;
}
};
@@ -2221,7 +2221,7 @@
return new At<BatchConjunction>() {
public BatchConjunction at( Location location ) {
ReadBranchRequest request = new ReadBranchRequest(location, depth);
- queue().submit(request);
+ requestQueue.submit(request);
return Batch.this.nextRequests;
}
@@ -3106,7 +3106,7 @@
}
public void submit( Request request ) {
- if (request instanceof UpdatePropertiesRequest) {
+ if (!requests.isEmpty() && request instanceof UpdatePropertiesRequest) {
// If the previous request was also an update, then maybe they can be merged ...
Request previous = requests.getLast();
if (previous instanceof UpdatePropertiesRequest) {
Added: trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/ReferentialIntegrityException.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/ReferentialIntegrityException.java (rev 0)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/ReferentialIntegrityException.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -0,0 +1,116 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.graph.properties;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.jboss.dna.graph.Location;
+
+/**
+ * @author Randall Hauch
+ */
+public class ReferentialIntegrityException extends RuntimeException {
+
+ /**
+ */
+ private static final long serialVersionUID = -3703984046286975978L;
+
+ private final Map<Location, List<Reference>> invalidReferences;
+
+ /**
+ * @param location the location of the node containing the bad reference(s)
+ * @param invalidReferences the invalid references
+ */
+ public ReferentialIntegrityException( Location location,
+ Reference... invalidReferences ) {
+ this.invalidReferences = new HashMap<Location, List<Reference>>();
+ List<Reference> invalidRefList = null;
+ if (invalidReferences == null || invalidReferences.length == 0) {
+ invalidRefList = Collections.emptyList();
+ } else if (invalidReferences.length == 1) {
+ invalidRefList = Collections.singletonList(invalidReferences[0]);
+ } else {
+ invalidRefList = new ArrayList<Reference>();
+ for (Reference ref : invalidReferences) {
+ invalidRefList.add(ref);
+ }
+ }
+ this.invalidReferences.put(location, invalidRefList);
+ }
+
+ /**
+ * @param invalidReferences the map of locations to invalid references
+ */
+ public ReferentialIntegrityException( Map<Location, List<Reference>> invalidReferences ) {
+ this.invalidReferences = invalidReferences;
+ }
+
+ /**
+ * @param invalidReferences the map of locations to invalid references
+ * @param message
+ */
+ public ReferentialIntegrityException( Map<Location, List<Reference>> invalidReferences,
+ String message ) {
+ super(message);
+ this.invalidReferences = invalidReferences;
+ }
+
+ /**
+ * @param invalidReferences the map of locations to invalid references
+ * @param cause
+ */
+ public ReferentialIntegrityException( Map<Location, List<Reference>> invalidReferences,
+ Throwable cause ) {
+ super(cause);
+ this.invalidReferences = invalidReferences;
+ }
+
+ /**
+ * @param invalidReferences the map of locations to invalid references
+ * @param message
+ * @param cause
+ */
+ public ReferentialIntegrityException( Map<Location, List<Reference>> invalidReferences,
+ String message,
+ Throwable cause ) {
+ super(message, cause);
+ this.invalidReferences = invalidReferences;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public String toString() {
+ return super.toString();
+ }
+
+ /**
+ * @return invalidReferences
+ */
+ public Map<Location, List<Reference>> getInvalidReferences() {
+ return invalidReferences;
+ }
+}
Property changes on: trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/ReferentialIntegrityException.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnection.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnection.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnection.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -47,6 +47,7 @@
private final UUID rootNodeUuid;
private final long largeValueMinimumSizeInBytes;
private final boolean compressData;
+ private final boolean enforceReferentialIntegrity;
/*package*/JpaConnection( String sourceName,
CachePolicy cachePolicy,
@@ -54,7 +55,8 @@
Model model,
UUID rootNodeUuid,
long largeValueMinimumSizeInBytes,
- boolean compressData ) {
+ boolean compressData,
+ boolean enforceReferentialIntegrity ) {
assert sourceName != null;
assert entityManager != null;
assert model != null;
@@ -66,6 +68,7 @@
this.rootNodeUuid = rootNodeUuid;
this.largeValueMinimumSizeInBytes = largeValueMinimumSizeInBytes;
this.compressData = compressData;
+ this.enforceReferentialIntegrity = enforceReferentialIntegrity;
}
/**
@@ -125,7 +128,13 @@
public void execute( ExecutionContext context,
Request request ) throws RepositorySourceException {
long size = largeValueMinimumSizeInBytes;
- RequestProcessor proc = model.createRequestProcessor(name, context, entityManager, rootNodeUuid, size, compressData);
+ RequestProcessor proc = model.createRequestProcessor(name,
+ context,
+ entityManager,
+ rootNodeUuid,
+ size,
+ compressData,
+ enforceReferentialIntegrity);
try {
proc.process(request);
} finally {
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -41,6 +41,7 @@
public static I18n unableToReadLargeValue;
public static I18n unableToMoveRootNode;
public static I18n locationShouldHavePathAndOrProperty;
+ public static I18n invalidReferences;
public static I18n basicModelDescription;
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaSource.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaSource.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaSource.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -117,6 +117,7 @@
protected static final String MODEL_NAME = "modelName";
protected static final String LARGE_VALUE_SIZE_IN_BYTES = "largeValueSizeInBytes";
protected static final String COMPRESS_DATA = "compressData";
+ protected static final String ENFORCE_REFERENTIAL_INTEGRITY = "enforceReferentialIntegrity";
/**
* This source supports events.
@@ -148,6 +149,7 @@
private static final int DEFAULT_IDLE_TIME_IN_SECONDS_BEFORE_TESTING_CONNECTIONS = 60 * 3; // 3 minutes
private static final int DEFAULT_LARGE_VALUE_SIZE_IN_BYTES = 2 ^ 10; // 1 kilobyte
private static final boolean DEFAULT_COMPRESS_DATA = true;
+ private static final boolean DEFAULT_ENFORCE_REFERENTIAL_INTEGRITY = true;
/**
* The first serialized version of this source.
@@ -173,6 +175,7 @@
private int cacheTimeToLiveInMilliseconds = DEFAULT_CACHE_TIME_TO_LIVE_IN_SECONDS * 1000;
private long largeValueSizeInBytes = DEFAULT_LARGE_VALUE_SIZE_IN_BYTES;
private boolean compressData = DEFAULT_COMPRESS_DATA;
+ private boolean referentialIntegrityEnforced = DEFAULT_ENFORCE_REFERENTIAL_INTEGRITY;
private final Capabilities capabilities = new Capabilities();
private transient Model model;
private String modelName;
@@ -577,6 +580,20 @@
}
/**
+ * @return referentialIntegrityEnforced
+ */
+ public boolean isReferentialIntegrityEnforced() {
+ return referentialIntegrityEnforced;
+ }
+
+ /**
+ * @param referentialIntegrityEnforced Sets referentialIntegrityEnforced to the specified value.
+ */
+ public void setReferentialIntegrityEnforced( boolean referentialIntegrityEnforced ) {
+ this.referentialIntegrityEnforced = referentialIntegrityEnforced;
+ }
+
+ /**
* {@inheritDoc}
*
* @see org.jboss.dna.graph.connectors.RepositorySource#initialize(org.jboss.dna.graph.connectors.RepositoryContext)
@@ -634,6 +651,7 @@
ref.add(new StringRefAddr(CACHE_TIME_TO_LIVE_IN_MILLISECONDS, Integer.toString(getCacheTimeToLiveInMilliseconds())));
ref.add(new StringRefAddr(LARGE_VALUE_SIZE_IN_BYTES, Long.toString(getLargeValueSizeInBytes())));
ref.add(new StringRefAddr(COMPRESS_DATA, Boolean.toString(isCompressData())));
+ ref.add(new StringRefAddr(ENFORCE_REFERENTIAL_INTEGRITY, Boolean.toString(isReferentialIntegrityEnforced())));
if (getModel() != null) {
ref.add(new StringRefAddr(MODEL_NAME, getModel()));
}
@@ -680,6 +698,7 @@
String retryLimit = values.get(RETRY_LIMIT);
String largeModelSize = values.get(LARGE_VALUE_SIZE_IN_BYTES);
String compressData = values.get(COMPRESS_DATA);
+ String refIntegrity = values.get(ENFORCE_REFERENTIAL_INTEGRITY);
// Create the source instance ...
JpaSource source = new JpaSource();
@@ -703,6 +722,7 @@
if (modelName != null) source.setModel(modelName);
if (largeModelSize != null) source.setLargeValueSizeInBytes(Long.parseLong(largeModelSize));
if (compressData != null) source.setCompressData(Boolean.parseBoolean(compressData));
+ if (refIntegrity != null) source.setReferentialIntegrityEnforced(Boolean.parseBoolean(refIntegrity));
return source;
}
return null;
@@ -807,7 +827,8 @@
if (entityManager == null) {
entityManager = entityManagerFactory.createEntityManager();
}
- return new JpaConnection(getName(), cachePolicy, entityManager, model, rootUuid, largeValueSizeInBytes, compressData);
+ return new JpaConnection(getName(), cachePolicy, entityManager, model, rootUuid, largeValueSizeInBytes, compressData,
+ referentialIntegrityEnforced);
}
/**
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/Model.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/Model.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/Model.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -79,7 +79,8 @@
EntityManager entityManager,
UUID rootNodeUuid,
long largeValueMinimumSizeInBytes,
- boolean comparessData );
+ boolean comparessData,
+ boolean enforceReferentialIntegrity );
/**
* Configure the entity class that will be used by JPA to store information in the database.
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -50,6 +50,7 @@
* possible to efficiently work with nodes containing large numbers of children, where adding and removing child nodes is largely
* independent of the number of children. Also, working with properties is also completely independent of the number of child
* nodes.</li>
+ * <li>ReferenceChanges - the references from one node to another</li>
* <li>Subgraph - a working area for efficiently computing the space of a subgraph; see below</li>
* <li>Change log - a record of the changes that have been made to the repository. This is used to distribute change events across
* multiple distributed processes, and to allow a recently-connected client to identify the set of changes that have been made
@@ -98,7 +99,7 @@
* {@inheritDoc}
*
* @see org.jboss.dna.connector.store.jpa.Model#createRequestProcessor(java.lang.String, org.jboss.dna.graph.ExecutionContext,
- * javax.persistence.EntityManager, java.util.UUID, long, boolean)
+ * javax.persistence.EntityManager, java.util.UUID, long, boolean, boolean)
*/
@Override
public RequestProcessor createRequestProcessor( String sourceName,
@@ -106,9 +107,10 @@
EntityManager entityManager,
UUID rootNodeUuid,
long largeValueMinimumSizeInBytes,
- boolean compressData ) {
+ boolean compressData,
+ boolean enforceReferentialIntegrity ) {
return new BasicRequestProcessor(sourceName, context, entityManager, rootNodeUuid, largeValueMinimumSizeInBytes,
- compressData);
+ compressData, enforceReferentialIntegrity);
}
/**
@@ -126,6 +128,8 @@
configurator.addAnnotatedClass(LargeValueId.class);
configurator.addAnnotatedClass(ChildEntity.class);
configurator.addAnnotatedClass(ChildId.class);
+ configurator.addAnnotatedClass(ReferenceEntity.class);
+ configurator.addAnnotatedClass(ReferenceId.class);
configurator.addAnnotatedClass(SubgraphQueryEntity.class);
configurator.addAnnotatedClass(SubgraphNodeEntity.class);
configurator.addAnnotatedClass(ChangeLogEntity.class);
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -31,6 +31,7 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
@@ -68,8 +69,12 @@
import org.jboss.dna.graph.properties.PathNotFoundException;
import org.jboss.dna.graph.properties.Property;
import org.jboss.dna.graph.properties.PropertyType;
+import org.jboss.dna.graph.properties.Reference;
+import org.jboss.dna.graph.properties.ReferentialIntegrityException;
+import org.jboss.dna.graph.properties.UuidFactory;
import org.jboss.dna.graph.properties.ValueFactories;
import org.jboss.dna.graph.properties.ValueFactory;
+import org.jboss.dna.graph.properties.ValueFormatException;
import org.jboss.dna.graph.requests.CopyBranchRequest;
import org.jboss.dna.graph.requests.CreateNodeRequest;
import org.jboss.dna.graph.requests.DeleteBranchRequest;
@@ -95,6 +100,7 @@
protected final ValueFactory<String> stringFactory;
protected final PathFactory pathFactory;
protected final NameFactory nameFactory;
+ protected final UuidFactory uuidFactory;
protected final Namespaces namespaces;
protected final UUID rootNodeUuid;
protected final String rootNodeUuidString;
@@ -103,6 +109,8 @@
protected final boolean compressData;
protected final Logger logger;
protected final RequestProcessorCache cache;
+ protected final boolean enforceReferentialIntegrity;
+ private boolean referencesChanged;
/**
* @param sourceName
@@ -111,25 +119,30 @@
* @param rootNodeUuid
* @param largeValueMinimumSizeInBytes
* @param compressData
+ * @param enforceReferentialIntegrity
*/
public BasicRequestProcessor( String sourceName,
ExecutionContext context,
EntityManager entityManager,
UUID rootNodeUuid,
long largeValueMinimumSizeInBytes,
- boolean compressData ) {
+ boolean compressData,
+ boolean enforceReferentialIntegrity ) {
super(sourceName, context);
assert entityManager != null;
assert rootNodeUuid != null;
this.entities = entityManager;
- this.stringFactory = context.getValueFactories().getStringFactory();
- this.pathFactory = context.getValueFactories().getPathFactory();
- this.nameFactory = context.getValueFactories().getNameFactory();
+ ValueFactories valuesFactory = context.getValueFactories();
+ this.stringFactory = valuesFactory.getStringFactory();
+ this.pathFactory = valuesFactory.getPathFactory();
+ this.nameFactory = valuesFactory.getNameFactory();
+ this.uuidFactory = valuesFactory.getUuidFactory();
this.namespaces = new Namespaces(entityManager);
this.rootNodeUuid = rootNodeUuid;
this.rootNodeUuidString = this.rootNodeUuid.toString();
this.largeValueMinimumSizeInBytes = largeValueMinimumSizeInBytes;
this.compressData = compressData;
+ this.enforceReferentialIntegrity = enforceReferentialIntegrity;
this.serializer = new Serializer(context, true);
this.logger = getExecutionContext().getLogger(getClass());
this.cache = new RequestProcessorCache(this.pathFactory);
@@ -147,7 +160,6 @@
public void process( CreateNodeRequest request ) {
logger.trace(request.toString());
Location actualLocation = null;
- String childUuidString = null;
try {
// Create nodes have to be defined via a path ...
Location parentLocation = request.under();
@@ -165,7 +177,8 @@
}
}
if (uuidString == null) uuidString = UUID.randomUUID().toString();
- childUuidString = createProperties(uuidString, request.properties());
+ assert uuidString != null;
+ createProperties(uuidString, request.properties());
// Find or create the namespace for the child ...
Name childName = request.named();
@@ -222,14 +235,13 @@
}
// Create the new ChildEntity ...
- ChildId id = new ChildId(parentUuidString, childUuidString);
+ ChildId id = new ChildId(parentUuidString, uuidString);
ChildEntity entity = new ChildEntity(id, nextIndexInParent, ns, childName.getLocalName(), nextSnsIndex);
entities.persist(entity);
// Set the actual path, regardless of the supplied path...
- assert childUuidString != null;
Path path = pathFactory.create(parentPath, childName, nextSnsIndex);
- actualLocation = new Location(path, UUID.fromString(childUuidString));
+ actualLocation = new Location(path, UUID.fromString(uuidString));
// Finally, update the cache with the information we know ...
if (childrenOfParent != null) {
@@ -278,17 +290,19 @@
boolean compressed = entity.isCompressed();
Collection<Property> properties = new LinkedList<Property>();
byte[] data = entity.getData();
- LargeValueSerializer largeValues = new LargeValueSerializer(entity);
- ByteArrayInputStream bais = new ByteArrayInputStream(data);
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
- try {
- serializer.deserializeAllProperties(ois, properties, largeValues);
- for (Property property : properties) {
- request.addProperty(property);
+ if (data != null) {
+ LargeValueSerializer largeValues = new LargeValueSerializer(entity);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.deserializeAllProperties(ois, properties, largeValues);
+ for (Property property : properties) {
+ request.addProperty(property);
+ }
+ } finally {
+ ois.close();
}
- } finally {
- ois.close();
}
} catch (NoResultException e) {
@@ -553,17 +567,19 @@
int propertyCount = entity.getPropertyCount();
Collection<Property> properties = new ArrayList<Property>(propertyCount);
byte[] data = entity.getData();
- LargeValueSerializer largeValues = new LargeValueSerializer(entity);
- ByteArrayInputStream bais = new ByteArrayInputStream(data);
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
- try {
- serializer.deserializeAllProperties(ois, properties, largeValues);
- for (Property property : properties) {
- request.addProperty(property);
+ if (data != null) {
+ LargeValueSerializer largeValues = new LargeValueSerializer(entity);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.deserializeAllProperties(ois, properties, largeValues);
+ for (Property property : properties) {
+ request.addProperty(property);
+ }
+ } finally {
+ ois.close();
}
- } finally {
- ois.close();
}
} catch (NoResultException e) {
// there are no properties (probably not expected, but still okay) ...
@@ -613,18 +629,20 @@
int propertyCount = entity.getPropertyCount();
Collection<Property> properties = new ArrayList<Property>(propertyCount);
byte[] data = entity.getData();
- LargeValueSerializer largeValues = new LargeValueSerializer(entity);
- ByteArrayInputStream bais = new ByteArrayInputStream(data);
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
- try {
- Serializer.LargeValues skippedLargeValues = Serializer.NO_LARGE_VALUES;
- serializer.deserializeSomeProperties(ois, properties, largeValues, skippedLargeValues, propertyName);
- for (Property property : properties) {
- request.setProperty(property); // should be only one property
+ if (data != null) {
+ LargeValueSerializer largeValues = new LargeValueSerializer(entity);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ Serializer.LargeValues skippedLargeValues = Serializer.NO_LARGE_VALUES;
+ serializer.deserializeSomeProperties(ois, properties, largeValues, skippedLargeValues, propertyName);
+ for (Property property : properties) {
+ request.setProperty(property); // should be only one property
+ }
+ } finally {
+ ois.close();
}
- } finally {
- ois.close();
}
} catch (NoResultException e) {
// there are no properties (probably not expected, but still okay) ...
@@ -655,42 +673,88 @@
PropertiesEntity entity = null;
try {
entity = (PropertiesEntity)query.getSingleResult();
- final boolean hadLargeValues = !entity.getLargeValues().isEmpty();
// Prepare the streams so we can deserialize all existing properties and reserialize the old and updated
// properties ...
boolean compressed = entity.isCompressed();
- ByteArrayInputStream bais = new ByteArrayInputStream(entity.getData());
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
+ byte[] originalData = entity.getData();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
OutputStream os = compressed ? new GZIPOutputStream(baos) : baos;
ObjectOutputStream oos = new ObjectOutputStream(os);
- int numProperties = 0;
- Set<String> largeValueHashesWritten = hadLargeValues ? new HashSet<String>() : null;
- LargeValueSerializer largeValues = new LargeValueSerializer(entity, largeValueHashesWritten);
- SkippedLargeValues removedValues = new SkippedLargeValues(largeValues);
- try {
- numProperties = serializer.reserializeProperties(ois, oos, request.properties(), largeValues, removedValues);
- } finally {
+ int numProps = 0;
+ LargeValueSerializer largeValues = null;
+ Collection<Property> props = request.properties();
+ References refs = enforceReferentialIntegrity ? new References() : null;
+ if (originalData == null) {
+ largeValues = new LargeValueSerializer(entity);
+ numProps = props.size();
+ serializer.serializeProperties(oos, numProps, props, largeValues, refs);
+ } else {
+ boolean hadLargeValues = !entity.getLargeValues().isEmpty();
+ Set<String> largeValueHashesWritten = hadLargeValues ? new HashSet<String>() : null;
+ largeValues = new LargeValueSerializer(entity, largeValueHashesWritten);
+ ByteArrayInputStream bais = new ByteArrayInputStream(originalData);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ SkippedLargeValues removedValues = new SkippedLargeValues(largeValues);
try {
- ois.close();
+ Serializer.ReferenceValues refValues = refs != null ? refs : Serializer.NO_REFERENCES_VALUES;
+ numProps = serializer.reserializeProperties(ois, oos, props, largeValues, removedValues, refValues);
} finally {
- oos.close();
+ try {
+ ois.close();
+ } finally {
+ oos.close();
+ }
}
+ // The new large values were recorded and associated with the properties entity during reserialization.
+ // However, any values no longer used now need to be removed ...
+ if (hadLargeValues) {
+ // Remove any large value from the 'skipped' list that was also written ...
+ removedValues.skippedKeys.removeAll(largeValueHashesWritten);
+ for (String oldHexKey : removedValues.skippedKeys) {
+ LargeValueId id = new LargeValueId(oldHexKey);
+ entity.getLargeValues().remove(id);
+ }
+ }
+
+ if (refs != null) {
+ // Remove any existing references ...
+ if (refs.hasRemoved()) {
+ for (Reference reference : refs.getRemoved()) {
+ String toUuid = resolveToUuid(reference);
+ if (toUuid != null) {
+ ReferenceId id = new ReferenceId(actual.uuid, toUuid);
+ ReferenceEntity refEntity = entities.find(ReferenceEntity.class, id);
+ if (refEntity != null) {
+ entities.remove(refEntity);
+ referencesChanged = true;
+ }
+ }
+ }
+ }
+ }
}
- entity.setPropertyCount(numProperties);
+ entity.setPropertyCount(numProps);
entity.setData(baos.toByteArray());
entity.setCompressed(compressData);
- // The new large values were recorded and associated with the properties entity during reserialization.
- // However, any values no longer used now need to be removed ...
- if (hadLargeValues) {
- // Remove any large value from the 'skipped' list that was also written ...
- removedValues.skippedKeys.removeAll(largeValueHashesWritten);
- for (String oldHexKey : removedValues.skippedKeys) {
- LargeValueId id = new LargeValueId(oldHexKey);
- entity.getLargeValues().remove(id);
+ if (refs != null && refs.hasWritten()) {
+ // If there were references from the updated node ...
+ Set<Reference> newReferences = refs.getWritten();
+ // Remove any reference that was written (and not removed) ...
+ newReferences.removeAll(refs.getRead());
+ if (newReferences.size() != 0) {
+ // Now save the new references ...
+ for (Reference reference : newReferences) {
+ String toUuid = resolveToUuid(reference);
+ if (toUuid != null) {
+ ReferenceId id = new ReferenceId(actual.uuid, toUuid);
+ ReferenceEntity refEntity = new ReferenceEntity(id);
+ entities.persist(refEntity);
+ referencesChanged = true;
+ }
+ }
}
}
} catch (NoResultException e) {
@@ -777,15 +841,17 @@
properties.add(actualLocation.getIdProperty(DnaLexicon.UUID));
// Deserialize all the properties (except the UUID)...
byte[] data = props.getData();
- LargeValueSerializer largeValues = new LargeValueSerializer(props);
- ByteArrayInputStream bais = new ByteArrayInputStream(data);
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
- try {
- serializer.deserializeAllProperties(ois, properties, largeValues);
- request.setProperties(nodeLocation, properties);
- } finally {
- ois.close();
+ if (data != null) {
+ LargeValueSerializer largeValues = new LargeValueSerializer(props);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.deserializeAllProperties(ois, properties, largeValues);
+ request.setProperties(nodeLocation, properties);
+ } finally {
+ ois.close();
+ }
}
}
@@ -946,45 +1012,137 @@
request.setActualLocations(actualOldLocation, actualNewLocation);
}
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#close()
+ */
+ @Override
+ public void close() {
+ // Verify that the references are valid so far ...
+ verifyReferences();
+
+ // Now commit the transaction ...
+ EntityTransaction txn = entities.getTransaction();
+ if (txn != null) txn.commit();
+ super.close();
+ }
+
+ /**
+ * {@link ReferenceEntity Reference entities} are added and removed in the appropriate <code>process(...)</code> methods.
+ * However, this method is typically called in {@link BasicRequestProcessor#close()} and performs the following steps:
+ * <ol>
+ * <li>Remove all references that have a "from" node that is under the versions branch.</li>
+ * <li>Verify that all remaining references have a valid and existing "to" node</li>
+ * </ol>
+ */
+ protected void verifyReferences() {
+ if (!enforceReferentialIntegrity) return;
+ if (referencesChanged) {
+
+ // Remove all references that have a "from" node that doesn't support referential integrity ...
+ ReferenceEntity.deleteUnenforcedReferences(entities);
+
+ // Verify that all references are resolved to existing nodes ...
+ int numUnresolved = ReferenceEntity.countAllReferencesResolved(entities);
+ if (numUnresolved != 0) {
+ List<ReferenceEntity> references = ReferenceEntity.verifyAllReferencesResolved(entities);
+ ValueFactory<Reference> refFactory = getExecutionContext().getValueFactories().getReferenceFactory();
+ Map<Location, List<Reference>> invalidRefs = new HashMap<Location, List<Reference>>();
+ for (ReferenceEntity entity : references) {
+ UUID fromUuid = UUID.fromString(entity.getId().getFromUuidString());
+ Location location = new Location(fromUuid);
+ location = getActualLocation(location).location;
+ List<Reference> refs = invalidRefs.get(location);
+ if (refs == null) {
+ refs = new ArrayList<Reference>();
+ invalidRefs.put(location, refs);
+ }
+ UUID toUuid = UUID.fromString(entity.getId().getToUuidString());
+ refs.add(refFactory.create(toUuid));
+ }
+ String msg = JpaConnectorI18n.invalidReferences.text(getSourceName());
+ throw new ReferentialIntegrityException(invalidRefs, msg);
+ }
+
+ referencesChanged = false;
+ }
+ }
+
protected String createProperties( String uuidString,
Collection<Property> properties ) throws IOException {
assert uuidString != null;
- if (properties.isEmpty()) return uuidString;
- if (properties.size() == 1 && properties.iterator().next().getName().equals(JcrLexicon.NAME)) return uuidString;
// Create the PropertiesEntity ...
NodeId nodeId = new NodeId(uuidString);
PropertiesEntity props = new PropertiesEntity(nodeId);
- LargeValueSerializer largeValues = new LargeValueSerializer(props);
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- OutputStream os = compressData ? new GZIPOutputStream(baos) : baos;
- ObjectOutputStream oos = new ObjectOutputStream(os);
- int numProperties = properties.size();
- try {
- serializer.serializeProperties(oos, numProperties, properties, largeValues);
- } finally {
- oos.close();
+ // If there are properties ...
+ boolean processProperties = true;
+ if (properties.isEmpty()) processProperties = false;
+ else if (properties.size() == 1 && properties.iterator().next().getName().equals(JcrLexicon.NAME)) processProperties = false;
+
+ if (processProperties) {
+ References refs = enforceReferentialIntegrity ? new References() : null;
+ LargeValueSerializer largeValues = new LargeValueSerializer(props);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ OutputStream os = compressData ? new GZIPOutputStream(baos) : baos;
+ ObjectOutputStream oos = new ObjectOutputStream(os);
+ int numProperties = properties.size();
+ try {
+ Serializer.ReferenceValues refValues = refs != null ? refs : Serializer.NO_REFERENCES_VALUES;
+ serializer.serializeProperties(oos, numProperties, properties, largeValues, refValues);
+ } finally {
+ oos.close();
+ }
+
+ props.setData(baos.toByteArray());
+ props.setPropertyCount(numProperties);
+
+ // Record the changes to the references ...
+ if (refs != null && refs.hasWritten()) {
+ for (Reference reference : refs.getWritten()) {
+ String toUuid = resolveToUuid(reference);
+ if (toUuid != null) {
+ ReferenceId id = new ReferenceId(uuidString, toUuid);
+ ReferenceEntity refEntity = new ReferenceEntity(id);
+ entities.persist(refEntity);
+ referencesChanged = true;
+ }
+ }
+ }
+ } else {
+ props.setData(null);
+ props.setPropertyCount(0);
}
-
- props.setData(baos.toByteArray());
props.setCompressed(compressData);
- props.setPropertyCount(numProperties);
+ props.setReferentialIntegrityEnforced(true);
entities.persist(props);
+
+ // References will be persisted in the commit ...
return uuidString;
}
/**
- * {@inheritDoc}
+ * Attempt to resolve the reference.
*
- * @see org.jboss.dna.graph.requests.processor.RequestProcessor#close()
+ * @param reference the reference
+ * @return the UUID of the node to which the reference points, or null if the reference could not be resolved
*/
- @Override
- public void close() {
- EntityTransaction txn = entities.getTransaction();
- if (txn != null) txn.commit();
- super.close();
+ protected String resolveToUuid( Reference reference ) {
+ // See if the reference is by UUID ...
+ try {
+ UUID uuid = uuidFactory.create(reference);
+ ActualLocation actualLocation = getActualLocation(new Location(uuid));
+ return actualLocation.uuid;
+ } catch (ValueFormatException e) {
+ // Unknown kind of reference, which we don't track
+ } catch (PathNotFoundException e) {
+ // Unable to resolve reference ...
+ }
+ // Unable to resolve reference ...
+ return null;
}
/**
@@ -1400,4 +1558,79 @@
return this.location.toString() + " (uuid=" + uuid + ") " + childEntity;
}
}
+
+ protected class References implements Serializer.ReferenceValues {
+ private Set<Reference> read;
+ private Set<Reference> removed;
+ private Set<Reference> written;
+
+ protected References() {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.connector.store.jpa.util.Serializer.ReferenceValues#read(org.jboss.dna.graph.properties.Reference)
+ */
+ public void read( Reference reference ) {
+ if (read == null) read = new HashSet<Reference>();
+ read.add(reference);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.connector.store.jpa.util.Serializer.ReferenceValues#remove(org.jboss.dna.graph.properties.Reference)
+ */
+ public void remove( Reference reference ) {
+ if (removed == null) removed = new HashSet<Reference>();
+ removed.add(reference);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.connector.store.jpa.util.Serializer.ReferenceValues#write(org.jboss.dna.graph.properties.Reference)
+ */
+ public void write( Reference reference ) {
+ if (written == null) written = new HashSet<Reference>();
+ written.add(reference);
+ }
+
+ public boolean hasRead() {
+ return read != null;
+ }
+
+ public boolean hasRemoved() {
+ return removed != null;
+ }
+
+ public boolean hasWritten() {
+ return written != null;
+ }
+
+ /**
+ * @return read
+ */
+ public Set<Reference> getRead() {
+ if (read != null) return read;
+ return Collections.emptySet();
+ }
+
+ /**
+ * @return removed
+ */
+ public Set<Reference> getRemoved() {
+ if (removed != null) return removed;
+ return Collections.emptySet();
+ }
+
+ /**
+ * @return written
+ */
+ public Set<Reference> getWritten() {
+ if (written != null) return written;
+ return Collections.emptySet();
+ }
+ }
}
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -24,6 +24,7 @@
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Embeddable;
+import net.jcip.annotations.Immutable;
import org.jboss.dna.common.util.HashCode;
/**
@@ -32,6 +33,8 @@
* @author Randall Hauch
*/
@Embeddable
+@Immutable
+@org.hibernate.annotations.Immutable
public class ChildId implements Serializable {
/**
@@ -48,22 +51,16 @@
public ChildId() {
}
- // public ChildId( UUID parentUuid,
- // UUID childUuid ) {
- // setParentUuid(parentUuid);
- // setChildUuid(childUuid);
- // }
-
public ChildId( NodeId parentId,
NodeId childId ) {
- if (parentId != null) setParentUuidString(parentId.getUuidString());
- if (childId != null) setChildUuidString(childId.getUuidString());
+ if (parentId != null) this.parentUuidString = parentId.getUuidString();
+ if (childId != null) this.childUuidString = childId.getUuidString();
}
public ChildId( String parentUuid,
String childUuid ) {
- setParentUuidString(parentUuid);
- setChildUuidString(childUuid);
+ this.parentUuidString = parentUuid;
+ this.childUuidString = childUuid;
}
/**
@@ -74,13 +71,6 @@
}
/**
- * @param parentUuidString Sets parentUuidString to the specified value.
- */
- public void setParentUuidString( String parentUuidString ) {
- this.parentUuidString = parentUuidString;
- }
-
- /**
* @return childUuidString
*/
public String getChildUuidString() {
@@ -88,13 +78,6 @@
}
/**
- * @param childUuidString Sets childUuidString to the specified value.
- */
- public void setChildUuidString( String childUuidString ) {
- this.childUuidString = childUuidString;
- }
-
- /**
* {@inheritDoc}
*
* @see java.lang.Object#hashCode()
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -53,7 +53,7 @@
private NodeId id;
@Lob
- @Column( name = "DATA", nullable = false, unique = false )
+ @Column( name = "DATA", nullable = true, unique = false )
private byte[] data;
@Column( name = "NUM_PROPS", nullable = false )
@@ -65,6 +65,12 @@
@Column( name = "COMPRESSED", nullable = true )
private Boolean compressed;
+ /**
+ * Flag specifying whether referential integrity is to be enforced for this node's references.
+ */
+ @Column( name = "ENFORCEREFINTEG", nullable = false )
+ private boolean referentialIntegrityEnforced = true;
+
@org.hibernate.annotations.CollectionOfElements( fetch = FetchType.LAZY )
@JoinTable( name = "DNA_LARGEVALUE_USAGES", joinColumns = @JoinColumn( name = "NODE_UUID" ) )
private Collection<LargeValueId> largeValues = new HashSet<LargeValueId>();
@@ -148,6 +154,20 @@
}
/**
+ * @return referentialIntegrityEnforced
+ */
+ public boolean isReferentialIntegrityEnforced() {
+ return referentialIntegrityEnforced;
+ }
+
+ /**
+ * @param referentialIntegrityEnforced Sets referentialIntegrityEnforced to the specified value.
+ */
+ public void setReferentialIntegrityEnforced( boolean referentialIntegrityEnforced ) {
+ this.referentialIntegrityEnforced = referentialIntegrityEnforced;
+ }
+
+ /**
* {@inheritDoc}
*
* @see java.lang.Object#hashCode()
Added: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceEntity.java (rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceEntity.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -0,0 +1,176 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.store.jpa.models.basic;
+
+import java.util.List;
+import javax.persistence.Entity;
+import javax.persistence.EntityManager;
+import javax.persistence.Id;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+import javax.persistence.NoResultException;
+import javax.persistence.Query;
+import javax.persistence.Table;
+import org.hibernate.annotations.Index;
+
+/**
+ * A record of a reference from one node to another.
+ *
+ * @author Randall Hauch
+ */
+@Entity
+@Table( name = "DNA_BASIC_REFERENCES" )
+@org.hibernate.annotations.Table( appliesTo = "DNA_BASIC_REFERENCES", indexes = {
+ @Index( name = "REFINDEX_INX", columnNames = {"FROM_UUID", "TO_UUID"} ),
+ @Index( name = "REFTOUUID_INX", columnNames = {"TO_UUID"} )} )
+@NamedQueries( {
+ @NamedQuery( name = "ReferenceEntity.removeReferencesFrom", query = "delete ReferenceEntity where id.fromUuidString = :fromUuid" ),
+ @NamedQuery( name = "ReferenceEntity.removeNonEnforcedReferences", query = "delete ReferenceEntity as ref where ref.id.fromUuidString not in ( select props.id.uuidString from PropertiesEntity props where props.referentialIntegrityEnforced = true )" ),
+ @NamedQuery( name = "ReferenceEntity.countUnresolveReferences", query = "select count(*) from ReferenceEntity as ref where ref.id.toUuidString not in ( select props.id.uuidString from PropertiesEntity props where props.referentialIntegrityEnforced = true )" ),
+ @NamedQuery( name = "ReferenceEntity.getUnresolveReferences", query = "select ref from ReferenceEntity as ref where ref.id.toUuidString not in ( select props.id.uuidString from PropertiesEntity props where props.referentialIntegrityEnforced = true )" )} )
+public class ReferenceEntity {
+
+ @Id
+ private ReferenceId id;
+
+ /**
+ *
+ */
+ public ReferenceEntity() {
+ }
+
+ /**
+ * @param id the id
+ */
+ public ReferenceEntity( ReferenceId id ) {
+ this.id = id;
+ }
+
+ /**
+ * @return id
+ */
+ public ReferenceId getId() {
+ return id;
+ }
+
+ /**
+ * @param id Sets id to the specified value.
+ */
+ public void setId( ReferenceId id ) {
+ this.id = id;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#hashCode()
+ */
+ @Override
+ public int hashCode() {
+ return id.hashCode();
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#equals(java.lang.Object)
+ */
+ @Override
+ public boolean equals( Object obj ) {
+ if (obj == this) return true;
+ if (obj instanceof ReferenceEntity) {
+ ReferenceEntity that = (ReferenceEntity)obj;
+ if (this.getId().equals(that.getId())) return true;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return this.id.toString();
+ }
+
+ /**
+ * Delete all references that start from the node with the supplied UUID.
+ *
+ * @param uuid the UUID of the node from which the references start
+ * @param manager the manager; may not be null
+ * @return the number of deleted references
+ */
+ public static int deleteReferencesFrom( String uuid,
+ EntityManager manager ) {
+ assert manager != null;
+ Query delete = manager.createNamedQuery("ReferenceEntity.removeReferencesFrom");
+ delete.setParameter("fromUuid", uuid);
+ int result = delete.executeUpdate();
+ manager.flush();
+ return result;
+ }
+
+ /**
+ * Delete all references that start from nodes that do not support enforced referential integrity.
+ *
+ * @param manager the manager; may not be null
+ * @return the number of deleted references
+ */
+ public static int deleteUnenforcedReferences( EntityManager manager ) {
+ assert manager != null;
+ Query delete = manager.createNamedQuery("ReferenceEntity.removeNonEnforcedReferences");
+ int result = delete.executeUpdate();
+ manager.flush();
+ return result;
+ }
+
+ /**
+ * Count the references that cannot be resolved to an existing node that enforces referential integrity.
+ *
+ * @param manager the manager; may not be null
+ * @return the number of unresolved references
+ */
+ public static int countAllReferencesResolved( EntityManager manager ) {
+ assert manager != null;
+ Query query = manager.createNamedQuery("ReferenceEntity.getUnresolveReferences");
+ try {
+ return (Integer)query.getSingleResult();
+ } catch (NoResultException e) {
+ return 0;
+ }
+ }
+
+ /**
+ * Find all references that cannot be resolved to an existing node that enforces referential integrity.
+ *
+ * @param manager the manager; may not be null
+ * @return the list of unresolved references
+ */
+ @SuppressWarnings( "unchecked" )
+ public static List<ReferenceEntity> verifyAllReferencesResolved( EntityManager manager ) {
+ assert manager != null;
+ Query query = manager.createNamedQuery("ReferenceEntity.getUnresolveReferences");
+ return query.getResultList();
+ }
+}
Property changes on: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceEntity.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Added: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceId.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceId.java (rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceId.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -0,0 +1,120 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.store.jpa.models.basic;
+
+import java.io.Serializable;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+import net.jcip.annotations.Immutable;
+import org.jboss.dna.common.util.HashCode;
+
+/**
+ * An identifier for a reference, comprised of a single {@link NodeId} of the node containing the reference and a single
+ * {@link NodeId} of the node being referenced.
+ *
+ * @author Randall Hauch
+ */
+@Embeddable
+@Immutable
+@org.hibernate.annotations.Immutable
+public class ReferenceId implements Serializable {
+
+ /**
+ * Version {@value}
+ */
+ private static final long serialVersionUID = 1L;
+
+ @Column( name = "FROM_UUID", nullable = false, updatable = false, length = 36 )
+ private String fromUuidString;
+
+ @Column( name = "TO_UUID", nullable = false, updatable = false, length = 36 )
+ private String toUuidString;
+
+ public ReferenceId() {
+ }
+
+ public ReferenceId( String fromUuid,
+ String toUuid ) {
+ this.fromUuidString = fromUuid;
+ this.toUuidString = toUuid;
+ }
+
+ /**
+ * @return fromUuidString
+ */
+ public String getFromUuidString() {
+ return fromUuidString;
+ }
+
+ /**
+ * @return toUuidString
+ */
+ public String getToUuidString() {
+ return toUuidString;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#hashCode()
+ */
+ @Override
+ public int hashCode() {
+ return HashCode.compute(fromUuidString, toUuidString);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#equals(java.lang.Object)
+ */
+ @Override
+ public boolean equals( Object obj ) {
+ if (obj == this) return true;
+ if (obj instanceof ReferenceId) {
+ ReferenceId that = (ReferenceId)obj;
+ if (this.fromUuidString == null) {
+ if (that.fromUuidString != null) return false;
+ } else {
+ if (!this.fromUuidString.equals(that.fromUuidString)) return false;
+ }
+ if (this.toUuidString == null) {
+ if (that.toUuidString != null) return false;
+ } else {
+ if (!this.toUuidString.equals(that.toUuidString)) return false;
+ }
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "Reference from " + fromUuidString + " to " + toUuidString;
+ }
+
+}
Property changes on: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceId.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -55,6 +55,7 @@
public class Serializer {
public static final LargeValues NO_LARGE_VALUES = new NoLargeValues();
+ public static final ReferenceValues NO_REFERENCES_VALUES = new NoReferenceValues();
private final PropertyFactory propertyFactory;
private final ValueFactories valueFactories;
@@ -111,6 +112,30 @@
}
/**
+ * Interface used to record how Reference values are processed during serialization and deserialization.
+ *
+ * @author Randall Hauch
+ */
+ public interface ReferenceValues {
+ void read( Reference reference );
+
+ void write( Reference reference );
+
+ void remove( Reference reference );
+ }
+
+ protected static class NoReferenceValues implements ReferenceValues {
+ public void read( Reference arg0 ) {
+ }
+
+ public void remove( Reference arg0 ) {
+ }
+
+ public void write( Reference arg0 ) {
+ }
+ }
+
+ /**
* Serialize the properties' values to the object stream.
* <p>
* If any of the property values are considered {@link LargeValues#getMinimumSize() large}, the value's hash and length of the
@@ -129,22 +154,25 @@
* @param number the number of properties exposed by the supplied <code>properties</code> iterator; must be 0 or positive
* @param properties the iterator over the properties that are to be serialized; may not be null
* @param largeValues the interface to use for writing large values; may not be null
+ * @param references the interface to use for recording which {@link Reference} values were found during serialization, or
+ * null if the references do not need to be accumulated
* @throws IOException if there is an error writing to the <code>stream</code> or <code>largeValues</code>
* @see #deserializeAllProperties(ObjectInputStream, Collection, LargeValues)
* @see #deserializeSomeProperties(ObjectInputStream, Collection, LargeValues, LargeValues, Name...)
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
+ * @see #serializeProperty(ObjectOutputStream, Property, LargeValues, ReferenceValues)
*/
public void serializeProperties( ObjectOutputStream stream,
int number,
Iterable<Property> properties,
- LargeValues largeValues ) throws IOException {
+ LargeValues largeValues,
+ ReferenceValues references ) throws IOException {
assert number >= 0;
assert properties != null;
assert largeValues != null;
stream.writeInt(number);
for (Property property : properties) {
if (property == null) continue;
- serializeProperty(stream, property, largeValues);
+ serializeProperty(stream, property, largeValues, references);
}
}
@@ -166,17 +194,21 @@
* @param stream the stream where the property's values are to be serialized; may not be null
* @param property the property to be serialized; may not be null
* @param largeValues the interface to use for writing large values; may not be null
+ * @param references the interface to use for recording which {@link Reference} values were found during serialization, or
+ * null if the references do not need to be accumulated
* @return true if the property was serialized, or false if it was not
* @throws IOException if there is an error writing to the <code>stream</code> or <code>largeValues</code>
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
- * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues, LargeValues)
+ * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues, ReferenceValues)
+ * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues, LargeValues, ReferenceValues)
*/
public boolean serializeProperty( ObjectOutputStream stream,
Property property,
- LargeValues largeValues ) throws IOException {
+ LargeValues largeValues,
+ ReferenceValues references ) throws IOException {
assert stream != null;
assert property != null;
assert largeValues != null;
+ assert references != null;
final Name name = property.getName();
if (this.excludeUuidProperty && DnaLexicon.UUID.equals(name)) return false;
// Write the name ...
@@ -243,7 +275,9 @@
stream.writeChar(c);
} else if (value instanceof Reference) {
stream.writeChar('R');
- stream.writeObject(((Reference)value).getString());
+ Reference ref = (Reference)value;
+ stream.writeObject(ref.getString());
+ references.write(ref);
} else if (value instanceof Binary) {
Binary binary = (Binary)value;
byte[] hash = null;
@@ -300,6 +334,8 @@
* @param updatedProperties the properties that are being updated (or removed, if there are no values); may not be null
* @param largeValues the interface to use for writing large values; may not be null
* @param removedLargeValues the interface to use for recording the large values that were removed; may not be null
+ * @param references the interface to use for recording which {@link Reference} values were found during serialization;
+ * may not be null
* @return the number of properties
* @throws IOException if there is an error writing to the <code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not be found
@@ -308,11 +344,13 @@
ObjectOutputStream output,
Collection<Property> updatedProperties,
LargeValues largeValues,
- LargeValues removedLargeValues ) throws IOException, ClassNotFoundException {
+ LargeValues removedLargeValues,
+ ReferenceValues references ) throws IOException, ClassNotFoundException {
assert input != null;
assert output != null;
assert updatedProperties != null;
assert largeValues != null;
+ assert references != null;
// Assemble a set of property names to skip deserializing
Set<Name> skipNames = new HashSet<Name>();
for (Property property : updatedProperties) {
@@ -330,10 +368,10 @@
assert name != null;
if (skipNames.contains(name)) {
// Deserialized, but don't materialize ...
- deserializePropertyValues(input, name, true, largeValues, removedLargeValues);
+ deserializePropertyValues(input, name, true, largeValues, removedLargeValues, references);
} else {
// Now read the property values ...
- Object[] values = deserializePropertyValues(input, name, false, largeValues, removedLargeValues);
+ Object[] values = deserializePropertyValues(input, name, false, largeValues, removedLargeValues, references);
// Add the property to the collection ...
Property property = propertyFactory.create(name, values);
assert property != null;
@@ -355,7 +393,7 @@
output.writeInt(numProperties);
for (Property property : allProperties.values()) {
if (property == null) continue;
- serializeProperty(output, property, largeValues);
+ serializeProperty(output, property, largeValues, references);
}
return numProperties;
}
@@ -368,8 +406,8 @@
* @param largeValues the interface to use for writing large values; may not be null
* @throws IOException if there is an error writing to the <code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not be found
- * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues, LargeValues)
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
+ * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues, LargeValues, ReferenceValues)
+ * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues, ReferenceValues)
*/
public void deserializeAllProperties( ObjectInputStream stream,
Collection<Property> properties,
@@ -395,8 +433,8 @@
* @param skippedLargeValues the interface to use for recording the large values that were skipped; may not be null
* @throws IOException if there is an error writing to the <code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not be found
- * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues, LargeValues)
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
+ * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues, LargeValues, ReferenceValues)
+ * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues, ReferenceValues)
*/
public void deserializeSomeProperties( ObjectInputStream stream,
Collection<Property> properties,
@@ -431,14 +469,14 @@
read = name.equals(nameToRead) || (namesToRead != null && namesToRead.contains(namesToRead));
if (read) {
// Now read the property values ...
- Object[] values = deserializePropertyValues(stream, name, false, skippedLargeValues, skippedLargeValues);
+ Object[] values = deserializePropertyValues(stream, name, false, skippedLargeValues, skippedLargeValues, null);
// Add the property to the collection ...
Property property = propertyFactory.create(name, values);
assert property != null;
properties.add(property);
} else {
// Skip the property ...
- deserializePropertyValues(stream, name, true, largeValues, skippedLargeValues);
+ deserializePropertyValues(stream, name, true, largeValues, skippedLargeValues, null);
}
}
}
@@ -452,7 +490,7 @@
* @throws IOException if there is an error writing to the <code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not be found
* @see #deserializeAllProperties(ObjectInputStream, Collection, LargeValues)
- * @see #serializeProperty(ObjectOutputStream, Property, LargeValues)
+ * @see #serializeProperty(ObjectOutputStream, Property, LargeValues, ReferenceValues)
*/
public Property deserializeProperty( ObjectInputStream stream,
LargeValues largeValues ) throws IOException, ClassNotFoundException {
@@ -461,7 +499,7 @@
Name name = valueFactories.getNameFactory().create(nameStr);
assert name != null;
// Now read the property values ...
- Object[] values = deserializePropertyValues(stream, name, false, largeValues, largeValues);
+ Object[] values = deserializePropertyValues(stream, name, false, largeValues, largeValues, null);
// Add the property to the collection ...
return propertyFactory.create(name, values);
}
@@ -474,19 +512,24 @@
* @param skip true if the values don't need to be read, or false if they are to be read
* @param largeValues the interface to use for writing large values; may not be null
* @param skippedLargeValues the interface to use for recording the large values that were skipped; may not be null
+ * @param references the interface to use for recording which {@link Reference} values were found (and/or removed) during
+ * deserialization, or null if the references do not need to be accumulated
* @return the deserialized property values, or an empty list if there are no values
* @throws IOException if there is an error writing to the <code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not be found
* @see #deserializeAllProperties(ObjectInputStream, Collection, LargeValues)
- * @see #serializeProperty(ObjectOutputStream, Property, LargeValues)
+ * @see #serializeProperty(ObjectOutputStream, Property, LargeValues, ReferenceValues)
*/
public Object[] deserializePropertyValues( ObjectInputStream stream,
Name propertyName,
boolean skip,
LargeValues largeValues,
- LargeValues skippedLargeValues ) throws IOException, ClassNotFoundException {
+ LargeValues skippedLargeValues,
+ ReferenceValues references ) throws IOException, ClassNotFoundException {
assert stream != null;
assert propertyName != null;
+ assert largeValues != null;
+ assert skippedLargeValues != null;
// Read the number of values ...
int size = stream.readInt();
Object[] values = skip ? null : new Object[size];
@@ -565,7 +608,16 @@
case 'R':
// Reference
String refValue = (String)stream.readObject();
- if (!skip) value = valueFactories.getReferenceFactory().create(refValue);
+ Reference ref = valueFactories.getReferenceFactory().create(refValue);
+ if (!skip || references != null) {
+ if (!skip) {
+ value = ref;
+ if (references != null) references.remove(ref);
+ } else {
+ assert references != null;
+ references.read(ref);
+ }
+ }
break;
case 'B':
// Binary
Modified: trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties 2008-12-16 16:19:33 UTC (rev 686)
@@ -31,6 +31,6 @@
unableToReadLargeValue = Unable to read from {0} the large property with hash = {1}
unableToMoveRootNode = Unable to move the root node to another location in {0}
locationShouldHavePathAndOrProperty = The source {0} is unable to find a node without a path or a {1} property
+invalidReferences = One or more references were invalid in {0}
-
basicModelDescription = Database model that stores node properties as opaque records and children as transparent records. Large property values are stored separately.
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -70,6 +70,7 @@
private UUID rootNodeUuid;
private long largeValueSize;
private boolean compressData;
+ private boolean enforceReferentialIntegrity;
private Graph graph;
private String[] validLargeValues;
private int numPropsOnEach;
@@ -81,6 +82,7 @@
rootNodeUuid = UUID.randomUUID();
largeValueSize = 2 ^ 10; // 1 kilobyte
compressData = true;
+ enforceReferentialIntegrity = true;
numPropsOnEach = 0;
// Load in the large value ...
@@ -101,7 +103,8 @@
// Create the connection ...
cachePolicy = mock(CachePolicy.class);
- connection = new JpaConnection("source", cachePolicy, manager, model, rootNodeUuid, largeValueSize, compressData);
+ connection = new JpaConnection("source", cachePolicy, manager, model, rootNodeUuid, largeValueSize, compressData,
+ enforceReferentialIntegrity);
// And create the graph ...
graph = Graph.create(connection, context);
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/ModelTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/ModelTest.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/ModelTest.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -108,7 +108,8 @@
EntityManager entityManager,
UUID rootNodeUuid,
long largeValueMinimumSizeInBytes,
- boolean compressData ) {
+ boolean compressData,
+ boolean enforceReferentialIntegrity ) {
return requestProcessor;
}
}
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -125,7 +125,13 @@
EntityManager manager = mock(EntityManager.class);
EntityTransaction txn = mock(EntityTransaction.class);
stub(manager.getTransaction()).toReturn(txn);
- RequestProcessor proc = model.createRequestProcessor("test source", context, manager, UUID.randomUUID(), 100, false);
+ RequestProcessor proc = model.createRequestProcessor("test source",
+ context,
+ manager,
+ UUID.randomUUID(),
+ 100,
+ false,
+ false);
assertThat(proc, is(notNullValue()));
}
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java 2008-12-12 23:00:10 UTC (rev 685)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java 2008-12-16 16:19:33 UTC (rev 686)
@@ -45,6 +45,7 @@
import org.jboss.dna.common.util.CheckArg;
import org.jboss.dna.common.util.SecureHash;
import org.jboss.dna.common.util.StringUtil;
+import org.jboss.dna.connector.store.jpa.util.Serializer.ReferenceValues;
import org.jboss.dna.graph.BasicExecutionContext;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.properties.Binary;
@@ -66,6 +67,7 @@
private LargeValuesHolder largeValues;
private PropertyFactory propertyFactory;
private ValueFactories valueFactories;
+ private ReferenceValues references;
@Before
public void beforeEach() {
@@ -74,6 +76,7 @@
valueFactories = context.getValueFactories();
serializer = new Serializer(context, false);
largeValues = new LargeValuesHolder();
+ references = Serializer.NO_REFERENCES_VALUES;
}
@Test
@@ -274,7 +277,7 @@
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
- serializer.serializeProperty(oos, property, largeValues);
+ serializer.serializeProperty(oos, property, largeValues, references);
} finally {
oos.close();
}
@@ -301,7 +304,7 @@
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
- serializer.serializeProperties(oos, propertyList.size(), propertyList, largeValues);
+ serializer.serializeProperties(oos, propertyList.size(), propertyList, largeValues, references);
} finally {
oos.close();
}
@@ -344,7 +347,7 @@
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
- serializer.serializeProperties(oos, initialProps.size(), initialProps, largeValues);
+ serializer.serializeProperties(oos, initialProps.size(), initialProps, largeValues, references);
} finally {
oos.close();
}
@@ -357,7 +360,7 @@
baos = new ByteArrayOutputStream();
oos = new ObjectOutputStream(baos);
try {
- serializer.reserializeProperties(ois, oos, updatedProps, largeValues, removedLargeValues);
+ serializer.reserializeProperties(ois, oos, updatedProps, largeValues, removedLargeValues, references);
} finally {
oos.close();
ois.close();
15 years, 3 months
DNA SVN: r685 - in trunk: dna-graph/src/main/java/org/jboss/dna/graph/requests and 4 other directories.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2008-12-12 18:00:10 -0500 (Fri, 12 Dec 2008)
New Revision: 685
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/ReadBlockOfChildrenRequest.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/ReadNextBlockOfChildrenRequest.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java
trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildEntity.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
Log:
DNA-40 and DNA-265
Changed the Graph API to improve the methods of getting blocks of children, including adding a new technique for getting the 'n' children that follow a previously-returned sibling. This did change the API, but these were methods that were not used except in test cases.
Also implemented in the JPA Store connector the process methods for ReadBlockOfChildrenRequest and ReadNextBlockOfChildrenRequest, utilizing efficient database operations for these methods to efficiently find and return the block of children.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java 2008-12-12 00:05:32 UTC (rev 684)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java 2008-12-12 23:00:10 UTC (rev 685)
@@ -56,6 +56,7 @@
import org.jboss.dna.graph.requests.ReadAllPropertiesRequest;
import org.jboss.dna.graph.requests.ReadBlockOfChildrenRequest;
import org.jboss.dna.graph.requests.ReadBranchRequest;
+import org.jboss.dna.graph.requests.ReadNextBlockOfChildrenRequest;
import org.jboss.dna.graph.requests.ReadNodeRequest;
import org.jboss.dna.graph.requests.ReadPropertyRequest;
import org.jboss.dna.graph.requests.RemovePropertiesRequest;
@@ -877,12 +878,13 @@
/**
* Request that the children be read on the node defined via the <code>of(...)</code> method on the returned {@link Of}
- * object. Once the location is specified, the {@link List list of children} are read and then returned.
+ * object. The returned object is used to supply the remaining information, including either the {@link Children#of(Location)
+ * location of the parent}, or that a subset of the children should be retrieved {@link Children#inBlockOf(int) in a block}.
*
- * @return the object that is used to specified the node whose children are to be read, and which will return the children
+ * @return the object that is used to specify the remaining inputs for the request, and which will return the children
*/
- public Of<List<Location>> getChildren() {
- return new Of<List<Location>>() {
+ public Children<List<Location>> getChildren() {
+ return new Children<List<Location>>() {
public List<Location> of( String path ) {
return of(new Location(createPath(path)));
}
@@ -909,64 +911,68 @@
queue().submit(request);
return request.getChildren();
}
- };
- }
- /**
- * Request that the children in the specified index range be read on the node defined via the <code>of(...)</code> method on
- * the returned {@link Of} object. Once the location is specified, the {@link List list of children} are read and then
- * returned.
- *
- * @param startingIndex the index of the first child to be read
- * @param endingIndex the index past the last the first child to be read
- * @return the object that is used to specified the node whose children are to be read, and which will return the children
- */
- public Of<List<Location>> getChildrenInRange( final int startingIndex,
- final int endingIndex ) {
- CheckArg.isNonNegative(startingIndex, "startingIndex");
- CheckArg.isPositive(endingIndex, "endingIndex");
- int count = endingIndex - startingIndex;
- return getChildrenInBlock(startingIndex, count);
- }
+ public BlockOfChildren<List<Location>> inBlockOf( final int blockSize ) {
+ return new BlockOfChildren<List<Location>>() {
+ public Under<List<Location>> startingAt( final int startingIndex ) {
+ return new Under<List<Location>>() {
+ public List<Location> under( String path ) {
+ return under(new Location(createPath(path)));
+ }
- /**
- * Request that the children in the specified block be read on the node defined via the <code>of(...)</code> method on the
- * returned {@link Of} object. Once the location is specified, the {@link List list of children} are read and then returned.
- *
- * @param startingIndex the index of the first child to be read
- * @param blockSize the maximum number of children that should be read
- * @return the object that is used to specified the node whose children are to be read, and which will return the children
- */
- public Of<List<Location>> getChildrenInBlock( final int startingIndex,
- final int blockSize ) {
- CheckArg.isNonNegative(startingIndex, "startingIndex");
- CheckArg.isPositive(blockSize, "blockSize");
- return new Of<List<Location>>() {
- public List<Location> of( String path ) {
- return of(new Location(createPath(path)));
- }
+ public List<Location> under( Path path ) {
+ return under(new Location(path));
+ }
- public List<Location> of( Path path ) {
- return of(new Location(path));
- }
+ public List<Location> under( Property idProperty ) {
+ return under(new Location(idProperty));
+ }
- public List<Location> of( Property idProperty ) {
- return of(new Location(idProperty));
- }
+ public List<Location> under( Property firstIdProperty,
+ Property... additionalIdProperties ) {
+ return under(new Location(firstIdProperty, additionalIdProperties));
+ }
- public List<Location> of( Property firstIdProperty,
- Property... additionalIdProperties ) {
- return of(new Location(firstIdProperty, additionalIdProperties));
- }
+ public List<Location> under( UUID uuid ) {
+ return under(new Location(uuid));
+ }
- public List<Location> of( UUID uuid ) {
- return of(new Location(uuid));
- }
+ public List<Location> under( Location at ) {
+ ReadBlockOfChildrenRequest request = new ReadBlockOfChildrenRequest(at, startingIndex, blockSize);
+ queue().submit(request);
+ return request.getChildren();
+ }
+ };
+ }
- public List<Location> of( Location at ) {
- ReadBlockOfChildrenRequest request = new ReadBlockOfChildrenRequest(at, startingIndex, blockSize);
- queue().submit(request);
- return request.getChildren();
+ public List<Location> startingAfter( final Location previousSibling ) {
+ ReadNextBlockOfChildrenRequest request = new ReadNextBlockOfChildrenRequest(previousSibling, blockSize);
+ queue().submit(request);
+ return request.getChildren();
+ }
+
+ public List<Location> startingAfter( String pathOfPreviousSibling ) {
+ return startingAfter(new Location(createPath(pathOfPreviousSibling)));
+ }
+
+ public List<Location> startingAfter( Path pathOfPreviousSibling ) {
+ return startingAfter(new Location(pathOfPreviousSibling));
+ }
+
+ public List<Location> startingAfter( UUID uuidOfPreviousSibling ) {
+ return startingAfter(new Location(uuidOfPreviousSibling));
+ }
+
+ public List<Location> startingAfter( Property idPropertyOfPreviousSibling ) {
+ return startingAfter(new Location(idPropertyOfPreviousSibling));
+ }
+
+ public List<Location> startingAfter( Property firstIdProperyOfPreviousSibling,
+ Property... additionalIdPropertiesOfPreviousSibling ) {
+ return startingAfter(new Location(firstIdProperyOfPreviousSibling,
+ additionalIdPropertiesOfPreviousSibling));
+ }
+ };
}
};
}
@@ -2696,6 +2702,166 @@
}
/**
+ * A component used to supply the details for getting children of another node. If all of the children are to be obtained,
+ * then the parent can be specified using one of the <code>of(...)</code> methods on this component. If, however, only some of
+ * the nodes are to be returned (e.g., a "block" of children), then specify the {@link #inBlockOf(int) block size} followed by
+ * the {@link BlockOfChildren block size and parent}.
+ *
+ * @param <Next>
+ * @author Randall Hauch
+ */
+ public interface Children<Next> extends Of<Next> {
+ /**
+ * Specify that a block of children is to be retrieved, and in particular the number of children that are to be returned.
+ *
+ * @param blockSize the number of children that are to be retrieved in the block; must be positive
+ * @return the interface used to specify the starting point for the block and the parent
+ */
+ BlockOfChildren<Next> inBlockOf( int blockSize );
+ }
+
+ /**
+ * A component used to specify a block of children starting either {@link #startingAt(int) at a particular index} or
+ * {@link #startingAfter(Location) after a previous sibling}.
+ *
+ * @param <Next>
+ * @author Randall Hauch
+ */
+ public interface BlockOfChildren<Next> {
+ /**
+ * Specify the block of children is to start at the supplied index.
+ *
+ * @param startingIndex the zero-based index of the first child to be returned in the block
+ * @return interface used to specify the parent of the children; never null
+ */
+ Under<Next> startingAt( int startingIndex );
+
+ /**
+ * Specify the block of children is to start with the child immediately following the supplied node. This method is
+ * typically used when a previous block of children has already been retrieved and this request is retrieving the next
+ * block.
+ *
+ * @param previousSibling the location of the sibling node that is before the first node in the block
+ * @return the children; never null
+ */
+ Next startingAfter( Location previousSibling );
+
+ /**
+ * Specify the block of children is to start with the child immediately following the supplied node. This method is
+ * typically used when a previous block of children has already been retrieved and this request is retrieving the next
+ * block.
+ *
+ * @param pathToPreviousSiblingName the path of the sibling node that is before the first node in the block
+ * @return the children; never null
+ */
+ Next startingAfter( String pathToPreviousSiblingName );
+
+ /**
+ * Specify the block of children is to start with the child immediately following the supplied node. This method is
+ * typically used when a previous block of children has already been retrieved and this request is retrieving the next
+ * block.
+ *
+ * @param previousSibling the path of the sibling node that is before the first node in the block
+ * @return the children; never null
+ */
+ Next startingAfter( Path previousSibling );
+
+ /**
+ * Specify the block of children is to start with the child immediately following the supplied node. This method is
+ * typically used when a previous block of children has already been retrieved and this request is retrieving the next
+ * block.
+ *
+ * @param previousSiblingUuid the UUID of the sibling node that is before the first node in the block
+ * @return the children; never null
+ */
+ Next startingAfter( UUID previousSiblingUuid );
+
+ /**
+ * Specify the block of children is to start with the child immediately following the supplied node. This method is
+ * typically used when a previous block of children has already been retrieved and this request is retrieving the next
+ * block.
+ *
+ * @param idPropertyOfPreviousSibling the property that uniquely identifies the previous sibling
+ * @return the children; never null
+ */
+ Next startingAfter( Property idPropertyOfPreviousSibling );
+
+ /**
+ * Specify the block of children is to start with the child immediately following the supplied node. This method is
+ * typically used when a previous block of children has already been retrieved and this request is retrieving the next
+ * block.
+ *
+ * @param firstIdPropertyOfPreviousSibling the first property that, with the
+ * <code>additionalIdPropertiesOfPreviousSibling</code>, uniquely identifies the previous sibling
+ * @param additionalIdPropertiesOfPreviousSibling the additional properties that, with the
+ * <code>firstIdPropertyOfPreviousSibling</code>, uniquely identify the previous sibling
+ * @return the children; never null
+ */
+ Next startingAfter( Property firstIdPropertyOfPreviousSibling,
+ Property... additionalIdPropertiesOfPreviousSibling );
+ }
+
+ /**
+ * The interface for defining the node under which a request operates.
+ *
+ * @param <Next> The interface that is to be returned when the request is completed
+ * @author Randall Hauch
+ */
+ public interface Under<Next> {
+ /**
+ * Specify the location of the node under which the request is to operate.
+ *
+ * @param to the location of the new parent
+ * @return the interface for additional requests or actions
+ */
+ Next under( Location to );
+
+ /**
+ * Specify the path of the node under which the request is to operate.
+ *
+ * @param toPath the path of the new parent
+ * @return the interface for additional requests or actions
+ */
+ Next under( String toPath );
+
+ /**
+ * Specify the path of the node under which the request is to operate.
+ *
+ * @param to the path of the new parent
+ * @return the interface for additional requests or actions
+ */
+ Next under( Path to );
+
+ /**
+ * Specify the UUID of the node under which the request is to operate.
+ *
+ * @param to the UUID of the new parent
+ * @return the interface for additional requests or actions
+ */
+ Next under( UUID to );
+
+ /**
+ * Specify the unique identification property that identifies the node under which the request is to operate.
+ *
+ * @param idProperty the property that uniquely identifies the new parent
+ * @return the interface for additional requests or actions
+ */
+ Next under( Property idProperty );
+
+ /**
+ * Specify the unique identification properties that identify the node under which the request is to operate.
+ *
+ * @param firstIdProperty the first property that, with the <code>additionalIdProperties</code>, uniquely identifies the
+ * new parent
+ * @param additionalIdProperties the additional properties that, with the <code>firstIdProperty</code>, uniquely
+ * identify the new parent
+ * @return the interface for additional requests or actions
+ */
+ Next under( Property firstIdProperty,
+ Property... additionalIdProperties );
+ }
+
+ /**
* A component used to set the values on a property.
*
* @param <Next>
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/ReadBlockOfChildrenRequest.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/ReadBlockOfChildrenRequest.java 2008-12-12 00:05:32 UTC (rev 684)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/ReadBlockOfChildrenRequest.java 2008-12-12 23:00:10 UTC (rev 685)
@@ -58,7 +58,7 @@
* an array.
*
* @param of the location of the node whose children are to be read
- * @param startingIndex the index of the first child to be included in the block
+ * @param startingIndex the zero-based index of the first child to be included in the block
* @param count the maximum number of children that should be included in the block
* @throws IllegalArgumentException if the location is null, if <code>startingIndex</code> is negative, or if
* <code>count</code> is less than 1.
@@ -108,7 +108,8 @@
* Get the starting index of the block, which is the index of the first child to include. This index corresponds to the index
* of all children in the list, not the {@link Path.Segment#getIndex() same-name-sibling index}.
*
- * @return the child index at which this block starts; never negative and always less than {@link #endingBefore()}
+ * @return the (zero-based) child index at which this block starts; never negative and always less than
+ * {@link #endingBefore()}
* @see #endingBefore()
* @see #count()
*/
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/ReadNextBlockOfChildrenRequest.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/ReadNextBlockOfChildrenRequest.java 2008-12-12 00:05:32 UTC (rev 684)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/ReadNextBlockOfChildrenRequest.java 2008-12-12 23:00:10 UTC (rev 685)
@@ -33,7 +33,8 @@
/**
* Instruction to read a block of the children of a node, where the block is dictated by the {@link #startingAfter location of the
* child preceding the block} and the {@link #count() maximum number of children} to include in the block. This command is useful
- * when paging through a large number of children.
+ * when paging through a large number of children, when the previous block of children was already retrieved and the next block is
+ * to be read.
*
* @see ReadBlockOfChildrenRequest
* @author Randall Hauch
@@ -44,31 +45,23 @@
private static final long serialVersionUID = 1L;
- private final Location of;
private final List<Location> children = new LinkedList<Location>();
private final Location startingAfter;
private final int count;
- private Location actualLocation;
+ private Location actualStartingAfter;
/**
- * Create a request to read a block of the children of a node at the supplied location. The block is defined by the starting
- * index of the first child and the number of children to include. Note that this index is <i>not</i> the
- * {@link Path.Segment#getIndex() same-name-sibiling index}, but rather is the index of the child as if the children were in
- * an array.
+ * Create a request to read those children of a node that are immediately after a supplied sibling node.
*
- * @param of the location of the node whose children are to be read
- * @param startingAfter the child that was the last child of the previous block of children read
+ * @param startingAfter the location of the previous sibling that was the last child of the previous block of children read
* @param count the maximum number of children that should be included in the block
* @throws IllegalArgumentException if the location is null, if <code>startingAfter</code> is null, or if
 *        <code>count</code> is less than 1.
*/
- public ReadNextBlockOfChildrenRequest( Location of,
- Location startingAfter,
+ public ReadNextBlockOfChildrenRequest( Location startingAfter,
int count ) {
- CheckArg.isNotNull(of, "of");
CheckArg.isNotNull(startingAfter, "startingAfter");
CheckArg.isPositive(count, "count");
- this.of = of;
this.startingAfter = startingAfter;
this.count = count;
}
@@ -84,15 +77,6 @@
}
/**
- * Get the location defining the node whose children are to be read.
- *
- * @return the location of the parent node; never null
- */
- public Location of() {
- return of;
- }
-
- /**
* Get the maximum number of children that may be returned in the block.
*
* @return the block's maximum count
@@ -176,28 +160,29 @@
* Sets the actual and complete location of the node whose children have been read. This method must be called when processing
* the request, and the actual location must have a {@link Location#getPath() path}.
*
- * @param actual the actual location of the node being read, or null if the {@link #of() current location} should be used
+ * @param actual the actual location of the node being read, or null if the {@link #startingAfter() starting after location}
+ * should be used
* @throws IllegalArgumentException if the actual location does not represent the {@link Location#isSame(Location) same
- * location} as the {@link #of() current location}, or if the actual location does not have a path.
+ * location} as the {@link #startingAfter() starting after location}, or if the actual location does not have a path.
*/
- public void setActualLocationOfNode( Location actual ) {
- if (!of.isSame(actual)) { // not same if actual is null
- throw new IllegalArgumentException(GraphI18n.actualLocationIsNotSameAsInputLocation.text(actual, of));
+ public void setActualLocationOfStartingAfterNode( Location actual ) {
+ if (!startingAfter.isSame(actual)) { // not same if actual is null
+ throw new IllegalArgumentException(GraphI18n.actualLocationIsNotSameAsInputLocation.text(actual, startingAfter));
}
assert actual != null;
if (!actual.hasPath()) {
throw new IllegalArgumentException(GraphI18n.actualLocationMustHavePath.text(actual));
}
- this.actualLocation = actual;
+ this.actualStartingAfter = actual;
}
/**
- * Get the actual location of the node whose children were read.
+ * Get the actual location of the {@link #startingAfter() starting after} sibling.
*
* @return the actual location, or null if the actual location was not set
*/
- public Location getActualLocationOfNode() {
- return actualLocation;
+ public Location getActualLocationOfStartingAfterNode() {
+ return actualStartingAfter;
}
/**
@@ -209,7 +194,6 @@
public boolean equals( Object obj ) {
if (this.getClass().isInstance(obj)) {
ReadNextBlockOfChildrenRequest that = (ReadNextBlockOfChildrenRequest)obj;
- if (!this.of().equals(that.of())) return false;
if (!this.startingAfter().equals(that.startingAfter())) return false;
if (this.count() != that.count()) return false;
return true;
@@ -225,9 +209,9 @@
@Override
public String toString() {
if (count() == 1) {
- return "read one child of " + of() + " starting after " + startingAfter();
+ return "read the next child after " + startingAfter();
}
- return "read " + count() + " children of " + of();
+ return "read the next " + count() + " children after " + startingAfter();
}
}
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java 2008-12-12 00:05:32 UTC (rev 684)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/processor/RequestProcessor.java 2008-12-12 23:00:10 UTC (rev 685)
@@ -283,8 +283,22 @@
*/
public void process( ReadNextBlockOfChildrenRequest request ) {
if (request == null) return;
+
+ // Get the parent path ...
+ Path path = request.startingAfter().getPath();
+ Location actualSiblingLocation = request.startingAfter();
+ Path parentPath = null;
+ if (path != null) parentPath = path.getParent();
+ if (parentPath == null) {
+ ReadAllPropertiesRequest readPropertiesOfSibling = new ReadAllPropertiesRequest(request.startingAfter());
+ process(readPropertiesOfSibling);
+ actualSiblingLocation = readPropertiesOfSibling.getActualLocationOfNode();
+ parentPath = actualSiblingLocation.getPath().getParent();
+ }
+ assert parentPath != null;
+
// Convert the request to a ReadAllChildrenRequest and execute it ...
- ReadAllChildrenRequest readAll = new ReadAllChildrenRequest(request.of());
+ ReadAllChildrenRequest readAll = new ReadAllChildrenRequest(new Location(parentPath));
process(readAll);
if (readAll.hasError()) {
request.setError(readAll.getError());
@@ -308,7 +322,7 @@
}
// Set the actual location ...
- request.setActualLocationOfNode(readAll.getActualLocationOfNode());
+ request.setActualLocationOfStartingAfterNode(actualSiblingLocation);
}
/**
Modified: trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java
===================================================================
--- trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java 2008-12-12 00:05:32 UTC (rev 684)
+++ trunk/dna-graph/src/test/java/org/jboss/dna/graph/GraphTest.java 2008-12-12 23:00:10 UTC (rev 685)
@@ -56,6 +56,7 @@
import org.jboss.dna.graph.requests.ReadAllChildrenRequest;
import org.jboss.dna.graph.requests.ReadAllPropertiesRequest;
import org.jboss.dna.graph.requests.ReadBlockOfChildrenRequest;
+import org.jboss.dna.graph.requests.ReadNextBlockOfChildrenRequest;
import org.jboss.dna.graph.requests.ReadNodeRequest;
import org.jboss.dna.graph.requests.ReadPropertyRequest;
import org.jboss.dna.graph.requests.Request;
@@ -214,11 +215,15 @@
assertThat(read.getChildren(), hasItems(children));
}
- protected void assertNextRequestReadRangeOfChildren( Location at,
- int startIndex,
- int endIndex,
- Location... children ) {
- assertNextRequestReadBlockOfChildren(at, startIndex, endIndex - startIndex, children);
+ protected void assertNextRequestReadNextBlockOfChildren( Location previousSibling,
+ int maxCount,
+ Location... children ) {
+ Request request = executedRequests.poll();
+ assertThat(request, is(instanceOf(ReadNextBlockOfChildrenRequest.class)));
+ ReadNextBlockOfChildrenRequest read = (ReadNextBlockOfChildrenRequest)request;
+ assertThat(read.startingAfter(), is(previousSibling));
+ assertThat(read.count(), is(maxCount));
+ assertThat(read.getChildren(), hasItems(children));
}
protected void assertNextRequestReadNode( Location at ) {
@@ -418,36 +423,36 @@
}
@Test
- public void shouldGetChildrenInBlockOnNode() {
+ public void shouldGetChildrenInBlockAtStartingIndex() {
Location child1 = new Location(createPath(validPath, "x"));
Location child2 = new Location(createPath(validPath, "y"));
Location child3 = new Location(createPath(validPath, "z"));
setChildrenToReadOn(new Location(validPath), child1, child2, child3);
- List<Location> children = graph.getChildrenInBlock(0, 2).of(validPath);
+ List<Location> children = graph.getChildren().inBlockOf(2).startingAt(0).under(validPath);
assertThat(numberOfExecutions, is(1));
assertNextRequestReadBlockOfChildren(new Location(validPath), 0, 2, child1, child2);
assertNoMoreRequests();
assertThat(children, hasItems(child1, child2));
- children = graph.getChildrenInBlock(1, 2).of(validPath);
+ children = graph.getChildren().inBlockOf(2).startingAt(1).under(validPath);
assertThat(numberOfExecutions, is(1));
assertNextRequestReadBlockOfChildren(new Location(validPath), 1, 2, child2, child3);
assertNoMoreRequests();
assertThat(children, hasItems(child2, child3));
- children = graph.getChildrenInBlock(2, 2).of(validPath);
+ children = graph.getChildren().inBlockOf(2).startingAt(2).under(validPath);
assertThat(numberOfExecutions, is(1));
assertNextRequestReadBlockOfChildren(new Location(validPath), 2, 2, child3);
assertNoMoreRequests();
assertThat(children, hasItems(child3));
- children = graph.getChildrenInBlock(20, 2).of(validPath);
+ children = graph.getChildren().inBlockOf(2).startingAt(20).under(validPath);
assertThat(numberOfExecutions, is(1));
assertNextRequestReadBlockOfChildren(new Location(validPath), 20, 2);
assertNoMoreRequests();
assertThat(children.isEmpty(), is(true));
- children = graph.getChildrenInBlock(0, 20).of(validPath);
+ children = graph.getChildren().inBlockOf(20).startingAt(0).under(validPath);
assertThat(numberOfExecutions, is(1));
assertNextRequestReadBlockOfChildren(new Location(validPath), 0, 20, child1, child2, child3);
assertNoMoreRequests();
@@ -455,40 +460,32 @@
}
@Test
- public void shouldGetChildrenInRangeOnNode() {
- Location child1 = new Location(createPath(validPath, "x"));
- Location child2 = new Location(createPath(validPath, "y"));
- Location child3 = new Location(createPath(validPath, "z"));
+ public void shouldGetChildrenInBlockAfterPreviousSibling() {
+ Path pathX = createPath(validPath, "x");
+ Path pathY = createPath(validPath, "y");
+ Path pathZ = createPath(validPath, "z");
+ Location child1 = new Location(pathX);
+ Location child2 = new Location(pathY);
+ Location child3 = new Location(pathZ);
setChildrenToReadOn(new Location(validPath), child1, child2, child3);
- List<Location> children = graph.getChildrenInRange(0, 2).of(validPath);
- assertThat(numberOfExecutions, is(1));
- assertNextRequestReadRangeOfChildren(new Location(validPath), 0, 2, child1, child2);
- assertNoMoreRequests();
- assertThat(children, hasItems(child1, child2));
- children = graph.getChildrenInRange(1, 3).of(validPath);
+ List<Location> children = graph.getChildren().inBlockOf(2).startingAfter(pathX);
assertThat(numberOfExecutions, is(1));
- assertNextRequestReadRangeOfChildren(new Location(validPath), 1, 3, child2, child3);
+ assertNextRequestReadNextBlockOfChildren(new Location(pathX), 2, child2, child3);
assertNoMoreRequests();
assertThat(children, hasItems(child2, child3));
- children = graph.getChildrenInRange(2, 4).of(validPath);
+ children = graph.getChildren().inBlockOf(3).startingAfter(pathX);
assertThat(numberOfExecutions, is(1));
- assertNextRequestReadRangeOfChildren(new Location(validPath), 2, 4, child3);
+ assertNextRequestReadNextBlockOfChildren(new Location(pathX), 3, child2, child3);
assertNoMoreRequests();
- assertThat(children, hasItems(child3));
+ assertThat(children, hasItems(child2, child3));
- children = graph.getChildrenInRange(20, 21).of(validPath);
+ children = graph.getChildren().inBlockOf(2).startingAfter(pathY);
assertThat(numberOfExecutions, is(1));
- assertNextRequestReadRangeOfChildren(new Location(validPath), 20, 21);
+ assertNextRequestReadNextBlockOfChildren(new Location(pathY), 2, child3);
assertNoMoreRequests();
- assertThat(children.isEmpty(), is(true));
-
- children = graph.getChildrenInRange(0, 20).of(validPath);
- assertThat(numberOfExecutions, is(1));
- assertNextRequestReadRangeOfChildren(new Location(validPath), 0, 20, child1, child2, child3);
- assertNoMoreRequests();
- assertThat(children, hasItems(child1, child2, child3));
+ assertThat(children, hasItems(child3));
}
@Test
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-12 00:05:32 UTC (rev 684)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-12 23:00:10 UTC (rev 685)
@@ -77,7 +77,9 @@
import org.jboss.dna.graph.requests.MoveBranchRequest;
import org.jboss.dna.graph.requests.ReadAllChildrenRequest;
import org.jboss.dna.graph.requests.ReadAllPropertiesRequest;
+import org.jboss.dna.graph.requests.ReadBlockOfChildrenRequest;
import org.jboss.dna.graph.requests.ReadBranchRequest;
+import org.jboss.dna.graph.requests.ReadNextBlockOfChildrenRequest;
import org.jboss.dna.graph.requests.ReadNodeRequest;
import org.jboss.dna.graph.requests.ReadPropertyRequest;
import org.jboss.dna.graph.requests.UpdatePropertiesRequest;
@@ -172,8 +174,8 @@
assert ns != null;
// Figure out the next SNS index and index-in-parent for this new child ...
- int nextSnsIndex = 1;
- int nextIndexInParent = 1;
+ int nextSnsIndex = 1; // SNS index is 1-based
+ int nextIndexInParent = 0; // index-in-parent is 0-based
final Path parentPath = actual.location.getPath();
assert parentPath != null;
// Look in the cache for the children of the parent node.
@@ -181,7 +183,7 @@
if (childrenOfParent != null) {
// The cache had the complete list of children for the parent node, which means
// we know about all of the children and can walk the children to figure out the next indexes.
- nextIndexInParent = childrenOfParent.size() + 1;
+ nextIndexInParent = childrenOfParent.size();
if (nextIndexInParent > 1) {
// Since we want the last indexes, process the list backwards ...
ListIterator<Location> iter = childrenOfParent.listIterator(childrenOfParent.size());
@@ -205,7 +207,7 @@
query.setParameter("childName", childLocalName);
try {
Integer result = (Integer)query.getSingleResult();
- nextSnsIndex = result != null ? result + 1 : 1;
+ nextSnsIndex = result != null ? result + 1 : 1; // SNS index is 1-based
} catch (NoResultException e) {
}
@@ -214,7 +216,7 @@
query.setParameter("parentUuid", parentUuidString);
try {
Integer result = (Integer)query.getSingleResult();
- nextIndexInParent = result != null ? result + 1 : 1;
+ nextIndexInParent = result != null ? result + 1 : 0; // index-in-parent is 0-based
} catch (NoResultException e) {
}
}
@@ -374,6 +376,158 @@
/**
* {@inheritDoc}
*
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.ReadBlockOfChildrenRequest)
+ */
+ @Override
+ public void process( ReadBlockOfChildrenRequest request ) {
+ logger.trace(request.toString());
+ Location actualLocation = null;
+ final int startingIndex = request.startingAtIndex();
+ try {
+ Location parentLocation = request.of();
+ ActualLocation actualParent = getActualLocation(parentLocation);
+ actualLocation = actualParent.location;
+
+ Path parentPath = actualParent.location.getPath();
+ assert parentPath != null;
+ LinkedList<Location> cachedChildren = cache.getAllChildren(parentPath);
+ if (cachedChildren != null) {
+ // The cache has all of the children for the node ...
+ if (startingIndex < cachedChildren.size()) {
+ ListIterator<Location> iter = cachedChildren.listIterator(startingIndex);
+ for (int i = 0; i != request.count() && iter.hasNext(); ++i) {
+ Location child = iter.next();
+ request.addChild(child);
+ }
+ }
+ } else {
+ // Nothing was cached, so we need to search the database for the children ...
+ Query query = entities.createNamedQuery("ChildEntity.findRangeUnderParent");
+ query.setParameter("parentUuidString", actualParent.uuid);
+ query.setParameter("firstIndex", startingIndex);
+ query.setParameter("afterIndex", startingIndex + request.count());
+ @SuppressWarnings( "unchecked" )
+ List<ChildEntity> children = query.getResultList();
+ for (ChildEntity child : children) {
+ String namespaceUri = child.getChildNamespace().getUri();
+ String localName = child.getChildName();
+ Name childName = nameFactory.create(namespaceUri, localName);
+ int sns = child.getSameNameSiblingIndex();
+ Path childPath = pathFactory.create(parentPath, childName, sns);
+ String childUuidString = child.getId().getChildUuidString();
+ Location childLocation = new Location(childPath, UUID.fromString(childUuidString));
+ request.addChild(childLocation);
+ }
+ // Do not update the cache, since we don't know all of the children.
+ }
+
+ } catch (NoResultException e) {
+            // there are no children (probably not expected, but still okay) ...
+ } catch (Throwable e) { // Includes PathNotFoundException
+ request.setError(e);
+ return;
+ }
+ if (actualLocation != null) request.setActualLocationOfNode(actualLocation);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.ReadNextBlockOfChildrenRequest)
+ */
+ @Override
+ public void process( ReadNextBlockOfChildrenRequest request ) {
+ logger.trace(request.toString());
+ Location actualLocation = null;
+ final Location previousSibling = request.startingAfter();
+ final int count = request.count();
+ try {
+ ActualLocation actualSibling = getActualLocation(previousSibling);
+ actualLocation = actualSibling.location;
+ if (!actualLocation.getPath().isRoot()) {
+ // First look in the cache for the children of the parent ...
+ Path parentPath = actualSibling.location.getPath().getParent();
+ assert parentPath != null;
+ LinkedList<Location> cachedChildren = cache.getAllChildren(parentPath);
+ if (cachedChildren != null) {
+ // The cache has all of the children for the node.
+ // First find the location of the previous sibling ...
+ boolean accumulate = false;
+ int counter = 0;
+ for (Location child : cachedChildren) {
+ if (accumulate) {
+ // We're accumulating children ...
+ request.addChild(child);
+ ++counter;
+ if (counter <= count) continue;
+ break;
+ }
+ // Haven't found the previous sibling yet ...
+ if (child.isSame(previousSibling)) {
+ accumulate = true;
+ }
+ }
+ } else {
+ // The children were not found in the cache, so we have to search the database.
+ // We don't know the UUID of the parent, so find the previous sibling and
+ // then get the starting index and the parent UUID ...
+ ChildEntity previousChild = actualSibling.childEntity;
+ if (previousChild == null) {
+ Query query = entities.createNamedQuery("ChildEntity.findByChildUuid");
+ query.setParameter("childUuidString", actualSibling.uuid);
+ previousChild = (ChildEntity)query.getSingleResult();
+ }
+ int startingIndex = previousChild.getIndexInParent() + 1;
+ String parentUuid = previousChild.getId().getParentUuidString();
+
+ // Now search the database for the children ...
+ Query query = entities.createNamedQuery("ChildEntity.findRangeUnderParent");
+ query.setParameter("parentUuidString", parentUuid);
+ query.setParameter("firstIndex", startingIndex);
+ query.setParameter("afterIndex", startingIndex + request.count());
+ @SuppressWarnings( "unchecked" )
+ List<ChildEntity> children = query.getResultList();
+ LinkedList<Location> allChildren = null;
+ if (startingIndex == 1 && children.size() < request.count()) {
+ // The previous child was the first sibling, and we got fewer children than
+ // the max count. This means we know all of the children, so accumulate the locations
+ // so they can be cached ...
+ allChildren = new LinkedList<Location>();
+ allChildren.add(actualSibling.location);
+ }
+ for (ChildEntity child : children) {
+ String namespaceUri = child.getChildNamespace().getUri();
+ String localName = child.getChildName();
+ Name childName = nameFactory.create(namespaceUri, localName);
+ int sns = child.getSameNameSiblingIndex();
+ Path childPath = pathFactory.create(parentPath, childName, sns);
+ String childUuidString = child.getId().getChildUuidString();
+ Location childLocation = new Location(childPath, UUID.fromString(childUuidString));
+ request.addChild(childLocation);
+ if (allChildren != null) {
+ // We're going to cache the results, so add this child ...
+ allChildren.add(childLocation);
+ }
+ }
+
+ if (allChildren != null) {
+ cache.setAllChildren(parentPath, allChildren);
+ }
+ }
+ }
+
+ } catch (NoResultException e) {
+            // there are no children (probably not expected, but still okay) ...
+ } catch (Throwable e) { // Includes PathNotFoundException
+ request.setError(e);
+ return;
+ }
+ if (actualLocation != null) request.setActualLocationOfStartingAfterNode(actualLocation);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
* @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.ReadAllPropertiesRequest)
*/
@Override
@@ -882,12 +1036,14 @@
String nodeUuidString = uuidString;
LinkedList<Path.Segment> segments = new LinkedList<Path.Segment>();
ChildEntity entity = null;
+ ChildEntity originalEntity = null;
while (uuidString != null && !uuidString.equals(this.rootNodeUuidString)) {
Query query = entities.createNamedQuery("ChildEntity.findByChildUuid");
query.setParameter("childUuidString", uuidString);
try {
// Find the parent of the UUID ...
entity = (ChildEntity)query.getSingleResult();
+ if (originalEntity == null) originalEntity = entity;
String localName = entity.getChildName();
String uri = entity.getChildNamespace().getUri();
int sns = entity.getSameNameSiblingIndex();
@@ -901,7 +1057,7 @@
Path fullPath = pathFactory.createAbsolutePath(segments);
Location newLocation = new Location(fullPath, uuidProperty);
cache.addNewNode(newLocation);
- return new ActualLocation(newLocation, nodeUuidString, entity);
+ return new ActualLocation(newLocation, nodeUuidString, originalEntity);
}
// There is no UUID, so look for a path ...
@@ -917,6 +1073,18 @@
cache.addNewNode(newLocation);
return new ActualLocation(newLocation, rootNodeUuidString, null);
}
+ // See if the parent location is known in the cache ...
+ Location cachedParent = cache.getLocationFor(path.getParent());
+ if (cachedParent != null) {
+ // We know the UUID of the parent, so we can find the child a little faster ...
+ ChildEntity child = findByPathSegment(cachedParent.getUuid().toString(), path.getLastSegment());
+ uuidString = child.getId().getChildUuidString();
+ Location newLocation = original.with(UUID.fromString(uuidString));
+ cache.addNewNode(newLocation);
+ return new ActualLocation(newLocation, uuidString, child);
+ }
+
+ // We couldn't find the parent, so we need to search by path ...
String parentUuid = this.rootNodeUuidString;
ChildEntity child = null;
for (Path.Segment segment : path) {
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildEntity.java 2008-12-12 00:05:32 UTC (rev 684)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildEntity.java 2008-12-12 23:00:10 UTC (rev 685)
@@ -64,6 +64,7 @@
@Id
private ChildId id;
+ /** The zero-based index */
@Column( name = "CHILD_INDEX", nullable = false, unique = false )
private int indexInParent;
@@ -118,7 +119,9 @@
}
/**
- * @return indexInParent
+ * Get the zero-based index of this child within the parent's list of children
+ *
+ * @return the zero-based index of this child
*/
public int getIndexInParent() {
return indexInParent;
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-12 00:05:32 UTC (rev 684)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-12 23:00:10 UTC (rev 685)
@@ -29,6 +29,7 @@
import static org.jboss.dna.graph.IsNodeWithProperty.hasProperty;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
+import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import javax.persistence.EntityManager;
@@ -58,7 +59,7 @@
*
* @author Randall Hauch
*/
-public abstract class JpaConnectionTest {
+public class JpaConnectionTest {
private ExecutionContext context;
private JpaConnection connection;
@@ -91,11 +92,6 @@
Ejb3Configuration configurator = new Ejb3Configuration();
model.configure(configurator);
configureDatabaseProperties(configurator);
- // configurator.setProperty("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
- // configurator.setProperty("hibernate.connection.driver_class", "org.hsqldb.jdbcDriver");
- // configurator.setProperty("hibernate.connection.username", "sa");
- // configurator.setProperty("hibernate.connection.password", "");
- // configurator.setProperty("hibernate.connection.url", "jdbc:hsqldb:.");
configurator.setProperty("hibernate.show_sql", "false");
configurator.setProperty("hibernate.format_sql", "true");
configurator.setProperty("hibernate.use_sql_comments", "true");
@@ -111,7 +107,13 @@
graph = Graph.create(connection, context);
}
- protected abstract void configureDatabaseProperties( Ejb3Configuration configurator );
+ protected void configureDatabaseProperties( Ejb3Configuration configurator ) {
+ configurator.setProperty("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
+ configurator.setProperty("hibernate.connection.driver_class", "org.hsqldb.jdbcDriver");
+ configurator.setProperty("hibernate.connection.username", "sa");
+ configurator.setProperty("hibernate.connection.password", "");
+ configurator.setProperty("hibernate.connection.url", "jdbc:hsqldb:.");
+ }
@After
public void afterEach() throws Exception {
@@ -686,6 +688,94 @@
assertThat(subgraph.getNode("node3"), hasProperty("property3", "The quick brown fox jumped over the moon. What? "));
}
+ @Test
+ public void shouldReadRangeOfChildren() {
+ // Create a shallow tree with many children under one node ...
+ // /
+ // /node1
+ // /node1/node1
+ // /node1/node2
+ // ...
+ // /node1/node10
+ // /node1/secondBranch1
+ // ...
+
+ graph.batch().create("/node1").with("prop1", "value1").and("prop2", "value2").execute();
+ numPropsOnEach = 3;
+ createTree("/node1", 10, 2, numPropsOnEach, null, true, false);
+
+ // Verify that the children were created ...
+ List<Location> allChildren = graph.getChildren().of("/node1");
+ assertThat(allChildren, hasChildren(child("node1"),
+ child("node2"),
+ child("node3"),
+ child("node4"),
+ child("node5"),
+ child("node6"),
+ child("node7"),
+ child("node8"),
+ child("node9"),
+ child("node10")));
+
+ // Now test reading children in various ranges ...
+ List<Location> children = graph.getChildren().inBlockOf(4).startingAt(4).under("/node1");
+ assertThat(children, is(notNullValue()));
+ assertThat(children, hasChildren(child("node5"), child("node6"), child("node7"), child("node8")));
+
+ children = graph.getChildren().inBlockOf(3).startingAt(4).under("/node1");
+ assertThat(children, is(notNullValue()));
+ assertThat(children, hasChildren(child("node5"), child("node6"), child("node7")));
+
+ children = graph.getChildren().inBlockOf(10).startingAt(7).under("/node1");
+ assertThat(children, is(notNullValue()));
+ assertThat(children, hasChildren(child("node8"), child("node9"), child("node10")));
+ }
+
+ @Test
+ public void shouldReadNextBlockOfChildren() {
+ // Create a shallow tree with many children under one node ...
+ // /
+ // /node1
+ // /node1/node1
+ // /node1/node2
+ // ...
+ // /node1/node10
+ // /node1/secondBranch1
+ // ...
+
+ graph.batch().create("/node1").with("prop1", "value1").and("prop2", "value2").execute();
+ numPropsOnEach = 3;
+ createTree("/node1", 10, 2, numPropsOnEach, null, true, false);
+
+ // Verify that the children were created ...
+ List<Location> allChildren = graph.getChildren().of("/node1");
+ assertThat(allChildren, hasChildren(child("node1"),
+ child("node2"),
+ child("node3"),
+ child("node4"),
+ child("node5"),
+ child("node6"),
+ child("node7"),
+ child("node8"),
+ child("node9"),
+ child("node10")));
+
+ // Now test reading children in various ranges ...
+ Location node4 = allChildren.get(3);
+ List<Location> children = graph.getChildren().inBlockOf(4).startingAfter(node4);
+ assertThat(children, is(notNullValue()));
+ assertThat(children, hasChildren(child("node5"), child("node6"), child("node7"), child("node8")));
+
+ children = graph.getChildren().inBlockOf(3).startingAfter(node4);
+ assertThat(children, is(notNullValue()));
+ assertThat(children, hasChildren(child("node5"), child("node6"), child("node7")));
+
+ Location node7 = allChildren.get(6);
+ children = graph.getChildren().inBlockOf(10).startingAfter(node7);
+ assertThat(children, is(notNullValue()));
+ assertThat(children, hasChildren(child("node8"), child("node9"), child("node10")));
+ }
+
protected int createTree( String initialPath,
int numberPerDepth,
int depth,
15 years, 3 months