Author: rhauch
Date: 2008-12-03 17:46:57 -0500 (Wed, 03 Dec 2008)
New Revision: 652
Added:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/LargeValueId.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/NodeId.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryEntity.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum1.txt
trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum2.txt
trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum3.txt
Removed:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/common/NodeId.java
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/basic/BasicProperty.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/UpdatePropertiesRequest.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/LargeValueEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Namespaces.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java
trunk/extensions/dna-connector-store-jpa/src/test/resources/log4j.properties
Log:
DNA-40 Persistent storage for information not stored in other repository sources
Implemented most of the connector behavior (only CopyBranchRequest remains, along with a
number of minor tasks). Delete now uses an efficient mechanism for finding subgraphs. The
search results are stored in the database (in a "temporary" area), but this makes
computing the subgraph very efficient (one self-join on the children table per LEVEL
of the subgraph, not per node) and helps to keep most of the information out of memory. The
same mechanism is used for ReadBranchRequest (and will be used for CopyBranchRequest).
A number of methods also use Hibernate bulk HQL statements, which should be faster than
loading and processing individual entities. (The subgraph query is just one way in which
they're used.)
Note that the "mark and sweep" approach to deleting nodes was abandoned, because
the JCR spec requires that repositories generate delete events for every node that is
deleted, not just for the top node of the deleted subgraph.
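The per-level expansion can be pictured with a small sketch (illustrative only; it mirrors
the "SubgraphNodeEntity.insertChildren" named query defined later in this commit, using
javax.persistence.Query and the processor's EntityManager field, while the queryId and
maxDepth variables are assumptions for illustration):

    // After persisting a seed row for the subgraph root at depth 0, each pass inserts
    // the next level with one bulk INSERT ... SELECT self-join on the child table.
    Query insert = entities.createNamedQuery("SubgraphNodeEntity.insertChildren");
    insert.setParameter("queryId", queryId);
    int depth = 0;
    int numInserted = 0;
    do {
        insert.setParameter("parentDepth", depth);
        numInserted = insert.executeUpdate(); // bulk HQL; no entities are loaded into memory
        ++depth;
    } while (numInserted > 0 && (maxDepth == 0 || depth < maxDepth));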
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -2929,7 +2929,7 @@
*/
@NotThreadSafe
/*package*/class CompositingRequestQueue implements RequestQueue {
-        private final List<Request> requests = new LinkedList<Request>();
+        private final LinkedList<Request> requests = new LinkedList<Request>();
public Graph getGraph() {
return Graph.this;
@@ -2940,6 +2940,19 @@
}
        public void submit( Request request ) {
+            if (request instanceof UpdatePropertiesRequest && !requests.isEmpty()) {
+                // If the previous request was also an update, then maybe they can be merged ...
+                Request previous = requests.getLast();
+                if (previous instanceof UpdatePropertiesRequest) {
+                    // They can be merged if they have the same location ...
+                    UpdatePropertiesRequest next = (UpdatePropertiesRequest)request;
+                    UpdatePropertiesRequest prev = (UpdatePropertiesRequest)previous;
+                    if (next.on().equals(prev.on())) {
+                        requests.removeLast();
+                        requests.add(prev.mergeWith(next));
+                        return;
+                    }
+                }
+            }
            this.requests.add(request);
        }
@@ -3334,6 +3347,8 @@
}
}
+ protected static final List<Location> NO_CHILDREN = Collections.emptyList();
+
@Immutable
class SubgraphNode implements Node {
private final Location location;
@@ -3346,7 +3361,9 @@
}
public List<Location> getChildren() {
- return request.getChildren(location);
+ List<Location> children = request.getChildren(location);
+ if (children == null) children = NO_CHILDREN;
+ return children;
}
public Graph getGraph() {
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/basic/BasicProperty.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/basic/BasicProperty.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/basic/BasicProperty.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -121,6 +121,8 @@
sb.append(" = ");
if (isSingle()) {
sb.append(getValues().next());
+ } else if (isEmpty()) {
+ sb.append("null");
} else {
sb.append(Arrays.asList(getValuesAsArray()));
}
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/UpdatePropertiesRequest.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/UpdatePropertiesRequest.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/requests/UpdatePropertiesRequest.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -24,12 +24,15 @@
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
+import java.util.Set;
import org.jboss.dna.common.util.CheckArg;
import org.jboss.dna.graph.GraphI18n;
import org.jboss.dna.graph.Location;
+import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.Property;
/**
@@ -102,6 +105,22 @@
}
/**
+     * Create a request to update the properties on the node at the supplied location.
+     *
+     * @param on the location of the node whose properties are to be updated
+     * @param properties the new properties on the node
+     * @throws IllegalArgumentException if the location is null or if there are no properties to update
+     */
+    private UpdatePropertiesRequest( Location on,
+                                     List<Property> properties ) {
+        CheckArg.isNotNull(on, "on");
+        CheckArg.isNotNull(properties, "properties");
+        this.on = on;
+        this.properties = properties;
+        CheckArg.isNotEmpty(this.properties, "properties");
+    }
+
+ /**
* {@inheritDoc}
*
* @see org.jboss.dna.graph.requests.Request#isReadOnly()
@@ -192,4 +211,39 @@
return "update properties on " + on() + " to " +
properties();
}
+    /**
+     * Merge these updates with those in the supplied request, with the supplied changes overwriting any similar changes on
+     * this node.
+     *
+     * @param other the other updates that are to be merged with these
+     * @return the merged request
+     */
+    public UpdatePropertiesRequest mergeWith( UpdatePropertiesRequest other ) {
+        if (other == null) return this;
+        if (other.properties().size() == 1) {
+            Property newProp = other.properties.get(0);
+            List<Property> newProps = new LinkedList<Property>();
+            for (Property prop : this.properties) {
+                if (!prop.getName().equals(newProp.getName())) {
+                    newProps.add(prop);
+                }
+            }
+            newProps.add(newProp);
+            return new UpdatePropertiesRequest(on, Collections.unmodifiableList(newProps));
+        }
+        Set<Name> otherNames = new HashSet<Name>();
+        for (Property prop : other.properties()) {
+            otherNames.add(prop.getName());
+        }
+        List<Property> newProps = new LinkedList<Property>();
+        for (Property prop : this.properties) {
+            if (!otherNames.contains(prop.getName())) {
+                newProps.add(prop);
+            }
+        }
+        newProps.addAll(other.properties);
+        return new UpdatePropertiesRequest(on, Collections.unmodifiableList(newProps));
+    }
+
}
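
As an illustration of the merge semantics (names here are hypothetical; this assumes the
class's existing public constructor that takes a Location and one or more properties):

    UpdatePropertiesRequest first = new UpdatePropertiesRequest(on, title1, author);
    UpdatePropertiesRequest second = new UpdatePropertiesRequest(on, title2);
    UpdatePropertiesRequest merged = first.mergeWith(second);
    // merged.properties() contains author and title2; title1 was replaced because
    // "second" set a property with the same name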
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -27,7 +27,6 @@
import org.jboss.dna.connector.store.jpa.JpaConnectorI18n;
import org.jboss.dna.connector.store.jpa.Model;
import org.jboss.dna.connector.store.jpa.models.common.NamespaceEntity;
-import org.jboss.dna.connector.store.jpa.models.common.NodeId;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.requests.processor.RequestProcessor;
@@ -72,8 +71,11 @@
configurator.addAnnotatedClass(NodeId.class);
configurator.addAnnotatedClass(PropertiesEntity.class);
configurator.addAnnotatedClass(LargeValueEntity.class);
+ configurator.addAnnotatedClass(LargeValueId.class);
configurator.addAnnotatedClass(ChildEntity.class);
configurator.addAnnotatedClass(ChildId.class);
+ configurator.addAnnotatedClass(SubgraphQueryEntity.class);
+ configurator.addAnnotatedClass(SubgraphNodeEntity.class);
// Set the cache information for each persistent class ...
        // configurator.setProperty("hibernate.ejb.classcache." + KidpackNode.class.getName(), "read-write");
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -36,6 +36,7 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.UUID;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
@@ -50,7 +51,7 @@
import org.jboss.dna.common.util.StringUtil;
import org.jboss.dna.connector.store.jpa.JpaConnectorI18n;
import org.jboss.dna.connector.store.jpa.models.common.NamespaceEntity;
-import org.jboss.dna.connector.store.jpa.models.common.NodeId;
+import org.jboss.dna.connector.store.jpa.util.Namespaces;
import org.jboss.dna.connector.store.jpa.util.Serializer;
import org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues;
import org.jboss.dna.graph.DnaLexicon;
@@ -73,6 +74,7 @@
import org.jboss.dna.graph.requests.MoveBranchRequest;
import org.jboss.dna.graph.requests.ReadAllChildrenRequest;
import org.jboss.dna.graph.requests.ReadAllPropertiesRequest;
+import org.jboss.dna.graph.requests.ReadBranchRequest;
import org.jboss.dna.graph.requests.ReadNodeRequest;
import org.jboss.dna.graph.requests.ReadPropertyRequest;
import org.jboss.dna.graph.requests.UpdatePropertiesRequest;
@@ -82,18 +84,18 @@
* @author Randall Hauch
*/
@NotThreadSafe
-public class BasicRequestProcessor extends RequestProcessor implements LargeValues {
+public class BasicRequestProcessor extends RequestProcessor {
- private final EntityManager entities;
- private final ValueFactory<String> stringFactory;
- private final PathFactory pathFactory;
- private final NameFactory nameFactory;
- private final Namespaces namespaces;
- private final UUID rootNodeUuid;
- private final String rootNodeUuidString;
- private final Serializer serializer;
- private final long largeValueMinimumSizeInBytes;
- private final boolean compressData;
+ protected final EntityManager entities;
+ protected final ValueFactory<String> stringFactory;
+ protected final PathFactory pathFactory;
+ protected final NameFactory nameFactory;
+ protected final Namespaces namespaces;
+ protected final UUID rootNodeUuid;
+ protected final String rootNodeUuidString;
+ protected final Serializer serializer;
+ protected final long largeValueMinimumSizeInBytes;
+ protected final boolean compressData;
protected final Logger logger;
/**
@@ -120,9 +122,9 @@
this.namespaces = new Namespaces(entityManager);
this.rootNodeUuid = rootNodeUuid;
this.rootNodeUuidString = this.rootNodeUuid.toString();
- this.serializer = new Serializer(context, true);
this.largeValueMinimumSizeInBytes = largeValueMinimumSizeInBytes;
this.compressData = compressData;
+ this.serializer = new Serializer(context, true);
this.logger = getExecutionContext().getLogger(getClass());
// Start the transaction ...
@@ -148,19 +150,27 @@
            // We need to look for an existing UUID property in the request,
            // and since we have to iterate through the properties, go ahead and serialize them right away ...
-            childUuidString = createProperties(null, request.properties());
+ String uuidString = null;
+ for (Property property : request.properties()) {
+ if (property.getName().equals(DnaLexicon.UUID)) {
+ uuidString = stringFactory.create(property.getFirstValue());
+ break;
+ }
+ }
+ if (uuidString == null) uuidString = UUID.randomUUID().toString();
+ childUuidString = createProperties(uuidString, request.properties());
// Find or create the namespace for the child ...
Name childName = request.named();
String childNsUri = childName.getNamespaceUri();
- Integer nsId = namespaces.getId(childNsUri, true);
- assert nsId != null;
+ NamespaceEntity ns = namespaces.get(childNsUri, true);
+ assert ns != null;
            // Find the largest SNS index in the existing ChildEntity objects with the same name ...
            String childLocalName = childName.getLocalName();
            Query query = entities.createNamedQuery("ChildEntity.findMaximumSnsIndex");
query.setParameter("parentUuid", parentUuidString);
- query.setParameter("ns", nsId);
+ query.setParameter("ns", ns.getId());
query.setParameter("childName", childLocalName);
int nextSnsIndex = 1;
try {
@@ -180,8 +190,6 @@
}
// Create the new ChildEntity ...
- NamespaceEntity ns = entities.find(NamespaceEntity.class, nsId);
- assert ns != null;
ChildId id = new ChildId(parentUuidString, childUuidString);
            ChildEntity entity = new ChildEntity(id, nextIndexInParent, ns, childLocalName, nextSnsIndex);
entities.persist(entity);
@@ -230,11 +238,12 @@
boolean compressed = entity.isCompressed();
            Collection<Property> properties = new LinkedList<Property>();
byte[] data = entity.getData();
+ LargeValueSerializer largeValues = new LargeValueSerializer(entity);
ByteArrayInputStream bais = new ByteArrayInputStream(data);
InputStream is = compressed ? new ZipInputStream(bais) : bais;
ObjectInputStream ois = new ObjectInputStream(is);
try {
- serializer.deserializeAllProperties(ois, properties, this);
+ serializer.deserializeAllProperties(ois, properties, largeValues);
for (Property property : properties) {
request.addProperty(property);
}
@@ -336,11 +345,12 @@
int propertyCount = entity.getPropertyCount();
            Collection<Property> properties = new ArrayList<Property>(propertyCount);
byte[] data = entity.getData();
+ LargeValueSerializer largeValues = new LargeValueSerializer(entity);
ByteArrayInputStream bais = new ByteArrayInputStream(data);
InputStream is = compressed ? new ZipInputStream(bais) : bais;
ObjectInputStream ois = new ObjectInputStream(is);
try {
- serializer.deserializeAllProperties(ois, properties, this);
+ serializer.deserializeAllProperties(ois, properties, largeValues);
for (Property property : properties) {
request.addProperty(property);
}
@@ -395,12 +405,13 @@
int propertyCount = entity.getPropertyCount();
            Collection<Property> properties = new ArrayList<Property>(propertyCount);
byte[] data = entity.getData();
+ LargeValueSerializer largeValues = new LargeValueSerializer(entity);
ByteArrayInputStream bais = new ByteArrayInputStream(data);
InputStream is = compressed ? new ZipInputStream(bais) : bais;
ObjectInputStream ois = new ObjectInputStream(is);
try {
                Serializer.LargeValues skippedLargeValues = Serializer.NO_LARGE_VALUES;
-                serializer.deserializeSomeProperties(ois, properties, this, skippedLargeValues, propertyName);
+                serializer.deserializeSomeProperties(ois, properties, largeValues, skippedLargeValues, propertyName);
for (Property property : properties) {
request.setProperty(property); // should be only one property
}
@@ -436,14 +447,8 @@
PropertiesEntity entity = null;
try {
entity = (PropertiesEntity)query.getSingleResult();
+ final boolean hadLargeValues = !entity.getLargeValues().isEmpty();
- // Determine which large values are referenced ...
- Collection<String> hexKeys = null;
- String largeValueHexKeys = entity.getLargeValueKeys();
- if (largeValueHexKeys != null) {
- hexKeys = createHexValues(largeValueHexKeys);
- }
-
            // Prepare the streams so we can deserialize all existing properties and reserialize the old and updated
            // properties ...
boolean compressed = entity.isCompressed();
@@ -454,10 +459,11 @@
OutputStream os = compressed ? new ZipOutputStream(baos) : baos;
ObjectOutputStream oos = new ObjectOutputStream(os);
int numProperties = 0;
- SkippedLargeValues skipped = new SkippedLargeValues();
- RecordingLargeValues largeValues = new RecordingLargeValues();
+            Set<String> largeValueHashesWritten = hadLargeValues ? new HashSet<String>() : null;
+            LargeValueSerializer largeValues = new LargeValueSerializer(entity, largeValueHashesWritten);
+ SkippedLargeValues removedValues = new SkippedLargeValues(largeValues);
try {
-                numProperties = serializer.reserializeProperties(ois, oos, request.properties(), largeValues, skipped);
+                numProperties = serializer.reserializeProperties(ois, oos, request.properties(), largeValues, removedValues);
} finally {
try {
ois.close();
@@ -465,21 +471,18 @@
oos.close();
}
}
- largeValueHexKeys = createHexValuesString(largeValues.writtenKeys);
entity.setPropertyCount(numProperties);
entity.setData(baos.toByteArray());
entity.setCompressed(compressData);
- entity.setLargeValueKeys(largeValueHexKeys);
-            // Update the large values that used to be reference but no longer are ...
- if (hexKeys != null) {
- for (String oldHexKey : skipped.skippedKeys) {
-                    LargeValueEntity largeValue = entities.find(LargeValueEntity.class, oldHexKey);
- if (largeValue != null) {
- if (largeValue.decrementUsageCount() == 0) {
- entities.remove(largeValue);
- }
- }
+            // The new large values were recorded and associated with the properties entity during reserialization.
+ // However, any values no longer used now need to be removed ...
+ if (hadLargeValues) {
+                // Remove any large value from the 'skipped' list that was also written ...
+ removedValues.skippedKeys.removeAll(largeValueHashesWritten);
+ for (String oldHexKey : removedValues.skippedKeys) {
+ LargeValueId id = new LargeValueId(oldHexKey);
+ entity.getLargeValues().remove(id);
}
}
} catch (NoResultException e) {
@@ -497,6 +500,90 @@
/**
* {@inheritDoc}
*
+     * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.ReadBranchRequest)
+ */
+ @Override
+ public void process( ReadBranchRequest request ) {
+ logger.trace(request.toString());
+ Location actualLocation = null;
+ try {
+ Location location = request.at();
+ ActualLocation actual = getActualLocation(location);
+ actualLocation = actual.location;
+ Path path = actualLocation.getPath();
+
+            // Record the location of each node by its UUID; we'll use this when processing the properties ...
+            Map<String, Location> locationsByUuid = new HashMap<String, Location>();
+ locationsByUuid.put(actual.uuid, location);
+
+ // Compute the subgraph, including the root ...
+            SubgraphQuery query = SubgraphQuery.create(getExecutionContext(), entities, actualLocation.getUuid(), path, 0);
+
+ // Record all of the children ...
+ Path parent = path;
+ Location parentLocation = actualLocation;
+ List<Location> children = new LinkedList<Location>();
+ for (ChildEntity child : query.getNodes(false)) {
+ String namespaceUri = child.getChildNamespace().getUri();
+ String localName = child.getChildName();
+ Name childName = nameFactory.create(namespaceUri, localName);
+ int sns = child.getSameNameSiblingIndex();
+ Path childPath = pathFactory.create(path, childName, sns);
+ String childUuidString = child.getId().getChildUuidString();
+                Location childLocation = new Location(childPath, UUID.fromString(childUuidString));
+ locationsByUuid.put(childUuidString, childLocation);
+ // Determine if this child goes into the current list of children ...
+ Path childParent = childPath.getParent();
+ if (childParent.equals(parent)) {
+ children.add(childLocation);
+ } else {
+ // Record the children found so far ...
+ request.setChildren(parentLocation, children);
+                    parentLocation = locationsByUuid.get(child.getId().getParentUuidString());
+ parent = parentLocation.getPath();
+ children = new LinkedList<Location>();
+ children.add(childLocation);
+ }
+ }
+ if (!children.isEmpty()) {
+ request.setChildren(parentLocation, children);
+ }
+
+ // Now record all of the properties ...
+ for (PropertiesEntity props : query.getProperties(true)) {
+ boolean compressed = props.isCompressed();
+ int propertyCount = props.getPropertyCount();
+                Collection<Property> properties = new ArrayList<Property>(propertyCount);
+                Location nodeLocation = locationsByUuid.get(props.getId().getUuidString());
+ assert nodeLocation != null;
+                // Record the UUID as a property, since it's not stored in the serialized properties ...
+                properties.add(nodeLocation.getIdProperty(DnaLexicon.UUID));
+ // Deserialize all the properties (except the UUID)...
+ byte[] data = props.getData();
+ LargeValueSerializer largeValues = new LargeValueSerializer(props);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new ZipInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.deserializeAllProperties(ois, properties, largeValues);
+ request.setProperties(nodeLocation, properties);
+ } finally {
+ ois.close();
+ }
+ }
+
+            // TODO: Now update the 'index in parent' and SNS indexes of the siblings of the deleted node.
+
+ } catch (Throwable e) { // Includes PathNotFoundException
+ request.setError(e);
+ return;
+ }
+ request.setActualLocationOfNode(actualLocation);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
     * @see org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.CopyBranchRequest)
*/
@Override
@@ -512,6 +599,27 @@
@Override
public void process( DeleteBranchRequest request ) {
logger.trace(request.toString());
+ Location actualLocation = null;
+ try {
+ Location location = request.at();
+ ActualLocation actual = getActualLocation(location);
+ actualLocation = actual.location;
+ Path path = actualLocation.getPath();
+
+ // Compute the subgraph, including the root ...
+            SubgraphQuery query = SubgraphQuery.create(getExecutionContext(), entities, actualLocation.getUuid(), path, 0);
+
+            // Get the locations of all deleted nodes, which will be required by events ...
+ // List<Location> deletedLocations = query.getNodeLocations(true);
+
+ // Now delete the subgraph ...
+ query.deleteSubgraph(true);
+
+ } catch (Throwable e) { // Includes PathNotFoundException
+ request.setError(e);
+ return;
+ }
+ request.setActualLocationOfNode(actualLocation);
}
/**
@@ -603,32 +711,27 @@
protected String createProperties( String uuidString,
                                       Collection<Property> properties ) throws IOException {
- RecordingLargeValues largeValues = new RecordingLargeValues();
+ assert uuidString != null;
+
+ // Create the PropertiesEntity ...
+ NodeId nodeId = new NodeId(uuidString);
+ PropertiesEntity props = new PropertiesEntity(nodeId);
+
+ LargeValueSerializer largeValues = new LargeValueSerializer(props);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
OutputStream os = compressData ? new ZipOutputStream(baos) : baos;
ObjectOutputStream oos = new ObjectOutputStream(os);
int numProperties = properties.size();
try {
- oos.writeInt(numProperties);
- for (Property property : properties) {
-                if (uuidString == null && property.getName().equals(DnaLexicon.UUID)) {
- uuidString = stringFactory.create(property.getFirstValue());
- }
-                if (serializer.serializeProperty(oos, property, largeValues)) ++numProperties;
- }
+ serializer.serializeProperties(oos, numProperties, properties, largeValues);
} finally {
oos.close();
}
-        String largeValueHexHashesString = createHexValuesString(largeValues.writtenKeys);
- if (uuidString == null) uuidString = stringFactory.create(UUID.randomUUID());
- // Create the PropertiesEntity ...
- NodeId nodeId = new NodeId(uuidString);
- PropertiesEntity props = new PropertiesEntity(nodeId);
props.setData(baos.toByteArray());
props.setCompressed(compressData);
props.setPropertyCount(numProperties);
- props.setLargeValueKeys(largeValueHexHashesString);
+
entities.persist(props);
return uuidString;
}
@@ -684,26 +787,6 @@
// Then walk up the ancestors and build the path.
String nodeUuidString = uuidString;
        LinkedList<Path.Segment> segments = new LinkedList<Path.Segment>();
-        // while (uuidString != null && !uuidString.equals(this.rootNodeUuidString)) {
-        // // Find the parent of the child, along with the child's name and SNS index ...
-        // Query query = entities.createNamedQuery("ChildEntity.findValuesByChildUuid");
- // query.setParameter("childUuidString", uuidString);
- // try {
- // Object[] record = (Object[])query.getSingleResult();
- // String parentUuidString = (String)record[0];
- // String uri = (String)record[1];
- // String localName = (String)record[2];
- // int sns = (Integer)record[3];
-        // // Now create the path segment and set the next child UUID as the parent of this child ...
- // Name name = nameFactory.create(uri, localName);
- // segments.addFirst(pathFactory.createSegment(name, sns));
- // uuidString = parentUuidString;
- // } catch (NoResultException e) {
- // uuidString = null;
- // }
- // }
- // Path fullPath = pathFactory.createAbsolutePath(segments);
-        // return new ActualLocation(new Location(fullPath, uuidProperty), nodeUuidString, null);
ChildEntity entity = null;
        while (uuidString != null && !uuidString.equals(this.rootNodeUuidString)) {
            Query query = entities.createNamedQuery("ChildEntity.findByChildUuid");
@@ -800,15 +883,15 @@
Name name = pathSegment.getName();
String localName = name.getLocalName();
String nsUri = name.getNamespaceUri();
- Integer nsId = namespaces.getId(nsUri, false);
- int snsIndex = pathSegment.hasIndex() ? pathSegment.getIndex() : 1;
- if (nsId == null) {
+ NamespaceEntity ns = namespaces.get(nsUri, false);
+ if (ns == null) {
                // If the namespace can't be found, then certainly the node won't be found ...
return null;
}
+ int snsIndex = pathSegment.hasIndex() ? pathSegment.getIndex() : 1;
            Query query = entities.createNamedQuery("ChildEntity.findByPathSegment");
query.setParameter("parentUuidString", parentUuid);
- query.setParameter("ns", nsId);
+ query.setParameter("ns", ns.getId());
query.setParameter("childName", localName);
query.setParameter("sns", snsIndex);
try {
@@ -837,97 +920,131 @@
return Arrays.asList(hexValuesString.split(","));
}
- /**
- * {@inheritDoc}
- *
-     * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#getMinimumSize()
- */
- public long getMinimumSize() {
- return largeValueMinimumSizeInBytes;
- }
+ protected class LargeValueSerializer implements LargeValues {
+ private final PropertiesEntity properties;
+ private Set<String> written;
- /**
- * {@inheritDoc}
- *
-     * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#read(org.jboss.dna.graph.properties.ValueFactories,
-     *      byte[], long)
- */
- public Object read( ValueFactories valueFactories,
- byte[] hash,
- long length ) throws IOException {
- String hashStr = StringUtil.getHexString(hash);
- LargeValueEntity entity = entities.find(LargeValueEntity.class, hashStr);
- if (entity == null) {
+ public LargeValueSerializer( PropertiesEntity entity ) {
+ this.properties = entity;
+ this.written = null;
+ }
+
+ public LargeValueSerializer( PropertiesEntity entity,
+ Set<String> written ) {
+ this.properties = entity;
+ this.written = written;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+         * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#getMinimumSize()
+ */
+ public long getMinimumSize() {
+ return largeValueMinimumSizeInBytes;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+         * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#read(org.jboss.dna.graph.properties.ValueFactories,
+         *      byte[], long)
+ */
+ public Object read( ValueFactories valueFactories,
+ byte[] hash,
+ long length ) throws IOException {
+ String hashStr = StringUtil.getHexString(hash);
+ // Find the large value ...
+ LargeValueId largeValueId = new LargeValueId(hashStr);
+            LargeValueEntity entity = entities.find(LargeValueEntity.class, largeValueId);
+ if (entity != null) {
+ // Find the large value from the existing property entity ...
+ byte[] data = entity.getData();
+ return valueFactories.getValueFactory(entity.getType()).create(data);
+ }
            throw new IOException(JpaConnectorI18n.unableToReadLargeValue.text(getSourceName(), hashStr));
}
- byte[] data = entity.getData();
- return valueFactories.getValueFactory(entity.getType()).create(data);
- }
- /**
- * {@inheritDoc}
- *
-     * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#write(byte[], long,
-     *      org.jboss.dna.graph.properties.PropertyType, java.lang.Object)
- */
- public void write( byte[] hash,
- long length,
- PropertyType type,
- Object value ) throws IOException {
- if (value == null) return;
- String hashStr = StringUtil.getHexString(hash);
- LargeValueEntity entity = entities.find(LargeValueEntity.class, hashStr);
- if (entity == null) {
- entity = new LargeValueEntity();
- entity.setCompressed(true);
- entity.setHash(hashStr);
- entity.setLength(length);
- entity.setType(type);
- ValueFactories factories = getExecutionContext().getValueFactories();
- byte[] bytes = null;
- switch (type) {
- case BINARY:
- Binary binary = factories.getBinaryFactory().create(value);
- InputStream stream = null;
- try {
- binary.acquire();
- stream = binary.getStream();
- if (compressData) stream = new ZipInputStream(stream);
- bytes = IoUtil.readBytes(stream);
- } finally {
+ /**
+ * {@inheritDoc}
+ *
+         * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#write(byte[], long,
+         *      org.jboss.dna.graph.properties.PropertyType, java.lang.Object)
+ */
+ public void write( byte[] hash,
+ long length,
+ PropertyType type,
+ Object value ) throws IOException {
+ if (value == null) return;
+ String hashStr = StringUtil.getHexString(hash);
+ if (written != null) written.add(hashStr);
+
+ // Look for an existing value in the collection ...
+ final LargeValueId id = new LargeValueId(hashStr);
+ for (LargeValueId existing : properties.getLargeValues()) {
+ if (existing.equals(id)) {
+ // Already associated with this properties entity
+ return;
+ }
+ }
+ LargeValueEntity entity = entities.find(LargeValueEntity.class, id);
+ if (entity == null) {
+ // We have to create the large value entity ...
+ entity = new LargeValueEntity();
+ entity.setCompressed(true);
+ entity.setId(id);
+ entity.setLength(length);
+ entity.setType(type);
+ ValueFactories factories = getExecutionContext().getValueFactories();
+ byte[] bytes = null;
+ switch (type) {
+ case BINARY:
+ Binary binary = factories.getBinaryFactory().create(value);
+ InputStream stream = null;
try {
- if (stream != null) stream.close();
+ binary.acquire();
+ stream = binary.getStream();
+ if (compressData) stream = new ZipInputStream(stream);
+ bytes = IoUtil.readBytes(stream);
} finally {
- binary.release();
+ try {
+ if (stream != null) stream.close();
+ } finally {
+ binary.release();
+ }
}
- }
- break;
- default:
- String str = factories.getStringFactory().create(value);
- bytes = str.getBytes();
- if (compressData) {
-                    InputStream strStream = new ZipInputStream(new ByteArrayInputStream(bytes));
- try {
- bytes = IoUtil.readBytes(strStream);
- } finally {
- strStream.close();
+ break;
+ default:
+ String str = factories.getStringFactory().create(value);
+ bytes = str.getBytes();
+ if (compressData) {
+                        InputStream strStream = new ZipInputStream(new ByteArrayInputStream(bytes));
+ try {
+ bytes = IoUtil.readBytes(strStream);
+ } finally {
+ strStream.close();
+ }
}
- }
- break;
+ break;
+ }
+ entity.setData(bytes);
+ entities.persist(entity);
}
- entity.setData(bytes);
- entities.persist(entity);
- } else {
-            // There is already an existing value, so we'll reuse it and increment the usage count ...
- entity.incrementUsageCount();
+ // Now associate the large value with the properties entity ...
+ assert id.getHash() != null;
+ properties.getLargeValues().add(id);
}
+
}
protected class RecordingLargeValues implements LargeValues {
- protected Collection<String> readKeys = new HashSet<String>();
- protected Collection<String> writtenKeys = new HashSet<String>();
+ protected final Collection<String> readKeys = new HashSet<String>();
+        protected final Collection<String> writtenKeys = new HashSet<String>();
+ protected final LargeValues delegate;
- RecordingLargeValues() {
+ RecordingLargeValues( LargeValues delegate ) {
+ assert delegate != null;
+ this.delegate = delegate;
}
/**
@@ -936,7 +1053,7 @@
     * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#getMinimumSize()
*/
public long getMinimumSize() {
- return BasicRequestProcessor.this.getMinimumSize();
+ return delegate.getMinimumSize();
}
/**
@@ -950,7 +1067,7 @@
long length ) throws IOException {
String key = StringUtil.getHexString(hash);
readKeys.add(key);
- return BasicRequestProcessor.this.read(valueFactories, hash, length);
+ return delegate.read(valueFactories, hash, length);
}
public void write( byte[] hash,
@@ -959,14 +1076,17 @@
Object value ) throws IOException {
String key = StringUtil.getHexString(hash);
writtenKeys.add(key);
- BasicRequestProcessor.this.write(hash, length, type, value);
+ delegate.write(hash, length, type, value);
}
}
protected class SkippedLargeValues implements LargeValues {
protected Collection<String> skippedKeys = new HashSet<String>();
+ protected final LargeValues delegate;
- SkippedLargeValues() {
+ SkippedLargeValues( LargeValues delegate ) {
+ assert delegate != null;
+ this.delegate = delegate;
}
/**
@@ -975,7 +1095,7 @@
     * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#getMinimumSize()
*/
public long getMinimumSize() {
- return BasicRequestProcessor.this.getMinimumSize();
+ return delegate.getMinimumSize();
}
/**
@@ -1029,27 +1149,4 @@
        return this.location.toString() + " (uuid=" + uuid + ") " + childEntity;
}
}
-
- protected static class Namespaces {
-
- private final EntityManager entityManager;
-        private final Map<String, Integer> cache = new HashMap<String, Integer>();
-
- public Namespaces( EntityManager manager ) {
- this.entityManager = manager;
- }
-
- public Integer getId( String namespaceUri,
- boolean createIfRequired ) {
- Integer id = cache.get(namespaceUri);
- if (id == null) {
-                NamespaceEntity entity = NamespaceEntity.findByUri(entityManager, namespaceUri, createIfRequired);
- if (entity == null) return null;
- id = entity.getId();
- cache.put(namespaceUri, id);
- }
- assert id != null;
- return id;
- }
- }
}
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -22,11 +22,9 @@
package org.jboss.dna.connector.store.jpa.models.basic;
import java.io.Serializable;
-import java.util.UUID;
import javax.persistence.Column;
import javax.persistence.Embeddable;
import org.jboss.dna.common.util.HashCode;
-import org.jboss.dna.connector.store.jpa.models.common.NodeId;
/**
 * A unique identifier for a parent-child relationship.
@@ -41,28 +39,25 @@
*/
private static final long serialVersionUID = 1L;
- @Column( name = "PARENT_UUID", nullable = false )
+ @Column( name = "PARENT_UUID", nullable = false, length = 36 )
private String parentUuidString;
- @Column( name = "CHILD_UUID", nullable = false )
+ @Column( name = "CHILD_UUID", nullable = false, length = 36 )
private String childUuidString;
- private transient UUID parentUuid;
- private transient UUID childUuid;
-
public ChildId() {
}
- public ChildId( UUID parentUuid,
- UUID childUuid ) {
- setParentUuid(parentUuid);
- setChildUuid(childUuid);
- }
+ // public ChildId( UUID parentUuid,
+ // UUID childUuid ) {
+ // setParentUuid(parentUuid);
+ // setChildUuid(childUuid);
+ // }
public ChildId( NodeId parentId,
NodeId childId ) {
- if (parentId != null) setParentUuid(parentId.getUuid());
- if (childId != null) setChildUuid(childId.getUuid());
+ if (parentId != null) setParentUuidString(parentId.getUuidString());
+ if (childId != null) setChildUuidString(childId.getUuidString());
}
public ChildId( String parentUuid,
@@ -71,32 +66,6 @@
setChildUuidString(childUuid);
}
- public UUID getParentUuid() {
- if (parentUuidString == null) return null;
- if (parentUuid == null) {
- parentUuid = UUID.fromString(parentUuidString);
- }
- return parentUuid;
- }
-
- public void setParentUuid( UUID uuid ) {
- this.parentUuid = uuid;
- this.parentUuidString = uuid != null ? uuid.toString() : null;
- }
-
- public UUID getChildUuid() {
- if (childUuidString == null) return null;
- if (childUuid == null) {
- childUuid = UUID.fromString(childUuidString);
- }
- return childUuid;
- }
-
- public void setChildUuid( UUID uuid ) {
- this.childUuid = uuid;
- this.childUuidString = uuid != null ? uuid.toString() : null;
- }
-
/**
* @return parentUuidString
*/
@@ -108,7 +77,6 @@
* @param parentUuidString Sets parentUuidString to the specified value.
*/
public void setParentUuidString( String parentUuidString ) {
- this.parentUuid = null;
this.parentUuidString = parentUuidString;
}
@@ -123,7 +91,6 @@
* @param childUuidString Sets childUuidString to the specified value.
*/
public void setChildUuidString( String childUuidString ) {
- this.childUuid = null;
this.childUuidString = childUuidString;
}
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/LargeValueEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/LargeValueEntity.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/LargeValueEntity.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -23,28 +23,34 @@
import javax.persistence.Column;
import javax.persistence.Entity;
+import javax.persistence.EntityManager;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Id;
import javax.persistence.Lob;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+import javax.persistence.Query;
+import javax.persistence.Table;
import org.jboss.dna.graph.properties.PropertyType;
/**
- * A single property value that is too large to be stored on the individual node.
+ * A single property value that is too large to be stored on the individual node, and which will be shared among all
+ * properties that have the same value.
*
* @author Randall Hauch
*/
-@Entity( name = "DNA_BASIC_LARGE_VALUES" )
+@Entity
+@Table( name = "DNA_BASIC_LARGE_VALUES" )
+@NamedQueries( {
+// @NamedQuery( name = "LargeValueEntity.selectUsed", query = "select value from LargeValueEntity value where value.id in (select values.hash from PropertiesEntity prop join prop.largeValues values)" ),
+@NamedQuery( name = "LargeValueEntity.deleteUnused", query = "delete LargeValueEntity value where value.id not in (select values.hash from PropertiesEntity prop join prop.largeValues values)" )} )
public class LargeValueEntity {
- /**
-     * The 160-bit SHA-1 hash of this value, in hex form (40-bytes). The SHA-1 algorithm is fast and has not yet proven to
-     * have any duplicates. Even if SHA-2 and SHA-3 are better for cryptographically secure purposes, it is doubtful whether
-     * a repository needs more than SHA-1.
- */
@Id
- @Column( name = "SHA1", nullable = false, unique = true, length = 40 )
- private String hash;
+ private LargeValueId id;
/**
     * The property type for this value. Typically, this is {@link PropertyType#STRING} or {@link PropertyType#BINARY}, although
@@ -61,12 +67,6 @@
private long length;
/**
-     * The number of times this value is used. If this value drops below 1, the value could be removed from the store.
- */
- @Column( name = "USAGE_COUNT", nullable = false )
- private int usageCount = 1;
-
- /**
* Flag specifying whether the binary data is stored in a compressed format.
*/
@Column( name = "COMPRESSED", nullable = true )
@@ -80,17 +80,17 @@
private byte[] data;
/**
- * @return hash
+ * @return id
*/
- public String getHash() {
- return hash;
+ public LargeValueId getId() {
+ return id;
}
/**
- * @param hash Sets hash to the specified value.
+ * @param id Sets id to the specified value.
*/
- public void setHash( String hash ) {
- this.hash = hash;
+ public void setId( LargeValueId id ) {
+ this.id = id;
}
/**
@@ -136,29 +136,6 @@
}
/**
- * @return usageCount
- */
- public int getUsageCount() {
- return usageCount;
- }
-
- /**
- * @param usageCount Sets usageCount to the specified value.
- */
- public void setUsageCount( int usageCount ) {
- this.usageCount = usageCount;
- }
-
- public void incrementUsageCount() {
- this.usageCount++;
- }
-
- public int decrementUsageCount() {
- if (this.usageCount == 0) return 0;
- return --this.usageCount;
- }
-
- /**
* @return compressed
*/
public boolean isCompressed() {
@@ -179,7 +156,7 @@
*/
@Override
public int hashCode() {
- return getHash().hashCode();
+ return id.hashCode();
}
/**
@@ -192,7 +169,7 @@
if (obj == this) return true;
if (obj instanceof LargeValueEntity) {
LargeValueEntity that = (LargeValueEntity)obj;
- if (this.getHash().equals(that.getHash())) return true;
+ if (this.getId().equals(that.getId())) return true;
}
return false;
}
@@ -204,6 +181,20 @@
*/
@Override
public String toString() {
-        return "Large " + this.type + " value (hash=" + this.hash + ",compressed=" + isCompressed() + ")";
+        return "Large " + this.type + " value (hash=" + this.getId().getHash() + ",compressed=" + isCompressed() + ")";
}
+
+ /**
+ * Delete all unused large value entities.
+ *
+ * @param manager the manager; never null
+ * @return the number of deleted large values
+ */
+ public static int deleteUnused( EntityManager manager ) {
+ assert manager != null;
+        Query delete = manager.createNamedQuery("LargeValueEntity.deleteUnused");
+ int result = delete.executeUpdate();
+ manager.flush();
+ return result;
+ }
}
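
With the usage counter gone, cleanup becomes a single set-difference bulk delete rather
than per-value decrement bookkeeping. A caller would simply invoke the helper added above
(entityManager here is a hypothetical open EntityManager in an active transaction):

    int removed = LargeValueEntity.deleteUnused(entityManager);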
Added: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/LargeValueId.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/LargeValueId.java	(rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/LargeValueId.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -0,0 +1,98 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.store.jpa.models.basic;
+
+import java.io.Serializable;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+import net.jcip.annotations.Immutable;
+
+/**
+ * A unique identifier for a large value, which is the 160-bit SHA-1 hash of this value, in hex form (40-bytes). The SHA-1
+ * algorithm is fast and has not yet proven to have any duplicates. Even if SHA-2 and SHA-3 are better for cryptographically
+ * secure purposes, it is doubtful whether a repository needs more than SHA-1 for identity purposes.
+ *
+ * @author Randall Hauch
+ */
+@Embeddable
+@Immutable
+@org.hibernate.annotations.Immutable
+public class LargeValueId implements Serializable {
+
+ /**
+ * Version {@value}
+ */
+ private static final long serialVersionUID = 1L;
+
+ @Column( name = "SHA1", nullable = false, length = 40 )
+ private String hash;
+
+ public LargeValueId() {
+ }
+
+ public LargeValueId( String hash ) {
+ this.hash = hash;
+ }
+
+ /**
+ * @return hash
+ */
+ public String getHash() {
+ return hash;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#hashCode()
+ */
+ @Override
+ public int hashCode() {
+ return hash.hashCode();
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#equals(java.lang.Object)
+ */
+ @Override
+ public boolean equals( Object obj ) {
+ if (obj == this) return true;
+ if (obj instanceof LargeValueId) {
+ LargeValueId that = (LargeValueId)obj;
+ return this.hash.equals(that.hash);
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "Large value " + hash;
+ }
+
+}
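
For reference, a 40-character hex key like the one this class wraps can be derived from a
value's bytes as follows (a sketch only; the connector itself receives the SHA-1 hash from
the serializer and converts it with StringUtil.getHexString):

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    static String sha1Hex( byte[] data ) throws NoSuchAlgorithmException {
        byte[] hash = MessageDigest.getInstance("SHA-1").digest(data); // 20 bytes = 160 bits
        StringBuilder sb = new StringBuilder(40);
        for (byte b : hash) {
            sb.append(Character.forDigit((b >> 4) & 0xf, 16)); // high nibble
            sb.append(Character.forDigit(b & 0xf, 16)); // low nibble
        }
        return sb.toString();
    }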
Property changes on:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/LargeValueId.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Copied: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/NodeId.java (from rev 647, trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/common/NodeId.java)
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/NodeId.java	(rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/NodeId.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -0,0 +1,110 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.store.jpa.models.basic;
+
+import java.io.Serializable;
+import java.util.UUID;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+import net.jcip.annotations.Immutable;
+
+/**
+ * An identifier for a node, comprised of a single {@link UUID}, and {@link Embeddable embeddable} in a persistent entity. The
+ * identifier is stored as a single string column holding the UUID's string form.
+ *
+ * @author Randall Hauch
+ */
+@Embeddable
+@Immutable
+@org.hibernate.annotations.Immutable
+public class NodeId implements Serializable {
+
+ /**
+ * Version {@value}
+ */
+ private static final long serialVersionUID = 1L;
+
+ @Column( name = "UUID", nullable = true )
+ private String uuidString;
+
+ public NodeId() {
+ }
+
+ public NodeId( String uuidString ) {
+ this.uuidString = uuidString;
+ }
+
+ /**
+ * @return uuidString
+ */
+ public String getUuidString() {
+ return uuidString;
+ }
+
+ /**
+ * @param uuidString Sets uuidString to the specified value.
+ */
+ public void setUuidString( String uuidString ) {
+ this.uuidString = uuidString;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#hashCode()
+ */
+ @Override
+ public int hashCode() {
+ return uuidString.hashCode();
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#equals(java.lang.Object)
+ */
+ @Override
+ public boolean equals( Object obj ) {
+ if (obj == this) return true;
+ if (obj instanceof NodeId) {
+ NodeId that = (NodeId)obj;
+ if (this.uuidString == null) {
+ if (that.uuidString != null) return false;
+ } else {
+ if (!this.uuidString.equals(that.uuidString)) return false;
+ }
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return uuidString;
+ }
+}
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -21,14 +21,18 @@
*/
package org.jboss.dna.connector.store.jpa.models.basic;
+import java.util.Collection;
+import java.util.HashSet;
import javax.persistence.Column;
-import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.JoinTable;
import javax.persistence.Lob;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
-import org.jboss.dna.connector.store.jpa.models.common.NodeId;
import org.jboss.dna.connector.store.jpa.util.Serializer;
/**
@@ -41,9 +45,11 @@
*/
@Entity
@Table( name = "DNA_BASIC_NODEPROPS" )
-@NamedQueries( {@NamedQuery( name = "PropertiesEntity.findByUuid", query = "select prop from PropertiesEntity as prop where prop.id.uuidString = :uuid" )} )
+@NamedQueries( {
+    @NamedQuery( name = "PropertiesEntity.findByUuid", query = "select prop from PropertiesEntity as prop where prop.id.uuidString = :uuid" ),
+    @NamedQuery( name = "PropertiesEntity.deleteByUuid", query = "delete PropertiesEntity prop where prop.id.uuidString = :uuid" )} )
public class PropertiesEntity {
- @EmbeddedId
+ @Id
private NodeId id;
@Lob
@@ -59,8 +65,9 @@
@Column( name = "COMPRESSED", nullable = true )
private Boolean compressed;
- @Column( name = "LRG_VL_KEYS", nullable = true )
- private String largeValueKeys;
+ @org.hibernate.annotations.CollectionOfElements( fetch = FetchType.LAZY )
+    @JoinTable( name = "DNA_LARGEVALUE_USAGES", joinColumns = @JoinColumn( name = "NODE_UUID" ) )
+    private Collection<LargeValueId> largeValues = new HashSet<LargeValueId>();
public PropertiesEntity() {
}
@@ -134,20 +141,13 @@
}
/**
- * @return largeValueKeys
+ * @return largeValues
*/
- public String getLargeValueKeys() {
- return largeValueKeys;
+ public Collection<LargeValueId> getLargeValues() {
+ return largeValues;
}
/**
- * @param largeValueKeys Sets largeValueKeys to the specified value.
- */
- public void setLargeValueKeys( String largeValueKeys ) {
- this.largeValueKeys = largeValueKeys;
- }
-
- /**
* {@inheritDoc}
*
* @see java.lang.Object#hashCode()
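
The comma-separated key string is thus replaced by a real association: each referenced
large value is linked to its node through the DNA_LARGEVALUE_USAGES join table. For
example (a sketch; hexKey and uuidString are hypothetical values):

    PropertiesEntity props = entityManager.find(PropertiesEntity.class, new NodeId(uuidString));
    props.getLargeValues().add(new LargeValueId(hexKey)); // records a usage of the shared value
    props.getLargeValues().remove(new LargeValueId(hexKey)); // drops that usage again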
Added: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java	(rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -0,0 +1,162 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.store.jpa.models.basic;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+import javax.persistence.Table;
+import org.hibernate.annotations.Index;
+
+/**
+ * Represents a single node that appears in a subgraph.
+ *
+ * @author Randall Hauch
+ * @see SubgraphQueryEntity
+ */
+@Entity
+@Table( name = "DNA_SUBGRAPH_NODES" )
+@org.hibernate.annotations.Table( appliesTo = "DNA_SUBGRAPH_NODES", indexes = @Index( name = "QUERYID_INX", columnNames = {"QUERY_ID"} ) )
+@NamedQueries( {
+    @NamedQuery( name = "SubgraphNodeEntity.insertChildren", query = "insert into SubgraphNodeEntity(queryId,nodeUuid,depth,parentIndexInParent,indexInParent) select parentNode.queryId, child.id.childUuidString, parentNode.depth+1, parentNode.indexInParent, child.indexInParent from ChildEntity child, SubgraphNodeEntity parentNode where child.deleted is null and child.id.parentUuidString = parentNode.nodeUuid and parentNode.queryId = :queryId and parentNode.depth = :parentDepth" ),
+    @NamedQuery( name = "SubgraphNodeEntity.getCount", query = "select count(*) from SubgraphNodeEntity where queryId = :queryId" ),
+    @NamedQuery( name = "SubgraphNodeEntity.getPropertiesEntities", query = "select props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth order by node.depth, node.parentIndexInParent, node.indexInParent" ),
+    @NamedQuery( name = "SubgraphNodeEntity.getPropertiesEntitiesWithLargeValues", query = "select props from PropertiesEntity props, SubgraphNodeEntity node where props.id.uuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth and size(props.largeValues) > 0" ),
+    @NamedQuery( name = "SubgraphNodeEntity.getChildEntities", query = "select child from ChildEntity child, SubgraphNodeEntity node where child.id.childUuidString = node.nodeUuid and node.queryId = :queryId and node.depth >= :depth order by node.depth, node.parentIndexInParent, node.indexInParent" ),
+    @NamedQuery( name = "SubgraphNodeEntity.deletePropertiesEntities", query = "delete PropertiesEntity props where props.id.uuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId and node.depth >= :depth )" ),
+    @NamedQuery( name = "SubgraphNodeEntity.deleteChildEntities", query = "delete ChildEntity child where child.id.childUuidString in ( select node.nodeUuid from SubgraphNodeEntity node where node.queryId = :queryId and node.depth >= :depth )" ),
+    @NamedQuery( name = "SubgraphNodeEntity.deleteByQueryId", query = "delete SubgraphNodeEntity where queryId = :queryId" )} )
+public class SubgraphNodeEntity {
+
+ @Id
+ @Column( name = "ID" )
+ @GeneratedValue( strategy = GenerationType.AUTO )
+ private Integer id;
+
+ @Column( name = "QUERY_ID", nullable = false, unique = false, updatable =
false )
+ private Long queryId;
+
+ @Column( name = "UUID", updatable = false, nullable = false, length = 36 )
+ private String nodeUuid;
+
+ @Column( name = "DEPTH", updatable = false, nullable = false )
+ private int depth;
+
+ @Column( name = "PARENT_NUM", updatable = false, nullable = false )
+ private int parentIndexInParent;
+
+ @Column( name = "CHILD_NUM", updatable = false, nullable = false )
+ private int indexInParent;
+
+ public SubgraphNodeEntity() {
+ }
+
+ public SubgraphNodeEntity( Long queryId,
+ String nodeUuid,
+ int depth ) {
+ this.queryId = queryId;
+ this.nodeUuid = nodeUuid;
+ this.depth = depth;
+ }
+
+ /**
+ * @return id
+ */
+ public Integer getId() {
+ return id;
+ }
+
+ /**
+ * @return depth
+ */
+ public int getDepth() {
+ return depth;
+ }
+
+ /**
+ * @return nodeUuid
+ */
+ public String getNodeUuid() {
+ return nodeUuid;
+ }
+
+ /**
+ * @return queryId
+ */
+ public Long getQueryId() {
+ return queryId;
+ }
+
+ /**
+ * @return indexInParent
+ */
+ public int getIndexInParent() {
+ return indexInParent;
+ }
+
+ /**
+ * @return parentIndexInParent
+ */
+ public int getParentIndexInParent() {
+ return parentIndexInParent;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#hashCode()
+ */
+ @Override
+ public int hashCode() {
+ return id;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#equals(java.lang.Object)
+ */
+ @Override
+ public boolean equals( Object obj ) {
+ if (obj == this) return true;
+ if (obj instanceof SubgraphNodeEntity) {
+ SubgraphNodeEntity that = (SubgraphNodeEntity)obj;
+ if (this.id.equals(that.id)) return true;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "" + id + " - Query " + queryId + "; depth="
+ depth + "; node=" + nodeUuid + " at index " + indexInParent;
+ }
+
+}
Property changes on: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphNodeEntity.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
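
The "SubgraphNodeEntity.insertChildren" named query above is the core of the subgraph mechanism: each execution performs a single insert-via-select that joins ChildEntity against the rows already recorded for the query, adding the next level of children. A minimal sketch of how a caller drives it one level at a time (assuming an open EntityManager named manager and the id of an already-persisted SubgraphQueryEntity; SubgraphQuery.create below does exactly this):

    Query insert = manager.createNamedQuery("SubgraphNodeEntity.insertChildren");
    int parentDepth = 0;
    while (true) {
        insert.setParameter("queryId", queryId);
        insert.setParameter("parentDepth", parentDepth);
        // One bulk insert-select per level, regardless of how many children that level has ...
        if (insert.executeUpdate() == 0) break; // no children were found at this level
        ++parentDepth;
    }

Because each pass is one bulk statement, the number of SQL statements is bounded by the depth of the subgraph rather than by the number of nodes in it.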
Added: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java	(rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -0,0 +1,316 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.store.jpa.models.basic;
+
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import javax.persistence.EntityManager;
+import javax.persistence.NoResultException;
+import javax.persistence.Query;
+import org.jboss.dna.graph.ExecutionContext;
+import org.jboss.dna.graph.Location;
+import org.jboss.dna.graph.properties.Name;
+import org.jboss.dna.graph.properties.NameFactory;
+import org.jboss.dna.graph.properties.Path;
+import org.jboss.dna.graph.properties.PathFactory;
+
+/**
+ * Represents a temporary working area for a query that efficiently retrieves the nodes in a subgraph. This class uses the
+ * database to build up the content of the subgraph, and therefore requires write privilege on the database. The benefit is
+ * that it minimizes the amount of memory required to process the subgraph, plus the set of nodes that make up the subgraph
+ * can be produced with database joins.
+ * <p>
+ * The use of database joins also produces another benefit: the number of SQL statements necessary to build the set of nodes
+ * in a subgraph is equal to the depth of the subgraph, regardless of the number of child nodes at any level.
+ * </p>
+ *
+ * @author Randall Hauch
+ */
+public class SubgraphQuery {
+
+    /**
+     * Create a query that returns a subgraph at and below the node with the supplied path and the supplied UUID.
+     *
+     * @param context the execution context; may not be null
+     * @param entities the entity manager; may not be null
+     * @param subgraphRootUuid the UUID (in string form) of the root node in the subgraph
+     * @param subgraphRootPath the path of the root node in the subgraph
+     * @param maxDepth the maximum depth of the subgraph, or 0 if there is no maximum depth
+     * @return the object representing the subgraph
+     */
+ public static SubgraphQuery create( ExecutionContext context,
+ EntityManager entities,
+ UUID subgraphRootUuid,
+ Path subgraphRootPath,
+ int maxDepth ) {
+ assert entities != null;
+ assert subgraphRootUuid != null;
+ assert maxDepth >= 0;
+ if (maxDepth == 0) maxDepth = Integer.MAX_VALUE;
+ final String subgraphRootUuidString = subgraphRootUuid.toString();
+ // Create a new subgraph query, and add a child for the root ...
+ SubgraphQueryEntity query = new SubgraphQueryEntity(subgraphRootUuidString);
+ entities.persist(query);
+ Long queryId = query.getId();
+
+ try {
+            // Insert a node for the root (this will be the starting point for the recursive operation) ...
+            SubgraphNodeEntity root = new SubgraphNodeEntity(queryId, subgraphRootUuidString, 0);
+ entities.persist(root);
+
+ // Now add the children by inserting the children, one level at a time ...
+            Query statement = entities.createNamedQuery("SubgraphNodeEntity.insertChildren");
+ int numChildrenInserted = 0;
+ int parentLevel = 0;
+ while (parentLevel < maxDepth - 1) {
+                // Insert the children of the next level by inserting via a select (join) of the children ...
+ statement.setParameter("queryId", queryId);
+ statement.setParameter("parentDepth", parentLevel);
+ numChildrenInserted = statement.executeUpdate();
+ if (numChildrenInserted == 0) break;
+ parentLevel = parentLevel + 1;
+ }
+ } catch (RuntimeException t) {
+ // Clean up the search and results ...
+ try {
+                Query search = entities.createNamedQuery("SubgraphNodeEntity.deleteByQueryId");
+ search.setParameter("queryId", query.getId());
+ search.executeUpdate();
+ } finally {
+ entities.remove(query);
+ }
+ throw t;
+ }
+ return new SubgraphQuery(context, entities, query, subgraphRootPath, maxDepth);
+ }
+
+ private final ExecutionContext context;
+ private final EntityManager manager;
+ private SubgraphQueryEntity query;
+ private final int maxDepth;
+ private final Path subgraphRootPath;
+
+ protected SubgraphQuery( ExecutionContext context,
+ EntityManager manager,
+ SubgraphQueryEntity query,
+ Path subgraphRootPath,
+ int maxDepth ) {
+ assert manager != null;
+ assert query != null;
+ assert context != null;
+ assert subgraphRootPath != null;
+ this.context = context;
+ this.manager = manager;
+ this.query = query;
+ this.maxDepth = maxDepth;
+ this.subgraphRootPath = subgraphRootPath;
+ }
+
+ /**
+ * @return maxDepth
+ */
+ public int getMaxDepth() {
+ return maxDepth;
+ }
+
+ /**
+ * @return manager
+ */
+ public EntityManager getEntityManager() {
+ return manager;
+ }
+
+ /**
+ * @return subgraphRootPath
+ */
+ public Path getSubgraphRootPath() {
+ return subgraphRootPath;
+ }
+
+ /**
+ * @return query
+ */
+ public SubgraphQueryEntity getSubgraphQueryEntity() {
+ if (query == null) throw new IllegalStateException();
+ return query;
+ }
+
+ public int getNodeCount( boolean includeRoot ) {
+ if (query == null) throw new IllegalStateException();
+ // Now query for all the nodes and put into a list ...
+        Query search = manager.createNamedQuery("SubgraphNodeEntity.getCount");
+ search.setParameter("queryId", query.getId());
+
+ // Now process the nodes below the subgraph's root ...
+ try {
+            return ((Long)search.getSingleResult()).intValue() - (includeRoot ? 0 : 1);
+ } catch (NoResultException e) {
+ return 0;
+ }
+ }
+
+    /**
+     * Get the {@link ChildEntity nodes} in the subgraph. This must be called before the query is {@link #close() closed}.
+     *
+     * @param includeRoot true if the subgraph's root node is to be included, or false otherwise
+     * @return the list of nodes, in breadth-first order
+     */
+ @SuppressWarnings( "unchecked" )
+ public List<ChildEntity> getNodes( boolean includeRoot ) {
+ if (query == null) throw new IllegalStateException();
+ // Now query for all the nodes and put into a list ...
+        Query search = manager.createNamedQuery("SubgraphNodeEntity.getChildEntities");
+ search.setParameter("queryId", query.getId());
+ search.setParameter("depth", includeRoot ? 0 : 1);
+
+ // Now process the nodes below the subgraph's root ...
+ return search.getResultList();
+ }
+
+    /**
+     * Get the {@link PropertiesEntity properties} for each of the nodes in the subgraph. This must be called before the
+     * query is {@link #close() closed}.
+     *
+     * @param includeRoot true if the properties for the subgraph's root node are to be included, or false otherwise
+     * @return the list of properties for each of the nodes, in breadth-first order
+     */
+ @SuppressWarnings( "unchecked" )
+ public List<PropertiesEntity> getProperties( boolean includeRoot ) {
+ if (query == null) throw new IllegalStateException();
+ // Now query for all the nodes and put into a list ...
+        Query search = manager.createNamedQuery("SubgraphNodeEntity.getPropertiesEntities");
+ search.setParameter("queryId", query.getId());
+ search.setParameter("depth", includeRoot ? 0 : 1);
+
+ // Now process the nodes below the subgraph's root ...
+ return search.getResultList();
+ }
+
+    /**
+     * Get the {@link Location} for each of the nodes in the subgraph. This must be called before the query is
+     * {@link #close() closed}.
+     * <p>
+     * This method calls {@link #getNodes(boolean)}. Therefore, calling {@link #getNodes(boolean)} and this method for the
+     * same subgraph is not efficient; consider just calling {@link #getNodes(boolean)} alone.
+     * </p>
+     *
+     * @param includeRoot true if the properties for the subgraph's root node are to be included, or false otherwise
+     * @return the list of {@link Location locations}, one for each of the nodes in the subgraph, in breadth-first order
+     */
+ public List<Location> getNodeLocations( boolean includeRoot ) {
+ if (query == null) throw new IllegalStateException();
+        // Set up a map of the paths to the nodes, keyed by UUIDs. This saves us from having to build
+        // the paths every time ...
+ Map<String, Path> pathByUuid = new HashMap<String, Path>();
+ LinkedList<Location> locations = new LinkedList<Location>();
+ String subgraphRootUuid = query.getRootUuid();
+ pathByUuid.put(subgraphRootUuid, subgraphRootPath);
+ UUID uuid = UUID.fromString(subgraphRootUuid);
+ if (includeRoot) {
+ locations.add(new Location(subgraphRootPath, uuid));
+ }
+
+        // Now iterate over the child nodes in the subgraph (we've already included the root) ...
+ final PathFactory pathFactory = context.getValueFactories().getPathFactory();
+ final NameFactory nameFactory = context.getValueFactories().getNameFactory();
+ for (ChildEntity entity : getNodes(false)) {
+ String parentUuid = entity.getId().getParentUuidString();
+ Path parentPath = pathByUuid.get(parentUuid);
+ assert parentPath != null;
+ String nsUri = entity.getChildNamespace().getUri();
+ String localName = entity.getChildName();
+ int sns = entity.getSameNameSiblingIndex();
+ Name childName = nameFactory.create(nsUri, localName);
+ Path childPath = pathFactory.create(parentPath, childName, sns);
+ String childUuid = entity.getId().getChildUuidString();
+ pathByUuid.put(childUuid, childPath);
+ uuid = UUID.fromString(childUuid);
+ locations.add(new Location(childPath, uuid));
+
+ }
+ return locations;
+ }
+
+ @SuppressWarnings( "unchecked" )
+ public void deleteSubgraph( boolean includeRoot ) {
+ if (query == null) throw new IllegalStateException();
+
+ // Delete the PropertiesEntities ...
+ //
+        // Right now, Hibernate is not able to support deleting PropertiesEntity in bulk because of the
+        // large value association (and there's no way to clear the association in bulk).
+        // Therefore, the only way to do this with Hibernate is to load each PropertiesEntity that has
+        // large values and clear them. (Theoretically, fewer PropertiesEntity objects will have large values
+        // than the total number in the subgraph.)
+        // Then we can delete the properties.
+        Query withLargeValues = manager.createNamedQuery("SubgraphNodeEntity.getPropertiesEntitiesWithLargeValues");
+ withLargeValues.setParameter("queryId", query.getId());
+ withLargeValues.setParameter("depth", includeRoot ? 0 : 1);
+        List<PropertiesEntity> propertiesWithLargeValues = withLargeValues.getResultList();
+ if (propertiesWithLargeValues.size() != 0) {
+ for (PropertiesEntity props : propertiesWithLargeValues) {
+ props.getLargeValues().clear();
+ }
+ manager.flush();
+ }
+
+ // Delete the PropertiesEntities, none of which will have large values ...
+        Query delete = manager.createNamedQuery("SubgraphNodeEntity.deletePropertiesEntities");
+ delete.setParameter("queryId", query.getId());
+ delete.setParameter("depth", includeRoot ? 0 : 1);
+ delete.executeUpdate();
+
+ // Delete the ChildEntities ...
+        delete = manager.createNamedQuery("SubgraphNodeEntity.deleteChildEntities");
+ delete.setParameter("queryId", query.getId());
+ delete.setParameter("depth", includeRoot ? 0 : 1);
+ delete.executeUpdate();
+
+ // Delete unused large values ...
+ LargeValueEntity.deleteUnused(manager);
+
+ manager.flush();
+ }
+
+    /**
+     * Close this query object and clean up all in-database records associated with this query. This method <i>must</i> be
+     * called when this query is no longer needed, and once it is called, this subgraph query is no longer usable.
+     */
+ public void close() {
+ if (query == null) return;
+ // Clean up the search and results ...
+ try {
+            Query search = manager.createNamedQuery("SubgraphNodeEntity.deleteByQueryId");
+ search.setParameter("queryId", query.getId());
+ search.executeUpdate();
+ } finally {
+ try {
+ manager.remove(query);
+ } finally {
+ query = null;
+ }
+ }
+ }
+}
Property changes on: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQuery.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
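
Taken together, the lifecycle of a subgraph query is: create it (which populates the temporary DNA_SUBGRAPH_NODES working set), read the locations, nodes, or properties, optionally delete the whole branch, and always close it. A minimal usage sketch, mirroring SubgraphQueryTest below (context, manager, rootUuid, and rootPath are assumed to be supplied by the caller):

    SubgraphQuery subgraph = SubgraphQuery.create(context, manager, rootUuid, rootPath, 0); // 0 means no maximum depth
    try {
        for (Location location : subgraph.getNodeLocations(true)) {
            // breadth-first: the root first, then each successive level of children ...
        }
        // For a delete request, the same working set drives the bulk delete statements:
        // subgraph.deleteSubgraph(true);
    } finally {
        subgraph.close(); // removes this query's temporary rows from the database
    }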
Added: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryEntity.java	(rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryEntity.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -0,0 +1,63 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.store.jpa.models.basic;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+
+/**
+ * Represents a temporary working area for a query that retrieves the nodes in a subgraph.
+ *
+ * @author Randall Hauch
+ */
+@Entity( name = "DNA_SUBGRAPH_QUERIES" )
+public class SubgraphQueryEntity {
+
+ @Id
+ @GeneratedValue( strategy = GenerationType.AUTO )
+ @Column( name = "ID", updatable = false )
+ private Long id;
+
+ @Column( name = "ROOT_UUID", updatable = false, nullable = false, length =
36 )
+ private String rootUuid;
+
+ public SubgraphQueryEntity( String rootUuid ) {
+ this.rootUuid = rootUuid;
+ }
+
+ /**
+ * @return id
+ */
+ public Long getId() {
+ return id;
+ }
+
+ /**
+ * @return rootUuid
+ */
+ public String getRootUuid() {
+ return rootUuid;
+ }
+}
Property changes on: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryEntity.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Deleted: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/common/NodeId.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/common/NodeId.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/common/NodeId.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -1,127 +0,0 @@
-/*
- * JBoss, Home of Professional Open Source.
- * Copyright 2008, Red Hat Middleware LLC, and individual contributors
- * as indicated by the @author tags. See the copyright.txt file in the
- * distribution for a full listing of individual contributors.
- *
- * This is free software; you can redistribute it and/or modify it
- * under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2.1 of
- * the License, or (at your option) any later version.
- *
- * This software is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this software; if not, write to the Free
- * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
- * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
- */
-package org.jboss.dna.connector.store.jpa.models.common;
-
-import java.io.Serializable;
-import java.util.UUID;
-import javax.persistence.Column;
-import javax.persistence.Embeddable;
-
-/**
- * An identifier for a node, comprised of a single {@link UUID}, and {@link Embeddable embeddable} in a persistent entity.
- * The identifier takes the form of two <code>long</code> columns: one for the UUID's
- * {@link UUID#getMostSignificantBits() most significant bits} and one for its
- * {@link UUID#getLeastSignificantBits() least significant bits}.
- *
- * @author Randall Hauch
- */
-@Embeddable
-public class NodeId implements Serializable {
-
- /**
- * Version {@value}
- */
- private static final long serialVersionUID = 1L;
-
- @Column( name = "UUID", nullable = true )
- private String uuidString;
-
- private transient UUID uuid;
-
- public NodeId() {
- }
-
- public NodeId( String uuidString ) {
- this.uuidString = uuidString;
- }
-
- public NodeId( UUID uuid ) {
- setUuid(uuid);
- }
-
- public UUID getUuid() {
- if (uuid == null) {
- // No need to synchronize, since it is idempotent ...
- uuid = UUID.fromString(uuidString);
- }
- return uuid;
- }
-
- public void setUuid( UUID uuid ) {
- assert uuid != null;
- this.uuid = uuid;
- this.uuidString = uuid.toString();
- }
-
- /**
- * @return uuidString
- */
- public String getUuidString() {
- return uuidString;
- }
-
- /**
- * @param uuidString Sets uuidString to the specified value.
- */
- public void setUuidString( String uuidString ) {
- this.uuidString = uuidString;
- }
-
- /**
- * {@inheritDoc}
- *
- * @see java.lang.Object#hashCode()
- */
- @Override
- public int hashCode() {
- return getUuid().hashCode();
- }
-
- /**
- * {@inheritDoc}
- *
- * @see java.lang.Object#equals(java.lang.Object)
- */
- @Override
- public boolean equals( Object obj ) {
- if (obj == this) return true;
- if (obj instanceof NodeId) {
- NodeId that = (NodeId)obj;
- if (this.uuidString == null) {
- if (that.uuidString != null) return false;
- } else {
- if (!this.uuidString.equals(that.uuidString)) return false;
- }
- return true;
- }
- return false;
- }
-
- /**
- * {@inheritDoc}
- *
- * @see java.lang.Object#toString()
- */
- @Override
- public String toString() {
- return getUuid().toString();
- }
-}
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Namespaces.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Namespaces.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Namespaces.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -32,20 +32,21 @@
public class Namespaces {
private final EntityManager entityManager;
-    private final Map<String, Integer> cache = new HashMap<String, Integer>();
+    private final Map<String, NamespaceEntity> cache = new HashMap<String, NamespaceEntity>();
public Namespaces( EntityManager manager ) {
this.entityManager = manager;
}
- public int getId( String namespaceUri ) {
- Integer id = cache.get(namespaceUri);
- if (id == null) {
-            NamespaceEntity entity = NamespaceEntity.findByUri(entityManager, namespaceUri, true);
- id = entity.getId();
- cache.put(namespaceUri, id);
+ public NamespaceEntity get( String namespaceUri,
+ boolean createIfRequired ) {
+ NamespaceEntity entity = cache.get(namespaceUri);
+ if (entity == null) {
+            entity = NamespaceEntity.findByUri(entityManager, namespaceUri, createIfRequired);
+ if (entity != null) {
+ cache.put(namespaceUri, entity);
+ }
}
- assert id != null;
- return id;
+ return entity;
}
}
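
The reworked Namespaces utility now caches whole NamespaceEntity instances instead of just their integer ids, and lets the caller decide whether a missing namespace should be created. A minimal usage sketch (an open EntityManager and a Name value are assumed; SubgraphQueryTest below makes the same call when building ChildEntity rows):

    Namespaces namespaces = new Namespaces(entityManager);
    // With createIfRequired=true a missing namespace row is created on demand;
    // with false, the result may be null if the URI has never been stored ...
    NamespaceEntity ns = namespaces.get(name.getNamespaceUri(), true);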
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -35,6 +35,7 @@
import javax.persistence.EntityManagerFactory;
import org.hibernate.ejb.Ejb3Configuration;
import org.jboss.dna.common.stats.Stopwatch;
+import org.jboss.dna.common.util.IoUtil;
import org.jboss.dna.connector.store.jpa.models.basic.BasicModel;
import org.jboss.dna.graph.BasicExecutionContext;
import org.jboss.dna.graph.DnaLexicon;
@@ -69,6 +70,7 @@
private long largeValueSize;
private boolean compressData;
private Graph graph;
+ private String[] validLargeValues;
@Before
public void beforeEach() throws Exception {
@@ -78,6 +80,11 @@
largeValueSize = 2 ^ 10; // 1 kilobyte
compressData = false;
+        // Load in the large values ...
+        validLargeValues = new String[] {IoUtil.read(getClass().getClassLoader().getResourceAsStream("LoremIpsum1.txt")),
+            IoUtil.read(getClass().getClassLoader().getResourceAsStream("LoremIpsum2.txt")),
+            IoUtil.read(getClass().getClassLoader().getResourceAsStream("LoremIpsum3.txt"))};
+
// Connect to the database ...
Ejb3Configuration configurator = new Ejb3Configuration();
model.configure(configurator);
@@ -121,6 +128,13 @@
}
}
+ @Test
+ public void shouldFindLargeValueContentFromFile() {
+ for (int i = 0; i != validLargeValues.length; ++i) {
+            assertThat(validLargeValues[i].startsWith((i + 1) + ". Lorem ipsum dolor sit amet"), is(true));
+ }
+ }
+
/**
* Override this method in subclasses to create test cases that test other models.
*
@@ -190,8 +204,10 @@
@Test
public void shouldAddChildrenOnRootNode() {
-        graph.batch().set("propA").to("valueA").on("/").and().create("/a").with("propB", "valueB").and("propC", "valueC").and()
-             .create("/b").with("propD", "valueD").and("propE", "valueE").execute();
+        graph.batch().set("propA").to("valueA").on("/").and()
+             .create("/a").with("propB", "valueB").and("propC", "valueC").and()
+             .create("/b").with("propD", "valueD").and("propE", "valueE")
+             .execute();
// Now look up the root node ...
Node root = graph.getNodeAt("/");
assertThat(root, is(notNullValue()));
@@ -246,6 +262,91 @@
}
@Test
+ public void shouldUpdateSmallPropertiesOnANode() {
+ // Create the property and add some properties (including 2 large values) ...
+ Graph.Create<Graph.Batch> create = graph.batch().create("/a");
+ for (int i = 0; i != 10; ++i) {
+ create = create.with("property" + i, "value" + i);
+ }
+ create.execute();
+
+ // Now look up all the properties ...
+ Node nodeA = graph.getNodeAt("/a");
+ assertThat(nodeA, is(notNullValue()));
+ for (int i = 0; i != 10; ++i) {
+            assertThat(nodeA, hasProperty("property" + i, "value" + i));
+ }
+ assertThat(nodeA, hasNoChildren());
+
+ // Now, remove some of the properties and add some others ...
+ Graph.Batch batch = graph.batch();
+ batch.remove("property0", "property1").on("/a");
+ batch.set("property6").to("new valid 6").on("/a");
+ batch.execute();
+
+ // Re-read the properties ...
+ nodeA = graph.getNodeAt("/a");
+ assertThat(nodeA, is(notNullValue()));
+ for (int i = 0; i != 10; ++i) {
+ if (i == 0 || i == 1) {
+ continue;
+ } else if (i == 6) {
+                assertThat(nodeA, hasProperty("property" + i, "new valid 6"));
+            } else {
+                assertThat(nodeA, hasProperty("property" + i, "value" + i));
+ }
+ }
+ assertThat(nodeA, hasNoChildren());
+
+ }
+
+ @Test
+ public void shouldUpdateLargePropertiesOnANode() {
+ // Create the property and add some properties (including 2 large values) ...
+ Graph.Create<Graph.Batch> create = graph.batch().create("/a");
+ for (int i = 0; i != 100; ++i) {
+ create = create.with("property" + i, "value" + i);
+ }
+ create = create.with("largeProperty1", validLargeValues[0]);
+ create = create.with("largeProperty2", validLargeValues[1]);
+ create.execute();
+
+ // Now look up all the properties ...
+ Node nodeA = graph.getNodeAt("/a");
+ assertThat(nodeA, is(notNullValue()));
+ for (int i = 0; i != 100; ++i) {
+            assertThat(nodeA, hasProperty("property" + i, "value" + i));
+ }
+ assertThat(nodeA, hasProperty("largeProperty1", validLargeValues[0]));
+ assertThat(nodeA, hasProperty("largeProperty2", validLargeValues[1]));
+ assertThat(nodeA, hasNoChildren());
+
+ // Now, remove some of the properties and add some others ...
+ Graph.Batch batch = graph.batch();
+ batch.remove("largeProperty1", "property0",
"property1").on("/a");
+ batch.set("property50").to("new valid
50").on("/a");
+
batch.set("largeProperty3").to(validLargeValues[2]).on("/a");
+ batch.execute();
+
+ // Re-read the properties ...
+ nodeA = graph.getNodeAt("/a");
+ assertThat(nodeA, is(notNullValue()));
+ for (int i = 0; i != 100; ++i) {
+ if (i == 0 || i == 1) {
+ continue;
+ } else if (i == 50) {
+                assertThat(nodeA, hasProperty("property" + i, "new valid 50"));
+            } else {
+                assertThat(nodeA, hasProperty("property" + i, "value" + i));
+ }
+ }
+ assertThat(nodeA, hasProperty("largeProperty2", validLargeValues[1]));
+ assertThat(nodeA, hasProperty("largeProperty3", validLargeValues[2]));
+ assertThat(nodeA, hasNoChildren());
+
+ }
+
+ @Test
public void shouldGetOnePropertyOnNode() {
Graph.Create<Graph.Batch> create = graph.batch().create("/a");
for (int i = 0; i != 100; ++i) {
Modified: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -39,7 +39,6 @@
import org.jboss.dna.common.util.StringUtil;
import org.jboss.dna.connector.store.jpa.JpaConnectorI18n;
import org.jboss.dna.connector.store.jpa.models.common.NamespaceEntity;
-import org.jboss.dna.connector.store.jpa.models.common.NodeId;
import org.jboss.dna.graph.BasicExecutionContext;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.properties.PropertyType;
@@ -100,7 +99,7 @@
        configurator.setProperty("hibernate.connection.username", "sa");
        configurator.setProperty("hibernate.connection.password", "");
        configurator.setProperty("hibernate.connection.url", "jdbc:hsqldb:.");
-        configurator.setProperty("hibernate.show_sql", "true");
+        configurator.setProperty("hibernate.show_sql", "false");
        configurator.setProperty("hibernate.format_sql", "true");
        configurator.setProperty("hibernate.use_sql_comments", "true");
        configurator.setProperty("hibernate.hbm2ddl.auto", "create");
@@ -133,7 +132,7 @@
@Test
public void shouldPersistPropertyEntityWithCompressedFlagAndNoChildren() {
startEntityManager();
- NodeId nodeId = new NodeId(UUID.randomUUID());
+ NodeId nodeId = new NodeId(UUID.randomUUID().toString());
PropertiesEntity prop = new PropertiesEntity();
prop.setCompressed(true);
prop.setData("Hello, World".getBytes());
@@ -162,7 +161,7 @@
@Test
public void shouldPersistPropertyEntityWithUncompressedFlagAndNoChildren() {
startEntityManager();
- NodeId nodeId = new NodeId(UUID.randomUUID());
+ NodeId nodeId = new NodeId(UUID.randomUUID().toString());
PropertiesEntity prop = new PropertiesEntity();
prop.setData("Hello, World".getBytes());
prop.setId(nodeId);
@@ -192,9 +191,10 @@
startEntityManager();
        byte[] content = "Jack and Jill went up the hill to grab a pail of water.".getBytes();
        String hash = StringUtil.getHexString(SecureHash.getHash(SecureHash.Algorithm.SHA_1, content));
+ LargeValueId id = new LargeValueId(hash);
LargeValueEntity entity = new LargeValueEntity();
entity.setCompressed(true);
- entity.setHash(hash);
+ entity.setId(id);
entity.setLength(content.length);
entity.setData(content);
entity.setType(PropertyType.STRING);
@@ -210,9 +210,9 @@
// Look up the object ...
manager.getTransaction().begin();
try {
- LargeValueEntity entity2 = manager.find(LargeValueEntity.class, hash);
+ LargeValueEntity entity2 = manager.find(LargeValueEntity.class, id);
assertThat(entity2.isCompressed(), is(entity.isCompressed()));
- assertThat(entity2.getHash(), is(entity.getHash()));
+ assertThat(entity2.getId(), is(id));
assertThat(entity2.getData(), is(entity.getData()));
assertThat(entity2.getLength(), is(entity.getLength()));
assertThat(entity2.getType(), is(entity.getType()));
@@ -226,9 +226,10 @@
startEntityManager();
        byte[] content = "Jack and Jill went up the hill to grab a pail of water.".getBytes();
        String hash = StringUtil.getHexString(SecureHash.getHash(SecureHash.Algorithm.SHA_1, content));
+ LargeValueId id = new LargeValueId(hash);
LargeValueEntity entity = new LargeValueEntity();
// entity.setCompressed(false);
- entity.setHash(hash);
+ entity.setId(id);
entity.setLength(content.length);
entity.setData(content);
entity.setType(PropertyType.STRING);
@@ -244,9 +245,9 @@
// Look up the object ...
manager.getTransaction().begin();
try {
- LargeValueEntity entity2 = manager.find(LargeValueEntity.class, hash);
+ LargeValueEntity entity2 = manager.find(LargeValueEntity.class, id);
assertThat(entity2.isCompressed(), is(entity.isCompressed()));
- assertThat(entity2.getHash(), is(entity.getHash()));
+ assertThat(entity2.getId(), is(entity.getId()));
assertThat(entity2.getData(), is(entity.getData()));
assertThat(entity2.getLength(), is(entity.getLength()));
assertThat(entity2.getType(), is(entity.getType()));
@@ -295,9 +296,9 @@
UUID parentId = UUID.randomUUID();
// Create UUIDs for several children ...
- ChildId childId1 = new ChildId(parentId, UUID.randomUUID());
- ChildId childId2 = new ChildId(parentId, UUID.randomUUID());
- ChildId childId3 = new ChildId(parentId, UUID.randomUUID());
+        ChildId childId1 = new ChildId(parentId.toString(), UUID.randomUUID().toString());
+        ChildId childId2 = new ChildId(parentId.toString(), UUID.randomUUID().toString());
+        ChildId childId3 = new ChildId(parentId.toString(), UUID.randomUUID().toString());
assertThat(childId1, is(not(childId2)));
assertThat(childId1, is(not(childId3)));
assertThat(childId2, is(not(childId3)));
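
These test changes track the switch of LargeValueEntity's key from a raw SHA-1 hash string to the new LargeValueId embeddable (listed among the added files). A minimal sketch of creating and finding a large value under the new scheme (content is an assumed byte[] and manager an open EntityManager; the calls mirror the updated tests above):

    String hash = StringUtil.getHexString(SecureHash.getHash(SecureHash.Algorithm.SHA_1, content));
    LargeValueId id = new LargeValueId(hash);
    LargeValueEntity entity = new LargeValueEntity();
    entity.setId(id);
    entity.setLength(content.length);
    entity.setData(content);
    entity.setType(PropertyType.STRING);
    manager.persist(entity);
    // Later, the same hash-based id looks the value up directly ...
    LargeValueEntity found = manager.find(LargeValueEntity.class, id);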
Added: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java	(rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java	2008-12-03 22:46:57 UTC (rev 652)
@@ -0,0 +1,436 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.store.jpa.models.basic;
+
+import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNull.notNullValue;
+import static org.junit.Assert.assertThat;
+import static org.junit.matchers.IsCollectionContaining.hasItems;
+import java.io.UnsupportedEncodingException;
+import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Query;
+import org.hibernate.ejb.Ejb3Configuration;
+import org.jboss.dna.common.util.IoUtil;
+import org.jboss.dna.common.util.SecureHash;
+import org.jboss.dna.common.util.StringUtil;
+import org.jboss.dna.connector.store.jpa.models.common.NamespaceEntity;
+import org.jboss.dna.connector.store.jpa.util.Namespaces;
+import org.jboss.dna.graph.BasicExecutionContext;
+import org.jboss.dna.graph.ExecutionContext;
+import org.jboss.dna.graph.Location;
+import org.jboss.dna.graph.properties.Name;
+import org.jboss.dna.graph.properties.Path;
+import org.jboss.dna.graph.properties.PropertyType;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * @author Randall Hauch
+ */
+public class SubgraphQueryTest {
+
+ private EntityManagerFactory factory;
+ private EntityManager manager;
+ private BasicModel model;
+ private ExecutionContext context;
+ private Map<Path, UUID> uuidByPath;
+ private Namespaces namespaces;
+ private List<Location> locations;
+ private String[] validLargeValues;
+ private SubgraphQuery query;
+
+ @BeforeClass
+ public static void beforeAll() throws Exception {
+ }
+
+ @Before
+ public void beforeEach() throws Exception {
+ model = new BasicModel();
+ context = new BasicExecutionContext();
+
+        // Load in the large values ...
+        validLargeValues = new String[] {IoUtil.read(getClass().getClassLoader().getResourceAsStream("LoremIpsum1.txt")),
+            IoUtil.read(getClass().getClassLoader().getResourceAsStream("LoremIpsum2.txt")),
+            IoUtil.read(getClass().getClassLoader().getResourceAsStream("LoremIpsum3.txt"))};
+
+ // Connect to the database ...
+ Ejb3Configuration configurator = new Ejb3Configuration();
+ model.configure(configurator);
+        configurator.setProperty("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
+        configurator.setProperty("hibernate.connection.driver_class", "org.hsqldb.jdbcDriver");
+        configurator.setProperty("hibernate.connection.username", "sa");
+        configurator.setProperty("hibernate.connection.password", "");
+        configurator.setProperty("hibernate.connection.url", "jdbc:hsqldb:.");
+        configurator.setProperty("hibernate.show_sql", "false");
+        configurator.setProperty("hibernate.format_sql", "true");
+        configurator.setProperty("hibernate.use_sql_comments", "true");
+        configurator.setProperty("hibernate.hbm2ddl.auto", "create");
+ factory = configurator.buildEntityManagerFactory();
+ manager = factory.createEntityManager();
+ namespaces = new Namespaces(manager);
+
+ manager.getTransaction().begin();
+
+ // Now populate a graph of nodes ...
+ uuidByPath = new HashMap<Path, UUID>();
+ uuidByPath.put(path("/"), UUID.randomUUID());
+ create("/a");
+ create("/a/a1");
+ create("/a/a1/a1");
+ create("/a/a1/a2");
+ create("/a/a1/a3");
+ create("/a/a2");
+ create("/a/a2/a1");
+ create("/a/a2/a1/a1");
+ create("/a/a2/a1/a1/a1");
+ create("/a/a2/a1/a1/a2");
+ create("/a/a2/a1/a2");
+ create("/a/a2/a2");
+ create("/a/a2/a3");
+ create("/a/a2/a4");
+ setLargeValue("/a/a1", "prop1", validLargeValues[0]);
+ setLargeValue("/a/a1", "prop1", validLargeValues[1]); // the
only node that uses #1
+ setLargeValue("/a/a2", "prop1", validLargeValues[0]);
+ setLargeValue("/a/a2", "prop2", validLargeValues[2]);
+ setLargeValue("/a/a2/a1", "prop2", validLargeValues[0]);
+ setLargeValue("/a/a2/a1", "prop3", validLargeValues[2]);
+ manager.getTransaction().commit();
+ manager.getTransaction().begin();
+ }
+
+ @After
+ public void afterEach() throws Exception {
+ if (query != null) {
+ try {
+ query.close();
+ } catch (Throwable t) {
+ t.printStackTrace();
+ }
+ }
+ try {
+ manager.close();
+ } finally {
+ factory.close();
+ }
+ }
+
+ protected Path path( String path ) {
+ return context.getValueFactories().getPathFactory().create(path);
+ }
+
+ protected void create( String pathStr ) {
+ Path path = path(pathStr);
+ if (uuidByPath.containsKey(path)) return;
+ if (path.isRoot()) return;
+ Path parent = path.getParent();
+ // Look up the parent ...
+ UUID parentUuid = uuidByPath.get(parent);
+ assert parentUuid != null;
+ // Calculate the child index by walking the existing nodes ...
+ int numChildren = 0;
+ for (Path existing : uuidByPath.keySet()) {
+ if (parent.equals(existing.getParent())) {
+ ++numChildren;
+ }
+ }
+
+ // Create the child entity ...
+ Name childName = path.getLastSegment().getName();
+ int snsIndex = path.getLastSegment().getIndex();
+ NamespaceEntity namespace = namespaces.get(childName.getNamespaceUri(), true);
+ UUID childUuid = UUID.randomUUID();
+ ChildId id = new ChildId(parentUuid.toString(), childUuid.toString());
+        ChildEntity entity = new ChildEntity(id, ++numChildren, namespace, childName.getLocalName(), snsIndex);
+ manager.persist(entity);
+
+ // Create the properties ...
+ NodeId nodeId = new NodeId(childUuid.toString());
+ PropertiesEntity props = new PropertiesEntity(nodeId);
+ props.setData("bogus data".getBytes());
+ props.setPropertyCount(1);
+ props.setCompressed(false);
+ manager.persist(props);
+
+ uuidByPath.put(path, childUuid);
+ }
+
+ protected UUID uuidForPath( String pathStr ) {
+ Path path = path(pathStr);
+ return uuidByPath.get(path);
+ }
+
+ protected void setLargeValue( String pathStr,
+ String propertyName,
+                                  String largeValue ) throws UnsupportedEncodingException, NoSuchAlgorithmException {
+ Path path = path(pathStr);
+ UUID nodeUuid = uuidByPath.get(path);
+ assertThat(nodeUuid, is(notNullValue()));
+
+ // Find or create the large value object ...
+ LargeValueId id = largeValueId(largeValue);
+ LargeValueEntity entity = manager.find(LargeValueEntity.class, id);
+ if (entity == null) {
+ entity = new LargeValueEntity();
+ entity.setId(id);
+ entity.setLength(largeValue.length());
+ entity.setCompressed(false);
+ entity.setData(largeValue.getBytes());
+ entity.setType(PropertyType.STRING);
+ manager.persist(entity);
+ }
+
+ // Load the PropertiesEntity ...
+ NodeId nodeId = new NodeId(nodeUuid.toString());
+ PropertiesEntity props = manager.find(PropertiesEntity.class, nodeId);
+ assertThat(props, is(notNullValue()));
+
+ // Add the large value ...
+ props.getLargeValues().add(id);
+ }
+
+    protected LargeValueId largeValueId( String value ) throws UnsupportedEncodingException, NoSuchAlgorithmException {
+        return new LargeValueId(StringUtil.getHexString(SecureHash.getHash(SecureHash.Algorithm.SHA_1, value.getBytes())));
+ }
+
+ protected PropertiesEntity getProperties( String pathStr ) {
+ Path path = path(pathStr);
+ UUID nodeUuid = uuidByPath.get(path);
+ assertThat(nodeUuid, is(notNullValue()));
+
+ NodeId nodeId = new NodeId(nodeUuid.toString());
+ return manager.find(PropertiesEntity.class, nodeId);
+ }
+
+ protected void verifyNextLocationIs( String path ) {
+ Path pathObj = path(path);
+ UUID uuid = uuidByPath.get(pathObj);
+ Location next = locations.remove(0);
+ assertThat(next, is(notNullValue()));
+ assertThat(next.getPath(), is(pathObj));
+ assertThat(next.getUuid(), is(uuid));
+ }
+
+ protected void verifyNoMoreLocations() {
+ assertThat(locations.isEmpty(), is(true));
+ }
+
+ @SuppressWarnings( "unchecked" )
+ protected void verifyNodesHaveLargeValues( String... paths ) {
+ if (paths == null || paths.length == 0) return;
+ // Build the set of UUIDs for the nodes that should have large values ...
+ String[] expectedNodeUuids = new String[paths.length];
+ for (int i = 0; i != paths.length; ++i) {
+ String pathStr = paths[i];
+ expectedNodeUuids[i] = uuidForPath(pathStr).toString();
+ }
+ // Load the PropertiesEntity for the nodes that have large properties ...
+        Query queryProps = manager.createQuery("select prop from PropertiesEntity as prop where size(prop.largeValues) > 0");
+ Set<String> actualNodeUuids = new HashSet<String>();
+ List<PropertiesEntity> propsWithLargeValues = queryProps.getResultList();
+ for (PropertiesEntity entity : propsWithLargeValues) {
+ String uuidStr = entity.getId().getUuidString();
+ actualNodeUuids.add(uuidStr);
+ }
+ assertThat(actualNodeUuids, hasItems(expectedNodeUuids));
+ }
+
+ @Test
+ public void shouldFindLargeValueContentFromFile() {
+ for (int i = 0; i != validLargeValues.length; ++i) {
+            assertThat(validLargeValues[i].startsWith((i + 1) + ". Lorem ipsum dolor sit amet"), is(true));
+ }
+ }
+
+ @Test
+ public void shouldPerformSubgraphQueryOfNodeWithChildrenAndNoGrandchildren() {
+ Path path = path("/a/a1");
+ UUID uuid = uuidByPath.get(path);
+ query = SubgraphQuery.create(context, manager, uuid, path, Integer.MAX_VALUE);
+ locations = query.getNodeLocations(true);
+ verifyNextLocationIs("/a/a1");
+ verifyNextLocationIs("/a/a1/a1");
+ verifyNextLocationIs("/a/a1/a2");
+ verifyNextLocationIs("/a/a1/a3");
+ verifyNoMoreLocations();
+ query.close();
+ }
+
+ @Test
+ public void shouldPerformSubgraphQueryOfNodeWithChildrenAndGrandchildren() {
+ Path path = path("/a/a2");
+ UUID uuid = uuidByPath.get(path);
+ query = SubgraphQuery.create(context, manager, uuid, path, Integer.MAX_VALUE);
+ locations = query.getNodeLocations(true);
+ verifyNextLocationIs("/a/a2");
+ verifyNextLocationIs("/a/a2/a1");
+ verifyNextLocationIs("/a/a2/a2");
+ verifyNextLocationIs("/a/a2/a3");
+ verifyNextLocationIs("/a/a2/a4");
+ verifyNextLocationIs("/a/a2/a1/a1");
+ verifyNextLocationIs("/a/a2/a1/a2");
+ verifyNextLocationIs("/a/a2/a1/a1/a1");
+ verifyNextLocationIs("/a/a2/a1/a1/a2");
+ verifyNoMoreLocations();
+ query.close();
+ }
+
+ @Test
+    public void shouldPerformSubgraphQueryOfNodeWithChildrenAndGrandchildrenAndGreatGranchildren() {
+ Path path = path("/a");
+ UUID uuid = uuidByPath.get(path);
+ query = SubgraphQuery.create(context, manager, uuid, path, Integer.MAX_VALUE);
+ locations = query.getNodeLocations(true);
+ verifyNextLocationIs("/a");
+ verifyNextLocationIs("/a/a1");
+ verifyNextLocationIs("/a/a2");
+ verifyNextLocationIs("/a/a1/a1");
+ verifyNextLocationIs("/a/a1/a2");
+ verifyNextLocationIs("/a/a1/a3");
+ verifyNextLocationIs("/a/a2/a1");
+ verifyNextLocationIs("/a/a2/a2");
+ verifyNextLocationIs("/a/a2/a3");
+ verifyNextLocationIs("/a/a2/a4");
+ verifyNextLocationIs("/a/a2/a1/a1");
+ verifyNextLocationIs("/a/a2/a1/a2");
+ verifyNextLocationIs("/a/a2/a1/a1/a1");
+ verifyNextLocationIs("/a/a2/a1/a1/a2");
+ verifyNoMoreLocations();
+ query.close();
+ }
+
+ @Test
+    public void shouldPerformMaxDepthSubgraphQueryOfNodeWithChildrenAndGrandchildrenAndGreatGranchildren() {
+ Path path = path("/a");
+ UUID uuid = uuidByPath.get(path);
+ query = SubgraphQuery.create(context, manager, uuid, path, 4);
+ locations = query.getNodeLocations(true);
+ verifyNextLocationIs("/a");
+ verifyNextLocationIs("/a/a1");
+ verifyNextLocationIs("/a/a2");
+ verifyNextLocationIs("/a/a1/a1");
+ verifyNextLocationIs("/a/a1/a2");
+ verifyNextLocationIs("/a/a1/a3");
+ verifyNextLocationIs("/a/a2/a1");
+ verifyNextLocationIs("/a/a2/a2");
+ verifyNextLocationIs("/a/a2/a3");
+ verifyNextLocationIs("/a/a2/a4");
+ verifyNextLocationIs("/a/a2/a1/a1");
+ verifyNextLocationIs("/a/a2/a1/a2");
+ verifyNoMoreLocations();
+ query.close();
+
+ query = SubgraphQuery.create(context, manager, uuid, path, 2);
+ locations = query.getNodeLocations(true);
+ verifyNextLocationIs("/a");
+ verifyNextLocationIs("/a/a1");
+ verifyNextLocationIs("/a/a2");
+ verifyNoMoreLocations();
+ query.close();
+
+ query = SubgraphQuery.create(context, manager, uuid, path, 3);
+ locations = query.getNodeLocations(true);
+ verifyNextLocationIs("/a");
+ verifyNextLocationIs("/a/a1");
+ verifyNextLocationIs("/a/a2");
+ verifyNextLocationIs("/a/a1/a1");
+ verifyNextLocationIs("/a/a1/a2");
+ verifyNextLocationIs("/a/a1/a3");
+ verifyNextLocationIs("/a/a2/a1");
+ verifyNextLocationIs("/a/a2/a2");
+ verifyNextLocationIs("/a/a2/a3");
+ verifyNextLocationIs("/a/a2/a4");
+ verifyNoMoreLocations();
+ query.close();
+ }
+
+ @Test
+ public void shouldDeleteSubgraph() throws Exception {
+ // Verify that all the nodes with large values do indeed have them ...
+        verifyNodesHaveLargeValues("/a/a1", "/a/a2", "/a/a2/a1");
+
+ // Count the number of objects ...
+        assertThat((Long)manager.createQuery("select count(*) from LargeValueEntity").getSingleResult(), is(3L));
+        assertThat((Long)manager.createQuery("select count(*) from PropertiesEntity").getSingleResult(), is(14L));
+        assertThat((Long)manager.createQuery("select count(*) from ChildEntity").getSingleResult(), is(14L));
+
+ // Delete "/a/a1". Note that "/a/a1" has a large value that
is shared by "/a/a2", but it's also the only
+ // user of large value #1.
+ Path path = path("/a/a1");
+ UUID uuid = uuidByPath.get(path);
+
+ query = SubgraphQuery.create(context, manager, uuid, path, Integer.MAX_VALUE);
+ locations = query.getNodeLocations(true);
+ verifyNextLocationIs("/a/a1");
+ verifyNextLocationIs("/a/a1/a1");
+ verifyNextLocationIs("/a/a1/a2");
+ verifyNextLocationIs("/a/a1/a3");
+ verifyNoMoreLocations();
+ query.deleteSubgraph(true);
+ query.close();
+
+ // Commit the transaction, and start another ...
+ manager.getTransaction().commit();
+ manager.getTransaction().begin();
+ manager.flush();
+
+ // Count the number of objects ...
+        assertThat((Long)manager.createQuery("select count(*) from LargeValueEntity").getSingleResult(), is(2L));
+        assertThat((Long)manager.createQuery("select count(*) from PropertiesEntity").getSingleResult(), is(10L));
+        assertThat((Long)manager.createQuery("select count(*) from ChildEntity").getSingleResult(), is(10L));
+
+ // Verify the graph structure is correct ...
+ path = path("/a");
+ uuid = uuidByPath.get(path);
+ query = SubgraphQuery.create(context, manager, uuid, path, 4);
+ locations = query.getNodeLocations(true);
+ verifyNextLocationIs("/a");
+ verifyNextLocationIs("/a/a2");
+ verifyNextLocationIs("/a/a2/a1");
+ verifyNextLocationIs("/a/a2/a2");
+ verifyNextLocationIs("/a/a2/a3");
+ verifyNextLocationIs("/a/a2/a4");
+ verifyNextLocationIs("/a/a2/a1/a1");
+ verifyNextLocationIs("/a/a2/a1/a2");
+ verifyNoMoreLocations();
+ query.close();
+
+ // Verify that all the nodes with large values do indeed have them ...
+        verifyNodesHaveLargeValues("/a/a2", "/a/a2/a1"); // "/a/a1" was deleted
+
+ // Now, load the one node remaining with
+ }
+
+ // @Test
+ // public void shouldCreateMultipleSubgraphQueriesInDatabase() {
+ // }
+
+}
Property changes on: trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/SubgraphQueryTest.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Added: trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum1.txt
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum1.txt	(rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum1.txt	2008-12-03 22:46:57 UTC (rev 652)
@@ -0,0 +1,9 @@
+1. Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Etiam pellentesque, erat non
congue laoreet, felis nulla eleifend nulla, ut varius tellus felis eu risus. Etiam
dapibus. Aenean eget nibh sed urna pellentesque egestas. Aliquam sit amet turpis. Ut
blandit purus sit amet pede dictum gravida. Sed vestibulum, pede vel ornare faucibus,
libero est accumsan ante, a varius diam sapien a tortor. Maecenas luctus. Nullam purus
lectus, varius non, sodales nec, adipiscing eget, nulla. Nam nunc nunc, pretium a,
tincidunt ut, tristique ac, nisi. Vivamus tempor est quis turpis. In hac habitasse platea
dictumst. Nunc porttitor lacinia nunc. Proin dolor magna, imperdiet at, placerat in,
tempor at, magna. Fusce a nulla. Pellentesque sit amet sem nec leo porttitor sagittis.
+
+Integer mauris. Nunc libero est, consectetuer id, lacinia in, tempor a, lacus. Praesent
sed augue sed felis dictum blandit. In ipsum orci, pellentesque vitae, viverra vitae,
aliquam at, ante. Sed imperdiet mauris id lectus. Morbi vel magna sed pede accumsan
vulputate. Aliquam nisi est, molestie vitae, tempus sed, sagittis at, nulla. Cum sociis
natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Curabitur
turpis risus, faucibus at, porttitor ac, venenatis a, diam. Vestibulum blandit consequat
felis. Vestibulum vitae leo. Curabitur a nunc. Phasellus ornare. Morbi tortor est,
fringilla in, volutpat non, volutpat at, risus.
+
+Proin blandit pede tristique arcu. Vivamus ac libero. Donec pharetra leo sit amet orci.
Fusce sit amet velit. Nunc dui. Ut vitae nulla. Suspendisse potenti. Cras et elit.
Maecenas ipsum. Proin at mauris. Suspendisse fringilla libero a neque.
+
+Nullam vitae justo egestas justo lobortis consectetuer. Vestibulum eros. Cras id nisl.
Sed scelerisque commodo erat. Cras dignissim massa quis tellus. Donec auctor rutrum dolor.
Donec leo est, tempor quis, eleifend tincidunt, euismod eu, purus. Duis laoreet consequat
lectus. Cras quis dolor id massa mattis lacinia. Ut sollicitudin nunc in eros. Cras
gravida, purus ac rhoncus tempus, justo eros sodales mi, sit amet tincidunt metus erat sed
neque.
+
+Aliquam vitae nunc. Nullam at lectus ut nunc iaculis commodo. Phasellus augue. Aenean eu
mi eget magna egestas vehicula. Cum sociis natoque penatibus et magnis dis parturient
montes, nascetur ridiculus mus. Aenean molestie, massa eu rhoncus euismod, nisl eros porta
augue, vel dictum orci nibh vestibulum diam. Duis interdum. In hac habitasse platea
dictumst. In hac habitasse platea dictumst. Integer mi erat, egestas ac, aliquet at,
accumsan quis, dui. Praesent consectetuer euismod lorem. Fusce sagittis eros. Vivamus
hendrerit. Pellentesque malesuada, mauris eget mollis eleifend, enim nisl imperdiet
tortor, at pulvinar pede lectus eget augue. Cras convallis, odio nec aliquet rutrum, lorem
ipsum tincidunt mauris, vel rutrum lectus felis at felis. Ut lectus. Nam varius.
\ No newline at end of file
Property changes on: trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum1.txt
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Added: trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum2.txt
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum2.txt	(rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum2.txt	2008-12-03 22:46:57 UTC (rev 652)
@@ -0,0 +1,9 @@
+2. Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Etiam pellentesque, erat non
congue laoreet, felis nulla eleifend nulla, ut varius tellus felis eu risus. Etiam
dapibus. Aenean eget nibh sed urna pellentesque egestas. Aliquam sit amet turpis. Ut
blandit purus sit amet pede dictum gravida. Sed vestibulum, pede vel ornare faucibus,
libero est accumsan ante, a varius diam sapien a tortor. Maecenas luctus. Nullam purus
lectus, varius non, sodales nec, adipiscing eget, nulla. Nam nunc nunc, pretium a,
tincidunt ut, tristique ac, nisi. Vivamus tempor est quis turpis. In hac habitasse platea
dictumst. Nunc porttitor lacinia nunc. Proin dolor magna, imperdiet at, placerat in,
tempor at, magna. Fusce a nulla. Pellentesque sit amet sem nec leo porttitor sagittis.
+
+Integer mauris. Nunc libero est, consectetuer id, lacinia in, tempor a, lacus. Praesent
sed augue sed felis dictum blandit. In ipsum orci, pellentesque vitae, viverra vitae,
aliquam at, ante. Sed imperdiet mauris id lectus. Morbi vel magna sed pede accumsan
vulputate. Aliquam nisi est, molestie vitae, tempus sed, sagittis at, nulla. Cum sociis
natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Curabitur
turpis risus, faucibus at, porttitor ac, venenatis a, diam. Vestibulum blandit consequat
felis. Vestibulum vitae leo. Curabitur a nunc. Phasellus ornare. Morbi tortor est,
fringilla in, volutpat non, volutpat at, risus.
+
+Proin blandit pede tristique arcu. Vivamus ac libero. Donec pharetra leo sit amet orci.
Fusce sit amet velit. Nunc dui. Ut vitae nulla. Suspendisse potenti. Cras et elit.
Maecenas ipsum. Proin at mauris. Suspendisse fringilla libero a neque.
+
+Nullam vitae justo egestas justo lobortis consectetuer. Vestibulum eros. Cras id nisl.
Sed scelerisque commodo erat. Cras dignissim massa quis tellus. Donec auctor rutrum dolor.
Donec leo est, tempor quis, eleifend tincidunt, euismod eu, purus. Duis laoreet consequat
lectus. Cras quis dolor id massa mattis lacinia. Ut sollicitudin nunc in eros. Cras
gravida, purus ac rhoncus tempus, justo eros sodales mi, sit amet tincidunt metus erat sed
neque.
+
+Aliquam vitae nunc. Nullam at lectus ut nunc iaculis commodo. Phasellus augue. Aenean eu
mi eget magna egestas vehicula. Cum sociis natoque penatibus et magnis dis parturient
montes, nascetur ridiculus mus. Aenean molestie, massa eu rhoncus euismod, nisl eros porta
augue, vel dictum orci nibh vestibulum diam. Duis interdum. In hac habitasse platea
dictumst. In hac habitasse platea dictumst. Integer mi erat, egestas ac, aliquet at,
accumsan quis, dui. Praesent consectetuer euismod lorem. Fusce sagittis eros. Vivamus
hendrerit. Pellentesque malesuada, mauris eget mollis eleifend, enim nisl imperdiet
tortor, at pulvinar pede lectus eget augue. Cras convallis, odio nec aliquet rutrum, lorem
ipsum tincidunt mauris, vel rutrum lectus felis at felis. Ut lectus. Nam varius.
\ No newline at end of file
Property changes on: trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum2.txt
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Added: trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum3.txt
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum3.txt	(rev 0)
+++ trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum3.txt	2008-12-03 22:46:57 UTC (rev 652)
@@ -0,0 +1,9 @@
+3. Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Etiam pellentesque, erat non
congue laoreet, felis nulla eleifend nulla, ut varius tellus felis eu risus. Etiam
dapibus. Aenean eget nibh sed urna pellentesque egestas. Aliquam sit amet turpis. Ut
blandit purus sit amet pede dictum gravida. Sed vestibulum, pede vel ornare faucibus,
libero est accumsan ante, a varius diam sapien a tortor. Maecenas luctus. Nullam purus
lectus, varius non, sodales nec, adipiscing eget, nulla. Nam nunc nunc, pretium a,
tincidunt ut, tristique ac, nisi. Vivamus tempor est quis turpis. In hac habitasse platea
dictumst. Nunc porttitor lacinia nunc. Proin dolor magna, imperdiet at, placerat in,
tempor at, magna. Fusce a nulla. Pellentesque sit amet sem nec leo porttitor sagittis.
+
+Integer mauris. Nunc libero est, consectetuer id, lacinia in, tempor a, lacus. Praesent
sed augue sed felis dictum blandit. In ipsum orci, pellentesque vitae, viverra vitae,
aliquam at, ante. Sed imperdiet mauris id lectus. Morbi vel magna sed pede accumsan
vulputate. Aliquam nisi est, molestie vitae, tempus sed, sagittis at, nulla. Cum sociis
natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Curabitur
turpis risus, faucibus at, porttitor ac, venenatis a, diam. Vestibulum blandit consequat
felis. Vestibulum vitae leo. Curabitur a nunc. Phasellus ornare. Morbi tortor est,
fringilla in, volutpat non, volutpat at, risus.
+
+Proin blandit pede tristique arcu. Vivamus ac libero. Donec pharetra leo sit amet orci.
Fusce sit amet velit. Nunc dui. Ut vitae nulla. Suspendisse potenti. Cras et elit.
Maecenas ipsum. Proin at mauris. Suspendisse fringilla libero a neque.
+
+Nullam vitae justo egestas justo lobortis consectetuer. Vestibulum eros. Cras id nisl.
Sed scelerisque commodo erat. Cras dignissim massa quis tellus. Donec auctor rutrum dolor.
Donec leo est, tempor quis, eleifend tincidunt, euismod eu, purus. Duis laoreet consequat
lectus. Cras quis dolor id massa mattis lacinia. Ut sollicitudin nunc in eros. Cras
gravida, purus ac rhoncus tempus, justo eros sodales mi, sit amet tincidunt metus erat sed
neque.
+
+Aliquam vitae nunc. Nullam at lectus ut nunc iaculis commodo. Phasellus augue. Aenean eu
mi eget magna egestas vehicula. Cum sociis natoque penatibus et magnis dis parturient
montes, nascetur ridiculus mus. Aenean molestie, massa eu rhoncus euismod, nisl eros porta
augue, vel dictum orci nibh vestibulum diam. Duis interdum. In hac habitasse platea
dictumst. In hac habitasse platea dictumst. Integer mi erat, egestas ac, aliquet at,
accumsan quis, dui. Praesent consectetuer euismod lorem. Fusce sagittis eros. Vivamus
hendrerit. Pellentesque malesuada, mauris eget mollis eleifend, enim nisl imperdiet
tortor, at pulvinar pede lectus eget augue. Cras convallis, odio nec aliquet rutrum, lorem
ipsum tincidunt mauris, vel rutrum lectus felis at felis. Ut lectus. Nam varius.
\ No newline at end of file
Property changes on: trunk/extensions/dna-connector-store-jpa/src/test/resources/LoremIpsum3.txt
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Modified: trunk/extensions/dna-connector-store-jpa/src/test/resources/log4j.properties
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/resources/log4j.properties	2008-12-02 17:24:06 UTC (rev 651)
+++ trunk/extensions/dna-connector-store-jpa/src/test/resources/log4j.properties	2008-12-03 22:46:57 UTC (rev 652)
@@ -13,6 +13,7 @@
# Hibernate
log4j.logger.org.hibernate=ERROR
+log4j.logger.org.hibernate.hql=ERROR
# C3P0
log4j.logger.com.mchange=ERROR
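
For reference, org.hibernate.hql is the category under which Hibernate 3.x logs its HQL
parser and query translator, so the added line keeps that output suppressed even if the
broader org.hibernate level is later raised for debugging. A minimal standalone sketch
of a log4j.properties using these entries (the CONSOLE appender name and pattern layout
below are illustrative assumptions, not taken from the actual test file):

    # Hypothetical root logger: WARN and above to the console
    log4j.rootLogger=WARN, CONSOLE
    log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
    log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
    log4j.appender.CONSOLE.layout.ConversionPattern=%d %-5p [%c] %m%n

    # Quiet Hibernate, including the HQL parser/translator category
    log4j.logger.org.hibernate=ERROR
    log4j.logger.org.hibernate.hql=ERROR

To see the HQL-to-SQL translation while diagnosing a query, the org.hibernate.hql line
can be switched to DEBUG without affecting the rest of Hibernate's logging.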