Author: rhauch
Date: 2008-11-26 11:42:57 -0500 (Wed, 26 Nov 2008)
New Revision: 650
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/common/NamespaceEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java
trunk/extensions/dna-connector-store-jpa/src/test/resources/log4j.properties
Log:
DNA-40 Persistent storage for information not stored in other repository sources
Improved functionality and testing, including some performance testing of creating 100s
and 1000s of nodes. (These tests are commented out due to the time required to run
them.)
Also made minor improvements to the Graph API. Specifically, added the ability to get the
UUID out of a Location, made CreateNodeRequest.toString() more readable, and corrected
the interface returned from the Graph.create(...) and Graph.Batch.create(...) methods
(which previously required two .and() calls).
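As an illustration (a sketch drawn from the new JpaConnectionTest cases; the "connection"
and "context" variables come from that test's setup), a batch now needs only a single
.and() between a create(...) and the following statement, and a node's UUID can be read
directly from its Location:

    Graph graph = Graph.create(connection, context);
    graph.batch().set("propA").to("valueA").on("/")
         .and().create("/a").with("propB", "valueB").and("propC", "valueC")
         .and().create("/b").with("propD", "valueD").and("propE", "valueE")
         .execute();
    UUID uuidOfNodeA = graph.getNodeAt("/a").getLocation().getUuid();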
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-11-26 16:42:23 UTC (rev 649)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-11-26 16:42:57 UTC (rev 650)
@@ -36,7 +36,6 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import java.util.UUID;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
@@ -47,6 +46,7 @@
import net.jcip.annotations.Immutable;
import net.jcip.annotations.NotThreadSafe;
import org.jboss.dna.common.util.IoUtil;
+import org.jboss.dna.common.util.Logger;
import org.jboss.dna.common.util.StringUtil;
import org.jboss.dna.connector.store.jpa.JpaConnectorI18n;
import org.jboss.dna.connector.store.jpa.models.common.NamespaceEntity;
@@ -73,6 +73,7 @@
import org.jboss.dna.graph.requests.MoveBranchRequest;
import org.jboss.dna.graph.requests.ReadAllChildrenRequest;
import org.jboss.dna.graph.requests.ReadAllPropertiesRequest;
+import org.jboss.dna.graph.requests.ReadNodeRequest;
import org.jboss.dna.graph.requests.ReadPropertyRequest;
import org.jboss.dna.graph.requests.UpdatePropertiesRequest;
import org.jboss.dna.graph.requests.processor.RequestProcessor;
@@ -89,9 +90,11 @@
private final NameFactory nameFactory;
private final Namespaces namespaces;
private final UUID rootNodeUuid;
+ private final String rootNodeUuidString;
private final Serializer serializer;
private final long largeValueMinimumSizeInBytes;
private final boolean compressData;
+ protected final Logger logger;
/**
* @param sourceName
@@ -116,9 +119,12 @@
this.nameFactory = context.getValueFactories().getNameFactory();
this.namespaces = new Namespaces(entityManager);
this.rootNodeUuid = rootNodeUuid;
- this.serializer = new Serializer(context, this, true);
+ this.rootNodeUuidString = this.rootNodeUuid.toString();
+ this.serializer = new Serializer(context, true);
this.largeValueMinimumSizeInBytes = largeValueMinimumSizeInBytes;
this.compressData = compressData;
+ this.logger = getExecutionContext().getLogger(getClass());
+
// Start the transaction ...
this.entities.getTransaction().begin();
}
@@ -130,6 +136,7 @@
*/
@Override
public void process( CreateNodeRequest request ) {
+ logger.trace(request.toString());
Location actualLocation = null;
String childUuidString = null;
try {
@@ -141,33 +148,8 @@
// We need to look for an existing UUID property in the request,
            // so since we have to iterate through the properties, go ahead an serialize them right away ...
- Set<String> largeValueHexHashes = new HashSet<String>();
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- OutputStream os = compressData ? new ZipOutputStream(baos) : baos;
- ObjectOutputStream oos = new ObjectOutputStream(os);
- int numProperties = 0;
- try {
- for (Property property : request.properties()) {
- if (property.getName().equals(DnaLexicon.UUID)) {
-                        childUuidString = stringFactory.create(property.getFirstValue());
- }
-                    if (serializer.serializeProperty(oos, property, largeValueHexHashes)) ++numProperties;
- }
- } finally {
- oos.close();
- }
-            String largeValueHexHashesString = createHexValuesString(largeValueHexHashes);
-            if (childUuidString == null) childUuidString = stringFactory.create(UUID.randomUUID());
+ childUuidString = createProperties(null, request.properties());
- // Create the PropertiesEntity ...
- NodeId nodeId = new NodeId(childUuidString);
- PropertiesEntity props = new PropertiesEntity(nodeId);
- props.setData(baos.toByteArray());
- props.setCompressed(compressData);
- props.setPropertyCount(numProperties);
- props.setLargeValueKeys(largeValueHexHashesString);
- entities.persist(props);
-
// Find or create the namespace for the child ...
Name childName = request.named();
String childNsUri = childName.getNamespaceUri();
@@ -177,21 +159,23 @@
            // Find the largest SNS index in the existing ChildEntity objects with the same name ...
String childLocalName = childName.getLocalName();
            Query query = entities.createNamedQuery("ChildEntity.findMaximumSnsIndex");
- query.setParameter("uuid", parentUuidString);
+ query.setParameter("parentUuid", parentUuidString);
query.setParameter("ns", nsId);
query.setParameter("childName", childLocalName);
int nextSnsIndex = 1;
try {
- nextSnsIndex = (Integer)query.getSingleResult();
+ Integer result = (Integer)query.getSingleResult();
+ nextSnsIndex = result != null ? result + 1 : 1;
} catch (NoResultException e) {
}
// Find the largest child index in the existing ChildEntity objects ...
            query = entities.createNamedQuery("ChildEntity.findMaximumChildIndex");
- query.setParameter("uuid", parentUuidString);
+ query.setParameter("parentUuid", parentUuidString);
int nextIndexInParent = 1;
try {
- nextIndexInParent = (Integer)query.getSingleResult() + 1;
+ Integer result = (Integer)query.getSingleResult();
+ nextIndexInParent = result != null ? result + 1 : 1;
} catch (NoResultException e) {
}
@@ -199,28 +183,100 @@
NamespaceEntity ns = entities.find(NamespaceEntity.class, nsId);
assert ns != null;
ChildId id = new ChildId(parentUuidString, childUuidString);
-            ChildEntity entity = new ChildEntity(id, nextIndexInParent, ns, childLocalName, nextSnsIndex + 1);
+            ChildEntity entity = new ChildEntity(id, nextIndexInParent, ns, childLocalName, nextSnsIndex);
entities.persist(entity);
+ // Look up the actual path, regardless of the supplied path...
+ assert childUuidString != null;
+ assert actual.location.getPath() != null;
+            Path path = pathFactory.create(actual.location.getPath(), childName, nextSnsIndex);
+ actualLocation = new Location(path, UUID.fromString(childUuidString));
+
} catch (Throwable e) { // Includes PathNotFoundException
request.setError(e);
+ logger.trace(e, "Problem " + request);
return;
}
- // Look up the actual path, regardless of the supplied path...
- assert childUuidString != null;
- Path path = getPathForUuid(childUuidString);
- actualLocation = new Location(path, UUID.fromString(childUuidString));
request.setActualLocationOfNode(actualLocation);
}
/**
* {@inheritDoc}
*
+ * @see
org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.ReadNodeRequest)
+ */
+ @SuppressWarnings( "unchecked" )
+ @Override
+ public void process( ReadNodeRequest request ) {
+ logger.trace(request.toString());
+ Location actualLocation = null;
+ try {
+ Location location = request.at();
+ ActualLocation actual = getActualLocation(location);
+ String parentUuidString = actual.uuid;
+ actualLocation = actual.location;
+ Path path = actualLocation.getPath();
+
+            // Record the UUID as a property, since it's not stored in the serialized properties...
+ request.addProperty(actualLocation.getIdProperty(DnaLexicon.UUID));
+
+ // Find the properties entity for this node ...
+            Query query = entities.createNamedQuery("PropertiesEntity.findByUuid");
+ query.setParameter("uuid", parentUuidString);
+ try {
+ PropertiesEntity entity = (PropertiesEntity)query.getSingleResult();
+
+ // Deserialize the properties ...
+ boolean compressed = entity.isCompressed();
+                Collection<Property> properties = new LinkedList<Property>();
+ byte[] data = entity.getData();
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new ZipInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.deserializeAllProperties(ois, properties, this);
+ for (Property property : properties) {
+ request.addProperty(property);
+ }
+ } finally {
+ ois.close();
+ }
+
+ } catch (NoResultException e) {
+ // No properties, but that's okay...
+ }
+ // Find the children of the supplied node ...
+            query = entities.createNamedQuery("ChildEntity.findAllUnderParent");
+ query.setParameter("parentUuidString", parentUuidString);
+ List<ChildEntity> children = query.getResultList();
+ for (ChildEntity child : children) {
+ String namespaceUri = child.getChildNamespace().getUri();
+ String localName = child.getChildName();
+ Name childName = nameFactory.create(namespaceUri, localName);
+ int sns = child.getSameNameSiblingIndex();
+ Path childPath = pathFactory.create(path, childName, sns);
+ String childUuidString = child.getId().getChildUuidString();
+                Location childLocation = new Location(childPath, UUID.fromString(childUuidString));
+ request.addChild(childLocation);
+ }
+ } catch (NoResultException e) {
+ // there are no properties (probably not expected, but still okay) ...
+ } catch (Throwable e) { // Includes PathNotFoundException
+ request.setError(e);
+ return;
+ }
+ if (actualLocation != null) request.setActualLocationOfNode(actualLocation);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
* @see
org.jboss.dna.graph.requests.processor.RequestProcessor#process(org.jboss.dna.graph.requests.ReadAllChildrenRequest)
*/
@SuppressWarnings( "unchecked" )
@Override
public void process( ReadAllChildrenRequest request ) {
+ logger.trace(request.toString());
Location actualLocation = null;
try {
Location location = request.of();
@@ -231,14 +287,13 @@
// Find the children of the supplied node ...
            Query query = entities.createNamedQuery("ChildEntity.findAllUnderParent");
- query.setParameter("uuid", parentUuidString);
+ query.setParameter("parentUuidString", parentUuidString);
List<ChildEntity> children = query.getResultList();
for (ChildEntity child : children) {
String namespaceUri = child.getChildNamespace().getUri();
String localName = child.getChildName();
Name childName = nameFactory.create(namespaceUri, localName);
- Integer sns = child.getSameNameSiblingIndex();
- if (sns == null) sns = new Integer(1);
+ int sns = child.getSameNameSiblingIndex();
Path childPath = pathFactory.create(path, childName, sns);
String childUuidString = child.getId().getChildUuidString();
                Location childLocation = new Location(childPath, UUID.fromString(childUuidString));
@@ -260,6 +315,7 @@
*/
@Override
public void process( ReadAllPropertiesRequest request ) {
+ logger.trace(request.toString());
Location actualLocation = null;
try {
Location location = request.at();
@@ -284,7 +340,7 @@
InputStream is = compressed ? new ZipInputStream(bais) : bais;
ObjectInputStream ois = new ObjectInputStream(is);
try {
- serializer.deserializeAllProperties(ois, properties);
+ serializer.deserializeAllProperties(ois, properties, this);
for (Property property : properties) {
request.addProperty(property);
}
@@ -307,6 +363,7 @@
*/
@Override
public void process( ReadPropertyRequest request ) {
+ logger.trace(request.toString());
// Small optimization ...
final Name propertyName = request.named();
if (DnaLexicon.UUID.equals(propertyName)) {
@@ -342,7 +399,8 @@
InputStream is = compressed ? new ZipInputStream(bais) : bais;
ObjectInputStream ois = new ObjectInputStream(is);
try {
- serializer.deserializeSomeProperties(ois, properties, propertyName);
+ Serializer.LargeValues skippedLargeValues = Serializer.NO_LARGE_VALUES;
+                    serializer.deserializeSomeProperties(ois, properties, this, skippedLargeValues, propertyName);
for (Property property : properties) {
request.setProperty(property); // should be only one property
}
@@ -365,6 +423,7 @@
*/
@Override
public void process( UpdatePropertiesRequest request ) {
+ logger.trace(request.toString());
Location actualLocation = null;
try {
Location location = request.on();
@@ -374,49 +433,60 @@
// Find the properties entity for this node ...
            Query query = entities.createNamedQuery("PropertiesEntity.findByUuid");
query.setParameter("uuid", actual.uuid);
- PropertiesEntity entity = (PropertiesEntity)query.getSingleResult();
+ PropertiesEntity entity = null;
+ try {
+ entity = (PropertiesEntity)query.getSingleResult();
- // Determine which large values are referenced ...
- String largeValueHexKeys = entity.getLargeValueKeys();
- Collection<String> hexKeys = null;
- if (largeValueHexKeys != null) {
- hexKeys = createHexValues(largeValueHexKeys);
- }
+ // Determine which large values are referenced ...
+ Collection<String> hexKeys = null;
+ String largeValueHexKeys = entity.getLargeValueKeys();
+ if (largeValueHexKeys != null) {
+ hexKeys = createHexValues(largeValueHexKeys);
+ }
- // Now serialize the properties and save them ...
- Collection<String> newHexKeys = new HashSet<String>();
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- OutputStream os = compressData ? new ZipOutputStream(baos) : baos;
- ObjectOutputStream oos = new ObjectOutputStream(os);
- int numProperties = 0;
- try {
- for (Property property : request.properties()) {
-                        if (serializer.serializeProperty(oos, property, newHexKeys)) ++numProperties;
+                // Prepare the streams so we can deserialize all existing properties and reserialize the old and updated
+ // properties ...
+ boolean compressed = entity.isCompressed();
+ ByteArrayInputStream bais = new ByteArrayInputStream(entity.getData());
+ InputStream is = compressed ? new ZipInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ OutputStream os = compressed ? new ZipOutputStream(baos) : baos;
+ ObjectOutputStream oos = new ObjectOutputStream(os);
+ int numProperties = 0;
+ SkippedLargeValues skipped = new SkippedLargeValues();
+ RecordingLargeValues largeValues = new RecordingLargeValues();
+ try {
+                    numProperties = serializer.reserializeProperties(ois, oos, request.properties(), largeValues, skipped);
+ } finally {
+ try {
+ ois.close();
+ } finally {
+ oos.close();
+ }
}
- } finally {
- oos.close();
- }
- largeValueHexKeys = createHexValuesString(newHexKeys);
- entity.setPropertyCount(numProperties);
- entity.setData(baos.toByteArray());
- entity.setCompressed(compressData);
- entity.setLargeValueKeys(largeValueHexKeys);
+ largeValueHexKeys = createHexValuesString(largeValues.writtenKeys);
+ entity.setPropertyCount(numProperties);
+ entity.setData(baos.toByteArray());
+ entity.setCompressed(compressData);
+ entity.setLargeValueKeys(largeValueHexKeys);
- // Update the large values that used to be reference but no longer are ...
- if (hexKeys != null) {
- hexKeys.removeAll(newHexKeys);
- for (String oldHexKey : hexKeys) {
-                    LargeValueEntity largeValue = entities.find(LargeValueEntity.class, oldHexKey);
- if (largeValue != null) {
- if (largeValue.decrementUsageCount() == 0) {
- entities.remove(entity);
+                // Update the large values that used to be reference but no longer are ...
+ if (hexKeys != null) {
+ for (String oldHexKey : skipped.skippedKeys) {
+                        LargeValueEntity largeValue = entities.find(LargeValueEntity.class, oldHexKey);
+ if (largeValue != null) {
+ if (largeValue.decrementUsageCount() == 0) {
+ entities.remove(largeValue);
+ }
}
}
}
+ } catch (NoResultException e) {
+ // there are no properties yet ...
+ createProperties(actual.uuid, request.properties());
}
- } catch (NoResultException e) {
- // there are no properties (probably not expected, but still okay) ...
} catch (Throwable e) { // Includes PathNotFoundException
request.setError(e);
return;
@@ -431,6 +501,7 @@
*/
@Override
public void process( CopyBranchRequest request ) {
+ logger.trace(request.toString());
}
/**
@@ -440,6 +511,7 @@
*/
@Override
public void process( DeleteBranchRequest request ) {
+ logger.trace(request.toString());
}
/**
@@ -449,6 +521,7 @@
*/
@Override
public void process( MoveBranchRequest request ) {
+ logger.trace(request.toString());
Location actualOldLocation = null;
Location actualNewLocation = null;
try {
@@ -485,7 +558,7 @@
String childLocalName = fromEntity.getChildName();
NamespaceEntity ns = fromEntity.getChildNamespace();
                Query query = entities.createNamedQuery("ChildEntity.findMaximumSnsIndex");
- query.setParameter("uuid", toUuidString);
+ query.setParameter("parentUuidString", toUuidString);
query.setParameter("ns", ns.getId());
query.setParameter("childName", childLocalName);
int nextSnsIndex = 1;
@@ -496,7 +569,7 @@
                // Find the largest child index in the existing ChildEntity objects ...
                query = entities.createNamedQuery("ChildEntity.findMaximumChildIndex");
- query.setParameter("uuid", toUuidString);
+ query.setParameter("parentUuidString", toUuidString);
int nextIndexInParent = 1;
try {
nextIndexInParent = (Integer)query.getSingleResult() + 1;
@@ -528,6 +601,38 @@
}
+ protected String createProperties( String uuidString,
+                                       Collection<Property> properties ) throws IOException {
+ RecordingLargeValues largeValues = new RecordingLargeValues();
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ OutputStream os = compressData ? new ZipOutputStream(baos) : baos;
+ ObjectOutputStream oos = new ObjectOutputStream(os);
+ int numProperties = properties.size();
+ try {
+ oos.writeInt(numProperties);
+ for (Property property : properties) {
+                if (uuidString == null && property.getName().equals(DnaLexicon.UUID)) {
+ uuidString = stringFactory.create(property.getFirstValue());
+ }
+                if (serializer.serializeProperty(oos, property, largeValues)) ++numProperties;
+ }
+ } finally {
+ oos.close();
+ }
+        String largeValueHexHashesString = createHexValuesString(largeValues.writtenKeys);
+ if (uuidString == null) uuidString = stringFactory.create(UUID.randomUUID());
+
+ // Create the PropertiesEntity ...
+ NodeId nodeId = new NodeId(uuidString);
+ PropertiesEntity props = new PropertiesEntity(nodeId);
+ props.setData(baos.toByteArray());
+ props.setCompressed(compressData);
+ props.setPropertyCount(numProperties);
+ props.setLargeValueKeys(largeValueHexHashesString);
+ entities.persist(props);
+ return uuidString;
+ }
+
/**
* {@inheritDoc}
*
@@ -571,38 +676,53 @@
// Look for the UUID in the original ...
Property uuidProperty = original.getIdProperty(DnaLexicon.UUID);
-        String uuidString = uuidProperty.isEmpty() ? null : stringFactory.create(uuidProperty.getFirstValue());
+        String uuidString = uuidProperty != null && !uuidProperty.isEmpty() ? stringFactory.create(uuidProperty.getFirstValue()) : null;
        // If the original location has a UUID, then use that to find the child entity that represents the location ...
if (uuidString != null) {
// The original has a UUID, so use that to find the child entity.
// Then walk up the ancestors and build the path.
+ String nodeUuidString = uuidString;
            LinkedList<Path.Segment> segments = new LinkedList<Path.Segment>();
+            // while (uuidString != null && !uuidString.equals(this.rootNodeUuidString)) {
+            // // Find the parent of the child, along with the child's name and SNS index ...
+            // Query query = entities.createNamedQuery("ChildEntity.findValuesByChildUuid");
+ // query.setParameter("childUuidString", uuidString);
+ // try {
+ // Object[] record = (Object[])query.getSingleResult();
+ // String parentUuidString = (String)record[0];
+ // String uri = (String)record[1];
+ // String localName = (String)record[2];
+ // int sns = (Integer)record[3];
+            // // Now create the path segment and set the next child UUID as the parent of this child ...
+ // Name name = nameFactory.create(uri, localName);
+ // segments.addFirst(pathFactory.createSegment(name, sns));
+ // uuidString = parentUuidString;
+ // } catch (NoResultException e) {
+ // uuidString = null;
+ // }
+ // }
+ // Path fullPath = pathFactory.createAbsolutePath(segments);
+            // return new ActualLocation(new Location(fullPath, uuidProperty), nodeUuidString, null);
ChildEntity entity = null;
- ChildEntity childEntity = null;
- do {
- String childUuid = uuidString;
+            while (uuidString != null && !uuidString.equals(this.rootNodeUuidString)) {
                Query query = entities.createNamedQuery("ChildEntity.findByChildUuid");
- query.setParameter("childUuidString", childUuid);
+ query.setParameter("childUuidString", uuidString);
try {
// Find the parent of the UUID ...
entity = (ChildEntity)query.getSingleResult();
- if (childEntity == null) childEntity = entity;
String localName = entity.getChildName();
String uri = entity.getChildNamespace().getUri();
- Integer sns = entity.getSameNameSiblingIndex();
+ int sns = entity.getSameNameSiblingIndex();
Name name = nameFactory.create(uri, localName);
- if (sns != null) {
- segments.addFirst(pathFactory.createSegment(name, sns));
- } else {
- segments.addFirst(pathFactory.createSegment(name));
- }
+ segments.addFirst(pathFactory.createSegment(name, sns));
+ uuidString = entity.getId().getParentUuidString();
} catch (NoResultException e) {
- entity = null;
+ uuidString = null;
}
- } while (entity != null);
+ }
Path fullPath = pathFactory.createAbsolutePath(segments);
-            return new ActualLocation(new Location(fullPath, uuidProperty), uuidString, childEntity);
+            return new ActualLocation(new Location(fullPath, uuidProperty), nodeUuidString, entity);
}
// There is no UUID, so look for a path ...
@@ -615,9 +735,36 @@
// Walk the child entities, starting at the root, down the to the path ...
if (path.isRoot()) {
-            return new ActualLocation(original.with(rootNodeUuid), rootNodeUuid.toString(), null);
+            return new ActualLocation(original.with(rootNodeUuid), rootNodeUuidString, null);
}
- String parentUuid = this.rootNodeUuid.toString();
+ String parentUuid = this.rootNodeUuidString;
+ // String childUuid = null;
+ // for (Path.Segment segment : path) {
+ // Name name = segment.getName();
+ // String localName = name.getLocalName();
+ // String nsUri = name.getNamespaceUri();
+ // int snsIndex = segment.hasIndex() ? segment.getIndex() : 1;
+ //
+        // Query query = entities.createNamedQuery("ChildEntity.findChildUuidByPathSegment");
+ // query.setParameter("parentUuidString", parentUuid);
+ // query.setParameter("nsUri", nsUri);
+ // query.setParameter("childName", localName);
+ // query.setParameter("sns", snsIndex);
+ // try {
+ // childUuid = (String)query.getSingleResult();
+ // } catch (NoResultException e) {
+        // // Unable to complete the path, so prepare the exception by determining the lowest path that exists ...
+ // Path lowest = path;
+ // while (lowest.getLastSegment() != segment) {
+ // lowest = lowest.getParent();
+ // }
+ // lowest = lowest.getParent();
+ // throw new PathNotFoundException(original, lowest);
+ // }
+ // parentUuid = childUuid;
+ // }
+        // return new ActualLocation(original.with(UUID.fromString(childUuid)), childUuid, null);
+
ChildEntity child = null;
for (Path.Segment segment : path) {
child = findByPathSegment(parentUuid, segment);
@@ -630,6 +777,7 @@
lowest = lowest.getParent();
throw new PathNotFoundException(original, lowest);
}
+ parentUuid = child.getId().getChildUuidString();
}
assert child != null;
uuidString = child.getId().getChildUuidString();
@@ -653,6 +801,7 @@
String localName = name.getLocalName();
String nsUri = name.getNamespaceUri();
Integer nsId = namespaces.getId(nsUri, false);
+ int snsIndex = pathSegment.hasIndex() ? pathSegment.getIndex() : 1;
if (nsId == null) {
            // The namespace can't be found, then certainly the node won't be found ...
return null;
@@ -661,11 +810,7 @@
query.setParameter("parentUuidString", parentUuid);
query.setParameter("ns", nsId);
query.setParameter("childName", localName);
- if (pathSegment.hasIndex()) {
- query.setParameter("sns", localName);
- } else {
- query.setParameter("sns", null);
- }
+ query.setParameter("sns", snsIndex);
try {
return (ChildEntity)query.getSingleResult();
} catch (NoResultException e) {
@@ -673,38 +818,6 @@
}
}
- /**
- * Build up the path for the node with the supplied UUID.
- *
- * @param uuidString the UUID of the node
- * @return the path to the node; never null
- */
- protected Path getPathForUuid( String uuidString ) {
- ChildEntity entity = null;
- String childUuid = uuidString;
- LinkedList<Path.Segment> segments = new LinkedList<Path.Segment>();
- do {
- // Find the parent of the UUID ...
-            Query query = entities.createNamedQuery("ChildEntity.findByChildUuid");
- query.setParameter("childUuidString", childUuid);
- try {
- entity = (ChildEntity)query.getSingleResult();
- String localName = entity.getChildName();
- String uri = entity.getChildNamespace().getUri();
- Integer sns = entity.getSameNameSiblingIndex();
- Name name = nameFactory.create(uri, localName);
- if (sns != null) {
- segments.addFirst(pathFactory.createSegment(name, sns));
- } else {
- segments.addFirst(pathFactory.createSegment(name));
- }
- } catch (NoResultException e) {
- entity = null;
- }
- } while (entity != null);
- return pathFactory.createAbsolutePath(segments);
- }
-
protected String createHexValuesString( Collection<String> hexValues ) {
if (hexValues == null || hexValues.isEmpty()) return null;
StringBuilder sb = new StringBuilder();
@@ -810,6 +923,83 @@
}
}
+ protected class RecordingLargeValues implements LargeValues {
+ protected Collection<String> readKeys = new HashSet<String>();
+ protected Collection<String> writtenKeys = new HashSet<String>();
+
+ RecordingLargeValues() {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see
org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#getMinimumSize()
+ */
+ public long getMinimumSize() {
+ return BasicRequestProcessor.this.getMinimumSize();
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see
org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#read(org.jboss.dna.graph.properties.ValueFactories,
+ * byte[], long)
+ */
+ public Object read( ValueFactories valueFactories,
+ byte[] hash,
+ long length ) throws IOException {
+ String key = StringUtil.getHexString(hash);
+ readKeys.add(key);
+ return BasicRequestProcessor.this.read(valueFactories, hash, length);
+ }
+
+ public void write( byte[] hash,
+ long length,
+ PropertyType type,
+ Object value ) throws IOException {
+ String key = StringUtil.getHexString(hash);
+ writtenKeys.add(key);
+ BasicRequestProcessor.this.write(hash, length, type, value);
+ }
+ }
+
+ protected class SkippedLargeValues implements LargeValues {
+ protected Collection<String> skippedKeys = new HashSet<String>();
+
+ SkippedLargeValues() {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see
org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#getMinimumSize()
+ */
+ public long getMinimumSize() {
+ return BasicRequestProcessor.this.getMinimumSize();
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see
org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#read(org.jboss.dna.graph.properties.ValueFactories,
+ * byte[], long)
+ */
+ public Object read( ValueFactories valueFactories,
+ byte[] hash,
+ long length ) throws IOException {
+ String key = StringUtil.getHexString(hash);
+ skippedKeys.add(key);
+ return null;
+ }
+
+ public void write( byte[] hash,
+ long length,
+ PropertyType type,
+ Object value ) {
+ throw new UnsupportedOperationException();
+ }
+ }
+
@Immutable
protected static class ActualLocation {
/** The actual location */
@@ -828,6 +1018,16 @@
this.uuid = uuid;
this.childEntity = childEntity;
}
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+            return this.location.toString() + " (uuid=" + uuid + ") " + childEntity;
+ }
}
protected static class Namespaces {
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildEntity.java 2008-11-26 16:42:23 UTC (rev 649)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildEntity.java 2008-11-26 16:42:57 UTC (rev 650)
@@ -28,8 +28,8 @@
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
+import javax.persistence.Table;
import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
import org.jboss.dna.common.util.HashCode;
import org.jboss.dna.connector.store.jpa.models.common.NamespaceEntity;
@@ -42,32 +42,38 @@
* @author Randall Hauch
*/
@Entity
-@javax.persistence.Table( name = "DNA_BASIC_CHILDREN" )
-@Table( appliesTo = "DNA_BASIC_CHILDREN", indexes = @Index( name = "CHILDINDEX_INX", columnNames = {"PARENT_UUID", "CHILD_INDEX"} ) )
+@Table( name = "DNA_BASIC_CHILDREN" )
+@org.hibernate.annotations.Table( appliesTo = "DNA_BASIC_CHILDREN", indexes = {
+    @Index( name = "CHILDINDEX_INX", columnNames = {"PARENT_UUID", "CHILD_INDEX"} ),
+    @Index( name = "CHILDUUID_INX", columnNames = {"CHILD_UUID"} ),
+    @Index( name = "CHILDNAME_INX", columnNames = {"PARENT_UUID", "CHILD_NAME_NS_ID", "CHILD_NAME_LOCAL", "SNS_INDEX"} )} )
@NamedQueries( {
-    @NamedQuery( name = "ChildEntity.findByPathSegment", query = "select child from ChildEntity as child where child.id.parentUuidString = :parentUuid AND child.childNamespace.id = :ns AND child.childName = :childName AND child.sameNameSiblingIndex = :sns" ),
-    @NamedQuery( name = "ChildEntity.findAllUnderParent", query = "select child from ChildEntity as child where child.id.parentUuidString = :parentUuid" ),
-    @NamedQuery( name = "ChildEntity.findByChildUuid", query = "select child from ChildEntity as child where child.id.childUuidString = :childUuidString" ),
-    @NamedQuery( name = "ChildEntity.findMaximumSnsIndex", query = "select max(child.sameNameSiblingIndex) from ChildEntity as child where child.id.parentUuidString = :parentUuid AND child.childNamespace.id = :ns AND child.childName = :childName" ),
-    @NamedQuery( name = "ChildEntity.findMaximumChildIndex", query = "select max(child.indexInParent) from ChildEntity as child where child.id.parentUuidString = :parentUuid" )} )
+    @NamedQuery( name = "ChildEntity.findByPathSegment", query = "select child from ChildEntity as child where child.id.parentUuidString = :parentUuidString AND child.childNamespace.id = :ns AND child.childName = :childName AND child.sameNameSiblingIndex = :sns and child.deleted is null" ),
+    @NamedQuery( name = "ChildEntity.findAllUnderParent", query = "select child from ChildEntity as child where child.id.parentUuidString = :parentUuidString and child.deleted is null" ),
+    @NamedQuery( name = "ChildEntity.findByChildUuid", query = "select child from ChildEntity as child where child.id.childUuidString = :childUuidString and child.deleted is null" ),
+    @NamedQuery( name = "ChildEntity.findMaximumSnsIndex", query = "select max(child.sameNameSiblingIndex) from ChildEntity as child where child.id.parentUuidString = :parentUuid AND child.childNamespace.id = :ns AND child.childName = :childName and child.deleted is null" ),
+    @NamedQuery( name = "ChildEntity.findMaximumChildIndex", query = "select max(child.indexInParent) from ChildEntity as child where child.id.parentUuidString = :parentUuid and child.deleted is null" )} )
public class ChildEntity {
@Id
private ChildId id;
@Column( name = "CHILD_INDEX", nullable = false, unique = false )
- private Integer indexInParent;
+ private int indexInParent;
@ManyToOne
@JoinColumn( name = "CHILD_NAME_NS_ID", nullable = false )
private NamespaceEntity childNamespace;
-    @Column( name = "CHILD_NAME_LOCAL", nullable = true, unique = false, length = 512 )
+    @Column( name = "CHILD_NAME_LOCAL", nullable = false, unique = false, length = 512 )
private String childName;
- @Column( name = "SNS_INDEX", nullable = true, unique = false )
- private Integer sameNameSiblingIndex;
+ @Column( name = "SNS_INDEX", nullable = false, unique = false )
+ private int sameNameSiblingIndex;
+ @Column( name = "DELETED", nullable = true, unique = false )
+ private Boolean deleted;
+
public ChildEntity() {
}
@@ -110,14 +116,14 @@
/**
* @return indexInParent
*/
- public Integer getIndexInParent() {
+ public int getIndexInParent() {
return indexInParent;
}
/**
* @param index Sets indexInParent to the specified value.
*/
- public void setIndexInParent( Integer index ) {
+ public void setIndexInParent( int index ) {
this.indexInParent = index;
}
@@ -152,18 +158,32 @@
/**
* @return sameNameSiblingIndex
*/
- public Integer getSameNameSiblingIndex() {
+ public int getSameNameSiblingIndex() {
return sameNameSiblingIndex;
}
/**
* @param sameNameSiblingIndex Sets sameNameSiblingIndex to the specified value.
*/
- public void setSameNameSiblingIndex( Integer sameNameSiblingIndex ) {
+ public void setSameNameSiblingIndex( int sameNameSiblingIndex ) {
this.sameNameSiblingIndex = sameNameSiblingIndex;
}
/**
+ * @return deleted
+ */
+ public boolean isDeleted() {
+ return Boolean.TRUE.equals(deleted);
+ }
+
+ /**
+ * @param deleted Sets deleted to the specified value.
+ */
+ public void setDeleted( boolean deleted ) {
+ this.deleted = deleted ? Boolean.TRUE : null;
+ }
+
+ /**
* {@inheritDoc}
*
* @see java.lang.Object#hashCode()
@@ -205,7 +225,7 @@
sb.append('{').append(childNamespace).append("}:");
}
sb.append(childName);
-        if (sameNameSiblingIndex != null && sameNameSiblingIndex.intValue() > 1) {
+ if (sameNameSiblingIndex > 1) {
sb.append('[').append(sameNameSiblingIndex).append(']');
}
if (id != null) {
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/common/NamespaceEntity.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/common/NamespaceEntity.java 2008-11-26 16:42:23 UTC (rev 649)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/common/NamespaceEntity.java 2008-11-26 16:42:57 UTC (rev 650)
@@ -31,6 +31,8 @@
import javax.persistence.NamedQuery;
import javax.persistence.NoResultException;
import javax.persistence.Query;
+import javax.persistence.Table;
+import org.hibernate.annotations.Index;
import org.jboss.dna.common.util.CheckArg;
/**
@@ -39,9 +41,11 @@
*
* @author Randall Hauch
*/
-@Entity( name = "DNA_NAMESPACES" )
-@NamedQueries( {@NamedQuery( name = "NamespaceEntity.findAll", query = "SELECT ns FROM DNA_NAMESPACES AS ns" ),
-    @NamedQuery( name = "NamespaceEntity.findByUri", query = "SELECT ns FROM DNA_NAMESPACES AS ns WHERE ns.uri = ?1" )} )
+@Entity
+@Table( name = "DNA_NAMESPACES" )
+@org.hibernate.annotations.Table( appliesTo = "DNA_NAMESPACES", indexes = @Index( name = "NS_URI_INX", columnNames = {"URI"} ) )
+@NamedQueries( {@NamedQuery( name = "NamespaceEntity.findAll", query = "select ns from NamespaceEntity as ns" ),
+    @NamedQuery( name = "NamespaceEntity.findByUri", query = "select ns from NamespaceEntity as ns where ns.uri = ?1" )} )
public class NamespaceEntity {
@Id
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java 2008-11-26 16:42:23 UTC (rev 649)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java 2008-11-26 16:42:57 UTC (rev 650)
@@ -29,12 +29,13 @@
import java.net.URI;
import java.security.NoSuchAlgorithmException;
import java.util.Collection;
+import java.util.HashMap;
import java.util.HashSet;
+import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.jboss.dna.common.SystemFailureException;
import org.jboss.dna.common.util.SecureHash;
-import org.jboss.dna.common.util.StringUtil;
import org.jboss.dna.connector.store.jpa.models.basic.LargeValueEntity;
import org.jboss.dna.graph.DnaLexicon;
import org.jboss.dna.graph.ExecutionContext;
@@ -53,17 +54,16 @@
*/
public class Serializer {
+ public static final LargeValues NO_LARGE_VALUES = new NoLargeValues();
+
private final PropertyFactory propertyFactory;
private final ValueFactories valueFactories;
- private final LargeValues largeValues;
private final boolean excludeUuidProperty;
public Serializer( ExecutionContext context,
- LargeValues largeValues,
boolean excludeUuidProperty ) {
this.propertyFactory = context.getPropertyFactory();
this.valueFactories = context.getValueFactories();
- this.largeValues = largeValues;
this.excludeUuidProperty = excludeUuidProperty;
}
@@ -91,6 +91,25 @@
long length ) throws IOException;
}
+ protected static class NoLargeValues implements LargeValues {
+ public long getMinimumSize() {
+ return Long.MAX_VALUE;
+ }
+
+ public void write( byte[] hash,
+ long length,
+ PropertyType type,
+ Object value ) {
+ throw new UnsupportedOperationException();
+ }
+
+ public Object read( ValueFactories valueFactories,
+ byte[] hash,
+ long length ) {
+ throw new UnsupportedOperationException();
+ }
+ }
+
/**
* Serialize the properties' values to the object stream.
* <p>
@@ -109,21 +128,23 @@
* @param stream the stream where the properties' values are to be serialized;
may not be null
* @param number the number of properties exposed by the supplied
<code>properties</code> iterator; must be 0 or positive
* @param properties the iterator over the properties that are to be serialized; may
not be null
- * @param largeValueHexHashes the collection into which any large value hashes should
be recordeed
+ * @param largeValues the interface to use for writing large values; may not be null
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
- * @see #deserializeAllProperties(ObjectInputStream, Collection)
- * @see #serializeProperty(ObjectOutputStream, Property, Collection)
+ * @see #deserializeAllProperties(ObjectInputStream, Collection, LargeValues)
+ * @see #deserializeSomeProperties(ObjectInputStream, Collection, LargeValues,
LargeValues, Name...)
+ * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
*/
public void serializeProperties( ObjectOutputStream stream,
int number,
Iterable<Property> properties,
-                                     Collection<String> largeValueHexHashes ) throws IOException {
+ LargeValues largeValues ) throws IOException {
assert number >= 0;
assert properties != null;
+ assert largeValues != null;
stream.writeInt(number);
for (Property property : properties) {
if (property == null) continue;
- serializeProperty(stream, property, largeValueHexHashes);
+ serializeProperty(stream, property, largeValues);
}
}
@@ -144,17 +165,18 @@
*
* @param stream the stream where the property's values are to be serialized; may
not be null
* @param property the property to be serialized; may not be null
- * @param largeValueHexHashes the collection into which any large value hashes should
be recordeed
+ * @param largeValues the interface to use for writing large values; may not be null
* @return true if the property was serialized, or false if it was not
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, Collection)
- * @see #deserializePropertyValues(ObjectInputStream, Name, boolean)
+ * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
+ * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues,
LargeValues)
*/
public boolean serializeProperty( ObjectOutputStream stream,
Property property,
-                                      Collection<String> largeValueHexHashes ) throws IOException {
+ LargeValues largeValues ) throws IOException {
assert stream != null;
assert property != null;
+ assert largeValues != null;
final Name name = property.getName();
        if (this.excludeUuidProperty && DnaLexicon.UUID.equals(name)) return false;
// Write the name ...
@@ -172,7 +194,6 @@
stream.write(hash);
stream.writeLong(stringValue.length());
// Now write to the large objects ...
- largeValueHexHashes.add(StringUtil.getHexString(hash));
                    largeValues.write(computeHash(stringValue), stringValue.length(), PropertyType.STRING, stringValue);
} else {
stream.writeChar('S');
@@ -258,7 +279,6 @@
}
            // If this is a large value and the binary has been released, write it to the large objects ...
if (largeValues != null && hash != null) {
- largeValueHexHashes.add(StringUtil.getHexString(hash));
largeValues.write(hash, length, PropertyType.BINARY, value);
}
} else {
@@ -272,26 +292,94 @@
}
/**
+ * Deserialize the existing properties from the supplied input stream, update the
properties, and then serialize the updated
+ * properties to the output stream.
+ *
+ * @param input the stream from which the existing properties are to be deserialized;
may not be null
+ * @param output the stream to which the updated properties are to be serialized; may
not be null
+ * @param updatedProperties the properties that are being updated (or removed, if
there are no values); may not be null
+ * @param largeValues the interface to use for writing large values; may not be null
+ * @param removedLargeValues the interface to use for recording the large values that
were removed; may not be null
+ * @return the number of properties
+ * @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
+ * @throws ClassNotFoundException if the class for the value's object could not
be found
+ */
+ public int reserializeProperties( ObjectInputStream input,
+ ObjectOutputStream output,
+ Collection<Property> updatedProperties,
+ LargeValues largeValues,
+                                     LargeValues removedLargeValues ) throws IOException, ClassNotFoundException {
+ assert input != null;
+ assert output != null;
+ assert updatedProperties != null;
+ assert largeValues != null;
+ // Assemble a set of property names to skip deserializing
+ Set<Name> skipNames = new HashSet<Name>();
+ for (Property property : updatedProperties) {
+ skipNames.add(property.getName());
+ }
+ Map<Name, Property> allProperties = new HashMap<Name, Property>();
+
+ // Read the number of properties ...
+ int count = input.readInt();
+ // Deserialize all of the proeprties ...
+ for (int i = 0; i != count; ++i) {
+ // Read the property name ...
+ String nameStr = (String)input.readObject();
+ Name name = valueFactories.getNameFactory().create(nameStr);
+ assert name != null;
+ if (skipNames.contains(name)) {
+ // Deserialized, but don't materialize ...
+                deserializePropertyValues(input, name, true, largeValues, removedLargeValues);
+ } else {
+ // Now read the property values ...
+                Object[] values = deserializePropertyValues(input, name, false, largeValues, removedLargeValues);
+ // Add the property to the collection ...
+ Property property = propertyFactory.create(name, values);
+ assert property != null;
+ allProperties.put(name, property);
+ }
+ }
+
+ // Add all the updated properties ...
+ for (Property updated : updatedProperties) {
+ if (updated.isEmpty()) {
+ allProperties.remove(updated.getName());
+ } else {
+ allProperties.put(updated.getName(), updated);
+ }
+ }
+
+ // Serialize properties ...
+ int numProperties = allProperties.size();
+ output.writeInt(numProperties);
+ for (Property property : allProperties.values()) {
+ if (property == null) continue;
+ serializeProperty(output, property, largeValues);
+ }
+ return numProperties;
+ }
+
+ /**
* Deserialize the serialized properties on the supplied object stream.
*
* @param stream the stream that contains the serialized properties; may not be null
* @param properties the collection into which each deserialized property is to be
placed; may not be null
+ * @param largeValues the interface to use for writing large values; may not be null
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not
be found
- * @see #deserializePropertyValues(ObjectInputStream, Name, boolean)
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, Collection)
+ * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues,
LargeValues)
+ * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
*/
public void deserializeAllProperties( ObjectInputStream stream,
-                                          Collection<Property> properties ) throws IOException, ClassNotFoundException {
- assert propertyFactory != null;
- assert valueFactories != null;
+ Collection<Property> properties,
+                                          LargeValues largeValues ) throws IOException, ClassNotFoundException {
assert stream != null;
assert properties != null;
- assert largeValues != null;
// Read the number of properties ...
int count = stream.readInt();
for (int i = 0; i != count; ++i) {
- Property property = deserializeProperty(stream);
+ Property property = deserializeProperty(stream, largeValues);
assert property != null;
properties.add(property);
}
@@ -303,13 +391,17 @@
* @param stream the stream that contains the serialized properties; may not be null
* @param properties the collection into which each deserialized property is to be
placed; may not be null
* @param names the names of the properties that should be deserialized; should not
be null or empty
+ * @param largeValues the interface to use for writing large values; may not be null
+ * @param skippedLargeValues the interface to use for recording the large values that
were skipped; may not be null
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not
be found
- * @see #deserializePropertyValues(ObjectInputStream, Name, boolean)
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, Collection)
+ * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues,
LargeValues)
+ * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
*/
public void deserializeSomeProperties( ObjectInputStream stream,
Collection<Property> properties,
+ LargeValues largeValues,
+ LargeValues skippedLargeValues,
                                           Name... names ) throws IOException, ClassNotFoundException {
assert stream != null;
assert properties != null;
@@ -337,12 +429,17 @@
Name name = valueFactories.getNameFactory().create(nameStr);
assert name != null;
            read = name.equals(nameToRead) || (namesToRead != null && namesToRead.contains(namesToRead));
- // Now read the property values ...
- Object[] values = deserializePropertyValues(stream, name, !read);
- // Add the property to the collection ...
- Property property = propertyFactory.create(name, values);
- assert property != null;
- properties.add(property);
+ if (read) {
+ // Now read the property values ...
+                Object[] values = deserializePropertyValues(stream, name, false, skippedLargeValues, skippedLargeValues);
+ // Add the property to the collection ...
+ Property property = propertyFactory.create(name, values);
+ assert property != null;
+ properties.add(property);
+ } else {
+ // Skip the property ...
+                deserializePropertyValues(stream, name, true, largeValues, skippedLargeValues);
+ }
}
}
@@ -350,19 +447,21 @@
* Deserialize the serialized property on the supplied object stream.
*
* @param stream the stream that contains the serialized properties; may not be null
- * @return the deserialized property values, or an empty list if there are no values
+ * @param largeValues the interface to use for writing large values; may not be null
+ * @return the deserialized property; never null
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not
be found
- * @see #deserializeAllProperties(ObjectInputStream, Collection)
- * @see #serializeProperty(ObjectOutputStream, Property, Collection)
+ * @see #deserializeAllProperties(ObjectInputStream, Collection, LargeValues)
+ * @see #serializeProperty(ObjectOutputStream, Property, LargeValues)
*/
-    public Property deserializeProperty( ObjectInputStream stream ) throws IOException, ClassNotFoundException {
+ public Property deserializeProperty( ObjectInputStream stream,
+                                         LargeValues largeValues ) throws IOException, ClassNotFoundException {
// Read the name ...
String nameStr = (String)stream.readObject();
Name name = valueFactories.getNameFactory().create(nameStr);
assert name != null;
// Now read the property values ...
- Object[] values = deserializePropertyValues(stream, name, false);
+        Object[] values = deserializePropertyValues(stream, name, false, largeValues, largeValues);
// Add the property to the collection ...
return propertyFactory.create(name, values);
}
@@ -373,15 +472,19 @@
* @param stream the stream that contains the serialized properties; may not be null
* @param propertyName the name of the property being deserialized
* @param skip true if the values don't need to be read, or false if they are to
be read
+ * @param largeValues the interface to use for writing large values; may not be null
+ * @param skippedLargeValues the interface to use for recording the large values that
were skipped; may not be null
* @return the deserialized property values, or an empty list if there are no values
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not
be found
- * @see #deserializeAllProperties(ObjectInputStream, Collection)
- * @see #serializeProperty(ObjectOutputStream, Property, Collection)
+ * @see #deserializeAllProperties(ObjectInputStream, Collection, LargeValues)
+ * @see #serializeProperty(ObjectOutputStream, Property, LargeValues)
*/
public Object[] deserializePropertyValues( ObjectInputStream stream,
Name propertyName,
-                                               boolean skip ) throws IOException, ClassNotFoundException {
+ boolean skip,
+ LargeValues largeValues,
+                                               LargeValues skippedLargeValues ) throws IOException, ClassNotFoundException {
assert stream != null;
assert propertyName != null;
// Read the number of values ...
@@ -478,7 +581,11 @@
stream.read(hash);
// Read the length of the content ...
long length = stream.readLong();
- if (!skip) value = largeValues.read(valueFactories, hash, length);
+ if (skip) {
+ skippedLargeValues.read(valueFactories, hash, length);
+ } else {
+ value = largeValues.read(valueFactories, hash, length);
+ }
break;
default:
// All other objects ...
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-11-26 16:42:23 UTC (rev 649)
+++ trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-11-26 16:42:57 UTC (rev 650)
@@ -21,20 +21,40 @@
*/
package org.jboss.dna.connector.store.jpa;
+import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNull.notNullValue;
+import static org.jboss.dna.graph.IsNodeWithChildren.hasChild;
+import static org.jboss.dna.graph.IsNodeWithChildren.hasChildren;
+import static org.jboss.dna.graph.IsNodeWithChildren.hasNoChildren;
+import static org.jboss.dna.graph.IsNodeWithProperty.hasProperty;
+import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import java.util.UUID;
+import java.util.concurrent.TimeUnit;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import org.hibernate.ejb.Ejb3Configuration;
+import org.jboss.dna.common.stats.Stopwatch;
import org.jboss.dna.connector.store.jpa.models.basic.BasicModel;
import org.jboss.dna.graph.BasicExecutionContext;
+import org.jboss.dna.graph.DnaLexicon;
import org.jboss.dna.graph.ExecutionContext;
+import org.jboss.dna.graph.Graph;
+import org.jboss.dna.graph.Location;
+import org.jboss.dna.graph.Node;
+import org.jboss.dna.graph.Subgraph;
import org.jboss.dna.graph.cache.CachePolicy;
+import org.jboss.dna.graph.properties.Name;
+import org.jboss.dna.graph.properties.Path;
+import org.jboss.dna.graph.properties.Property;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
+ * Test the JpaConnection class using a {@link #getModel() model} for the entire set of
tests. To run these same methods using a
+ * different {@link Model}, subclass this class and override the {@link #getModel()}
method to return the desired Model instance.
+ *
* @author Randall Hauch
*/
public class JpaConnectionTest {
@@ -48,11 +68,12 @@
private UUID rootNodeUuid;
private long largeValueSize;
private boolean compressData;
+ private Graph graph;
@Before
public void beforeEach() throws Exception {
context = new BasicExecutionContext();
- model = new BasicModel();
+ model = getModel();
rootNodeUuid = UUID.randomUUID();
largeValueSize = 2 ^ 10; // 1 kilobyte
compressData = false;
@@ -65,7 +86,7 @@
        configurator.setProperty("hibernate.connection.username", "sa");
        configurator.setProperty("hibernate.connection.password", "");
        configurator.setProperty("hibernate.connection.url", "jdbc:hsqldb:.");
- configurator.setProperty("hibernate.show_sql", "true");
+ configurator.setProperty("hibernate.show_sql", "false");
configurator.setProperty("hibernate.format_sql", "true");
        configurator.setProperty("hibernate.use_sql_comments", "true");
        configurator.setProperty("hibernate.hbm2ddl.auto", "create");
@@ -75,27 +96,335 @@
// Create the connection ...
cachePolicy = mock(CachePolicy.class);
        connection = new JpaConnection("source", cachePolicy, manager, model, rootNodeUuid, largeValueSize, compressData);
+
+ // And create the graph ...
+ graph = Graph.create(connection, context);
}
@After
public void afterEach() throws Exception {
try {
- if (manager != null) manager.close();
+ if (connection != null) connection.close();
} finally {
- manager = null;
- if (factory != null) {
- try {
- factory.close();
- } finally {
- factory = null;
+ try {
+ if (manager != null) manager.close();
+ } finally {
+ manager = null;
+ if (factory != null) {
+ try {
+ factory.close();
+ } finally {
+ factory = null;
+ }
}
}
}
}
+ /**
+ * Override this method in subclasses to create test cases that test other models.
+ *
+ * @return the model that should be used in the test
+ */
+ protected Model getModel() {
+ return new BasicModel();
+ }
+
+ protected Path path( String path ) {
+ return context.getValueFactories().getPathFactory().create(path);
+ }
+
+ protected Name name( String name ) {
+ return context.getValueFactories().getNameFactory().create(name);
+ }
+
+ protected Path.Segment child( String name ) {
+ return context.getValueFactories().getPathFactory().createSegment(name);
+ }
+
@Test
- public void shouldAlwaysReadRootNode() {
+ public void shouldAlwaysReadRootNodeByPath() {
+ Node root = graph.getNodeAt("/");
+ assertThat(root, is(notNullValue()));
+        assertThat(root.getProperty(DnaLexicon.UUID).getFirstValue(), is((Object)rootNodeUuid));
+ assertThat(root.getLocation().getPath(), is(path("/")));
+ assertThat(root.getLocation().getUuid(), is(rootNodeUuid));
+ }
+ @Test
+ public void shouldAlwaysReadRootNodeByUuid() {
+ Location location = new Location(rootNodeUuid);
+ Node root = graph.getNodeAt(location);
+ assertThat(root, is(notNullValue()));
+        assertThat(root.getProperty(DnaLexicon.UUID).getFirstValue(), is((Object)rootNodeUuid));
+ assertThat(root.getLocation().getPath(), is(path("/")));
+ assertThat(root.getLocation().getUuid(), is(rootNodeUuid));
}
+ @Test
+ public void shouldSetPropertyOnRootNode() {
+ graph.set("propA").to("valueA").on("/");
+ // Now look up the node ...
+ Node root = graph.getNodeAt("/");
+ assertThat(root, is(notNullValue()));
+ assertThat(root, hasProperty(DnaLexicon.UUID, rootNodeUuid));
+ assertThat(root, hasProperty("propA", "valueA"));
+ }
+
+ @Test
+ public void shouldAddChildUnderRootNode() {
+ graph.batch().create("/a").with("propB",
"valueB").and("propC", "valueC").execute();
+ // Now look up the root node ...
+ Node root = graph.getNodeAt("/");
+ assertThat(root, is(notNullValue()));
+ assertThat(root, hasProperty(DnaLexicon.UUID, rootNodeUuid));
+ assertThat(root, hasChild(child("a")));
+
+ // Now look up node A ...
+ Node nodeA = graph.getNodeAt("/a");
+ assertThat(nodeA, is(notNullValue()));
+ assertThat(nodeA, hasProperty("propB", "valueB"));
+ assertThat(nodeA, hasProperty("propC", "valueC"));
+ assertThat(nodeA, hasNoChildren());
+ }
+
+ @Test
+ public void shouldAddChildrenOnRootNode() {
+ graph.batch().set("propA").to("valueA").on("/").and().create("/a").with("propB", "valueB").and("propC", "valueC").and()
+ .create("/b").with("propD", "valueD").and("propE", "valueE").execute();
+ // Now look up the root node ...
+ Node root = graph.getNodeAt("/");
+ assertThat(root, is(notNullValue()));
+ assertThat(root, hasProperty(DnaLexicon.UUID, rootNodeUuid));
+ assertThat(root, hasProperty("propA", "valueA"));
+ assertThat(root, hasChildren(child("a"), child("b")));
+
+ // Now look up node A ...
+ Node nodeA = graph.getNodeAt("/a");
+ assertThat(nodeA, is(notNullValue()));
+ assertThat(nodeA, hasProperty("propB", "valueB"));
+ assertThat(nodeA, hasProperty("propC", "valueC"));
+ assertThat(nodeA, hasNoChildren());
+
+ // Now look up node B ...
+ Node nodeB = graph.getNodeAt("/b");
+ assertThat(nodeB, is(notNullValue()));
+ assertThat(nodeB, hasProperty("propD", "valueD"));
+ assertThat(nodeB, hasProperty("propE", "valueE"));
+ assertThat(nodeB, hasNoChildren());
+
+ // Get the subgraph ...
+ Subgraph subgraph = graph.getSubgraphOfDepth(3).at("/");
+ assertThat(subgraph, is(notNullValue()));
+ assertThat(subgraph.getNode("."), hasProperty(DnaLexicon.UUID,
rootNodeUuid));
+ assertThat(subgraph.getNode("."), hasProperty("propA",
"valueA"));
+ assertThat(subgraph.getNode("."), hasChildren(child("a"),
child("b")));
+ assertThat(subgraph.getNode("a"), is(notNullValue()));
+ assertThat(subgraph.getNode("a"), hasProperty("propB",
"valueB"));
+ assertThat(subgraph.getNode("a"), hasProperty("propC",
"valueC"));
+ assertThat(subgraph.getNode("a"), hasNoChildren());
+ assertThat(subgraph.getNode("b"), is(notNullValue()));
+ assertThat(subgraph.getNode("b"), hasProperty("propD",
"valueD"));
+ assertThat(subgraph.getNode("b"), hasProperty("propE",
"valueE"));
+ assertThat(subgraph.getNode("b"), hasNoChildren());
+ }
+
+ @Test
+ public void shouldStoreManyPropertiesOnANode() {
+ Graph.Create<Graph.Batch> create = graph.batch().create("/a");
+ for (int i = 0; i != 100; ++i) {
+ create = create.with("property" + i, "value" + i);
+ }
+ create.execute();
+ // Now look up all the properties ...
+ Node nodeA = graph.getNodeAt("/a");
+ assertThat(nodeA, is(notNullValue()));
+ for (int i = 0; i != 100; ++i) {
+ assertThat(nodeA, hasProperty("property" + i, "value" +
i));
+ }
+ assertThat(nodeA, hasNoChildren());
+ }
+
+ @Test
+ public void shouldGetOnePropertyOnNode() {
+ Graph.Create<Graph.Batch> create = graph.batch().create("/a");
+ for (int i = 0; i != 100; ++i) {
+ create = create.with("property" + i, "value" + i);
+ }
+ create.execute();
+ // Now get a single property ...
+ Property p = graph.getProperty("property75").on("/a");
+ assertThat(p, is(notNullValue()));
+ assertThat(p.size(), is(1));
+ assertThat(p.isSingle(), is(true));
+ assertThat(p.getFirstValue().toString(), is("value75"));
+ }
+
+ @Test
+ public void shouldCalculateNumberOfNodesInTreeCorrectly() {
+ assertThat(numberNodesInTree(2, 2), is(7));
+ assertThat(numberNodesInTree(2, 3), is(15));
+ assertThat(numberNodesInTree(2, 4), is(31));
+ assertThat(numberNodesInTree(3, 2), is(13));
+ assertThat(numberNodesInTree(3, 3), is(40));
+ assertThat(numberNodesInTree(3, 4), is(121));
+ assertThat(numberNodesInTree(3, 5), is(364));
+ assertThat(numberNodesInTree(3, 6), is(1093));
+ assertThat(numberNodesInTree(3, 7), is(3280));
+ assertThat(numberNodesInTree(3, 8), is(9841));
+ assertThat(numberNodesInTree(3, 9), is(29524));
+ assertThat(numberNodesInTree(4, 2), is(21));
+ assertThat(numberNodesInTree(4, 3), is(85));
+ assertThat(numberNodesInTree(4, 4), is(341));
+ assertThat(numberNodesInTree(4, 5), is(1365));
+ assertThat(numberNodesInTree(4, 6), is(5461));
+ assertThat(numberNodesInTree(4, 7), is(21845));
+ assertThat(numberNodesInTree(5, 3), is(156));
+ assertThat(numberNodesInTree(5, 4), is(781));
+ assertThat(numberNodesInTree(5, 5), is(3906));
+ assertThat(numberNodesInTree(7, 3), is(400));
+ assertThat(numberNodesInTree(7, 4), is(2801));
+ assertThat(numberNodesInTree(7, 5), is(19608));
+ assertThat(numberNodesInTree(8, 3), is(585));
+ assertThat(numberNodesInTree(8, 4), is(4681));
+ assertThat(numberNodesInTree(8, 5), is(37449));
+ assertThat(numberNodesInTree(8, 6), is(299593));
+ assertThat(numberNodesInTree(8, 7), is(2396745));
+ assertThat(numberNodesInTree(10, 2), is(111));
+ assertThat(numberNodesInTree(10, 3), is(1111));
+ assertThat(numberNodesInTree(10, 4), is(11111));
+ assertThat(numberNodesInTree(100, 1), is(101));
+ assertThat(numberNodesInTree(200, 1), is(201));
+ assertThat(numberNodesInTree(200, 2), is(40201));
+ assertThat(numberNodesInTree(200, 3), is(8040201));
+ assertThat(numberNodesInTree(1000, 1), is(1001));
+ assertThat(numberNodesInTree(3000, 1), is(3001));
+ }
+
+ @Test
+ public void shouldCreateDeepBranch() {
+ createTree("", 1, 50, "deep and narrow tree, 1x50", false);
+ }
+
+ // @Test
+ // public void shouldCreateBinaryTree() {
+ // createTree("", 2, 8, "binary tree, 2x8", false);
+ // }
+
+ @Test
+ public void shouldCreate10x2Tree() {
+ createTree("", 10, 2, null, false);
+ }
+
+ // @Test
+ // public void shouldCreate10x3DecimalTree() {
+ // createTree("", 10, 3, null, false);
+ // }
+
+ // @Test
+ // public void shouldCreateWideTree() {
+ // createTree("", 1000, 1, "wide/flat tree, 1000x1", false);
+ // }
+
+ // @Test
+ // public void shouldCreateVeryWideTree() {
+ // createTree("", 3000, 1, "wide/flat tree, 3000x1", false);
+ // }
+
+ // @Test
+ // public void shouldCreate10KDecimalTree() {
+ // createTree("", 10, 4, "10K decimal tree, 10x4", false);
+ // }
+
+ // @Test
+ // public void shouldCreate8x4Tree() {
+ // createTree("", 8, 4, null, false);
+ // }
+
+ protected int createTree( String initialPath,
+ int numberPerDepth,
+ int depth,
+ String description,
+ boolean oneBatch ) {
+ int totalNumber = numberNodesInTree(numberPerDepth, depth) - 1; // this method doesn't create the root
+ if (description == null) description = "" + numberPerDepth + "x" + depth + " tree";
+ boolean printIntermediate = totalNumber > 500;
+ System.out.println(description + " (" + totalNumber + " nodes):");
+ int totalNumberCreated = 0;
+ Graph.Batch batch = oneBatch ? graph.batch() : null;
+ Stopwatch sw = new Stopwatch();
+ if (batch != null) {
+ totalNumberCreated += createChildren(batch, initialPath, "node",
numberPerDepth, depth, printIntermediate);
+ sw.start();
+ batch.execute();
+ } else {
+ sw.start();
+ totalNumberCreated += createChildren(null, initialPath, "node", numberPerDepth, depth, printIntermediate);
+ }
+ sw.stop();
+ long totalDurationInMillis = TimeUnit.NANOSECONDS.toMillis(sw.getTotalDuration().longValue());
+ long avgDurationInMillis = totalDurationInMillis / totalNumber;
+ System.out.println(" Total = " + sw.getTotalDuration() + "; avg = " + avgDurationInMillis + " ms");
+
+ // Perform second batch ...
+ batch = graph.batch();
+ totalNumberCreated += createChildren(batch, initialPath, "secondBranch", 2, 2, printIntermediate);
+ sw = new Stopwatch();
+ sw.start();
+ batch.execute();
+ sw.stop();
+ System.out.println(" Final batch total = " + sw.getTotalDuration()
+ "; avg = " + avgDurationInMillis + " ms");
+ assertThat(totalNumberCreated, is(totalNumber + numberNodesInTree(2, 2) - 1));
+ return totalNumberCreated;
+ }
+
+ protected int createChildren( Graph.Batch useBatch,
+ String parentPath,
+ String nodePrefix,
+ int number,
+ int depthRemaining,
+ boolean printIntermediateStatus ) {
+ int numberCreated = 0;
+ Graph.Batch batch = useBatch;
+ if (batch == null) batch = graph.batch();
+ for (int i = 0; i != number; ++i) {
+ String path = parentPath + "/" + nodePrefix + i;
+ Graph.Create<Graph.Batch> create = batch.create(path);
+ String value = "the quick brown fox jumped over the moon. What? ";
+ for (int j = 0; j != 10; ++j) {
+ // value = value + value;
+ create = create.with("property" + j, value);
+ }
+ create.and();
+ }
+ numberCreated += number;
+ if (useBatch == null) {
+ batch.execute();
+ if (printIntermediateStatus) {
+ System.out.println(" total created ... " +
numberCreated);
+ }
+ }
+ if (depthRemaining > 1) {
+ for (int i = 0; i != number; ++i) {
+ String path = parentPath + "/" + nodePrefix + i;
+ numberCreated += createChildren(useBatch, path, nodePrefix, number, depthRemaining - 1, false);
+ if (printIntermediateStatus) {
+ System.out.println(" total created ... " +
numberCreated);
+ }
+ }
+ }
+ return numberCreated;
+ }
+
+ protected int numberNodesInTree( int numberPerDepth,
+ int depth ) {
+ assert depth > 0;
+ assert numberPerDepth > 0;
+ int totalNumber = 0;
+ for (int i = 0; i <= depth; ++i) {
+ totalNumber += (int)Math.pow(numberPerDepth, i);
+ }
+ return totalNumber;
+ }
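For reference, the loop above sums a geometric series for branching factor b = numberPerDepth:

    numberNodesInTree(b, depth) = 1 + b + b^2 + ... + b^depth = (b^(depth+1) - 1) / (b - 1)   (for b > 1)

so numberNodesInTree(3, 3) = (3^4 - 1) / 2 = 40 and numberNodesInTree(10, 4) = (10^5 - 1) / 9 = 11111, matching the assertions in shouldCalculateNumberOfNodesInTreeCorrectly.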
+
}
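As the class javadoc notes, these same tests can be run against a different storage model by subclassing JpaConnectionTest and overriding getModel(). A minimal sketch of such a subclass (OtherModel is a hypothetical Model implementation named only for illustration; package declaration and imports omitted):

    public class JpaConnectionOtherModelTest extends JpaConnectionTest {
        /**
         * {@inheritDoc}
         *
         * @see JpaConnectionTest#getModel()
         */
        @Override
        protected Model getModel() {
            return new OtherModel(); // hypothetical alternative Model implementation
        }
    }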
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java 2008-11-26
16:42:23 UTC (rev 649)
+++
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java 2008-11-26
16:42:57 UTC (rev 650)
@@ -24,7 +24,6 @@
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.core.IsNull.notNullValue;
-import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.stub;
@@ -334,13 +333,13 @@
assertThat(child1a.getIndexInParent(), is(1));
assertThat(child1a.getChildName(), is("child1"));
assertThat(child1a.getChildNamespace(), is(ns));
- assertThat(child1a.getSameNameSiblingIndex(), is(nullValue()));
+ assertThat(child1a.getSameNameSiblingIndex(), is(0));
assertThat(child2a.getId(), is(childId2));
assertThat(child2a.getIndexInParent(), is(2));
assertThat(child2a.getChildName(), is("child2"));
assertThat(child2a.getChildNamespace(), is(ns));
- assertThat(child2a.getSameNameSiblingIndex(), is(nullValue()));
+ assertThat(child2a.getSameNameSiblingIndex(), is(0));
assertThat(child3a.getId(), is(childId3));
assertThat(child3a.getIndexInParent(), is(3));
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java 2008-11-26
16:42:23 UTC (rev 649)
+++
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java 2008-11-26
16:42:57 UTC (rev 650)
@@ -22,6 +22,7 @@
package org.jboss.dna.connector.store.jpa.util;
import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNull.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.matchers.JUnitMatchers.hasItems;
import java.io.ByteArrayInputStream;
@@ -29,10 +30,12 @@
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
+import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -45,6 +48,7 @@
import org.jboss.dna.graph.BasicExecutionContext;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.properties.Binary;
+import org.jboss.dna.graph.properties.Name;
import org.jboss.dna.graph.properties.Property;
import org.jboss.dna.graph.properties.PropertyFactory;
import org.jboss.dna.graph.properties.PropertyType;
@@ -62,16 +66,14 @@
private LargeValuesHolder largeValues;
private PropertyFactory propertyFactory;
private ValueFactories valueFactories;
- private Set<String> largeValueHexHashes;
@Before
public void beforeEach() {
context = new BasicExecutionContext();
propertyFactory = context.getPropertyFactory();
valueFactories = context.getValueFactories();
+ serializer = new Serializer(context, false);
largeValues = new LargeValuesHolder();
- largeValueHexHashes = new HashSet<String>();
- serializer = new Serializer(context, largeValues, false);
}
@Test
@@ -79,7 +81,6 @@
Property prop = createProperty("p1", new Long(1));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -87,7 +88,6 @@
Property prop = createProperty("p1", new Integer(1));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -95,7 +95,6 @@
Property prop = createProperty("p1", new Short((short)1));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -103,7 +102,6 @@
Property prop = createProperty("p1", new Float(1.0f));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -111,7 +109,6 @@
Property prop = createProperty("p1", new Double(1.0d));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -119,7 +116,6 @@
Property prop = createProperty("p1", new Boolean(true));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -127,7 +123,6 @@
Property prop = createProperty("p1",
valueFactories.getNameFactory().create("something"));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -135,7 +130,6 @@
Property prop = createProperty("p1",
valueFactories.getPathFactory().create("/a/b/c/something"));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -143,12 +137,10 @@
Property prop = createProperty("p1",
valueFactories.getDateFactory().createUtc());
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
prop = createProperty("p1", valueFactories.getDateFactory().create());
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -156,7 +148,6 @@
Property prop = createProperty("p1", UUID.randomUUID());
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -164,7 +155,6 @@
Property prop = createProperty("p1", new
URI("http://example.com"));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -173,7 +163,6 @@
Property prop = createProperty("p1",
valueFactories.getReferenceFactory().create(uuid.toString()));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -181,7 +170,6 @@
Property prop = createProperty("p1",
valueFactories.getDecimalFactory().create("1.0123455243284347375478525485466895512"));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -190,7 +178,6 @@
Property prop = createProperty("p1",
valueFactories.getBinaryFactory().create(value));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -199,7 +186,6 @@
Property prop = createProperty("p1",
valueFactories.getBinaryFactory().create(value));
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(1));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -207,7 +193,6 @@
Property prop = createProperty("p1", "v1");
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(0));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
@@ -217,38 +202,66 @@
assertSerializableAndDeserializable(serializer, prop);
assertThat(largeValues.getCount(), is(1));
assertThat(largeValues.get(value).value, is((Object)value));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
public void shouldSerializeAndDeserializeSmallAndLargeStringProperty() throws Exception {
Property prop1 = createProperty("p1", "v1");
String value = "v234567890123456789012345678901234567890";
- Property prop2 = createProperty("p1", value);
- Property prop3 = createProperty("p1", "v2");
- Property prop4 = createProperty("p1", new String(value)); // make sure
it's a different String object
+ Property prop2 = createProperty("p2", value);
+ Property prop3 = createProperty("p3", "v2");
+ Property prop4 = createProperty("p4", new String(value)); // make sure
it's a different String object
assertSerializableAndDeserializable(serializer, prop1, prop2, prop3, prop4);
assertThat(largeValues.getCount(), is(1));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
@Test
public void shouldSerializeAndDeserializeMixtureOfSmallAndLargeProperties() throws Exception {
Property prop1 = createProperty("p1", "v1");
String value = "v234567890123456789012345678901234567890";
- Property prop2 = createProperty("p1", value);
- Property prop3 = createProperty("p1", "v2");
- Property prop4 = createProperty("p1", new String(value)); // make sure
it's a different String object
- Property prop5 = createProperty("p1",
valueFactories.getBinaryFactory().create("something"));
+ Property prop2 = createProperty("p2", value);
+ Property prop3 = createProperty("p3", "v2");
+ Property prop4 = createProperty("p4", new String(value)); // make sure
it's a different String object
+ Property prop5 = createProperty("p5",
valueFactories.getBinaryFactory().create("something"));
String binaryValue = "really really long string that will be converted to a
binary value and tested like that";
- Property prop6 = createProperty("p1",
valueFactories.getBinaryFactory().create(binaryValue));
+ Property prop6 = createProperty("p6",
valueFactories.getBinaryFactory().create(binaryValue));
assertSerializableAndDeserializable(serializer, prop1, prop2, prop3, prop4,
prop5, prop6);
assertThat(largeValues.getCount(), is(2));
- assertThat(largeValueHexHashes.size(), is(largeValues.getCount()));
}
+ @Test
+ public void shouldReserializePropertiesWithUpdates() throws Exception {
+ Property prop1 = createProperty("p1", "v1");
+ String value = "v234567890123456789012345678901234567890";
+ Property prop2 = createProperty("p2", value);
+ Property prop3 = createProperty("p3", "v2");
+ Property prop4 = createProperty("p4", new String(value)); // make sure
it's a different String object
+ Property prop5 = createProperty("p5",
valueFactories.getBinaryFactory().create("something"));
+ String binaryValueStr = "really really long string that will be converted to
a binary value and tested like that";
+ Binary binaryValue = valueFactories.getBinaryFactory().create(binaryValueStr);
+ Property prop6 = createProperty("p6", binaryValue);
+
+ Property prop2b = createProperty("p2");
+ Property prop3b = createProperty("p3", "v3");
+ String binaryValueStr2 = binaryValueStr + " but modified";
+ Binary binaryValue2 = valueFactories.getBinaryFactory().create(binaryValueStr2);
+ Property prop6b = createProperty("p6", binaryValue2);
+
+ Property[] initial = new Property[] {prop1, prop2, prop3, prop4, prop5, prop6};
+ Property[] updated = new Property[] {prop2b, prop3b, prop6b};
+ SkippedLargeValues removedLargeValues = new SkippedLargeValues();
+ assertReserializable(serializer, removedLargeValues, initial, updated);
+
+ assertThat(largeValues.getCount(), is(3));
+ assertThat(removedLargeValues.getCount(), is(2)); // p2's value and p6's original value
+ assertThat(largeValues.get(serializer.computeHash(value)), is(notNullValue()));
+ assertThat(largeValues.get(binaryValue2), is(notNullValue()));
+ assertThat(largeValues.get(binaryValue2), is(notNullValue()));
+ assertThat(removedLargeValues.isSkipped(binaryValue), is(true));
+ }
+
protected Property createProperty( String name,
Object... values ) {
return propertyFactory.create(valueFactories.getNameFactory().create(name), values);
@@ -261,7 +274,7 @@
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
- serializer.serializeProperty(oos, property, largeValueHexHashes);
+ serializer.serializeProperty(oos, property, largeValues);
} finally {
oos.close();
}
@@ -272,7 +285,7 @@
ObjectInputStream ois = new ObjectInputStream(bais);
Property copy = null;
try {
- copy = serializer.deserializeProperty(ois);
+ copy = serializer.deserializeProperty(ois, largeValues);
} finally {
ois.close();
}
@@ -288,7 +301,7 @@
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
- serializer.serializeProperties(oos, propertyList.size(), propertyList, largeValueHexHashes);
+ serializer.serializeProperties(oos, propertyList.size(), propertyList, largeValues);
} finally {
oos.close();
}
@@ -298,7 +311,7 @@
ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
ObjectInputStream ois = new ObjectInputStream(bais);
try {
- serializer.deserializeAllProperties(ois, outputProperties);
+ serializer.deserializeAllProperties(ois, outputProperties, largeValues);
} finally {
ois.close();
}
@@ -308,6 +321,126 @@
assertThat(outputProperties, hasItems(propertyList.toArray(new Property[propertyList.size()])));
}
+ protected void assertReserializable( Serializer serializer,
+ Serializer.LargeValues removedLargeValues,
+ Property[] originalProperties,
+ Property... updatedProperties ) throws IOException, ClassNotFoundException {
+ Collection<Name> propertiesThatStay = new HashSet<Name>();
+ Collection<Name> propertiesThatAreDeleted = new HashSet<Name>();
+ for (Property prop : originalProperties) {
+ propertiesThatStay.add(prop.getName());
+ }
+ for (Property prop : updatedProperties) {
+ if (prop.isEmpty()) {
+ propertiesThatAreDeleted.add(prop.getName());
+ propertiesThatStay.remove(prop.getName());
+ } else {
+ propertiesThatStay.add(prop.getName());
+ }
+ }
+
+ // Serialize the properties one at a time ...
+ Collection<Property> initialProps = Arrays.asList(originalProperties);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ ObjectOutputStream oos = new ObjectOutputStream(baos);
+ try {
+ serializer.serializeProperties(oos, initialProps.size(), initialProps, largeValues);
+ } finally {
+ oos.close();
+ }
+ byte[] bytes = baos.toByteArray();
+
+ // Now reserialize, updating the properties ...
+ Collection<Property> updatedProps = Arrays.asList(updatedProperties);
+ ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+ ObjectInputStream ois = new ObjectInputStream(bais);
+ baos = new ByteArrayOutputStream();
+ oos = new ObjectOutputStream(baos);
+ try {
+ serializer.reserializeProperties(ois, oos, updatedProps, largeValues, removedLargeValues);
+ } finally {
+ oos.close();
+ ois.close();
+ }
+
+ // Deserialize ...
+ List<Property> afterProperties = new ArrayList<Property>();
+ bais = new ByteArrayInputStream(baos.toByteArray());
+ ois = new ObjectInputStream(bais);
+ try {
+ serializer.deserializeAllProperties(ois, afterProperties, largeValues);
+ } finally {
+ ois.close();
+ }
+ Collection<Name> namesAfter = new HashSet<Name>();
+ for (Property prop : afterProperties) {
+ namesAfter.add(prop.getName());
+ }
+
+ // Check the properties match ...
+ assertThat(afterProperties.size(), is(propertiesThatStay.size()));
+ assertThat(namesAfter, is(propertiesThatStay));
+ for (Name deleted : propertiesThatAreDeleted) {
+ assertThat(namesAfter.contains(deleted), is(false));
+ }
+ }
+
+ protected class SkippedLargeValues implements Serializer.LargeValues {
+ private int minimumSize = 20;
+ private Set<String> skippedKeys = new HashSet<String>();
+
+ public SkippedLargeValues() {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#getMinimumSize()
+ */
+ public long getMinimumSize() {
+ return minimumSize;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#read(org.jboss.dna.graph.properties.ValueFactories,
+ * byte[], long)
+ */
+ public Object read( ValueFactories valueFactories,
+ byte[] hash,
+ long length ) throws IOException {
+ String key = StringUtil.getHexString(hash);
+ return skippedKeys.add(key);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.connector.store.jpa.util.Serializer.LargeValues#write(byte[], long,
+ * org.jboss.dna.graph.properties.PropertyType, java.lang.Object)
+ */
+ public void write( byte[] hash,
+ long length,
+ PropertyType type,
+ Object value ) {
+ throw new UnsupportedOperationException();
+ }
+
+ public boolean isSkipped( Binary binary ) throws UnsupportedEncodingException {
+ String key = StringUtil.getHexString(binary.getHash());
+ return isSkipped(key);
+ }
+
+ public boolean isSkipped( String key ) {
+ return skippedKeys.contains(key);
+ }
+
+ public int getCount() {
+ return skippedKeys.size();
+ }
+ }
+
protected class LargeValuesHolder implements Serializer.LargeValues {
private int minimumSize = 20;
private final Map<String, LargeValue> largeValuesByHexHash = new HashMap<String, LargeValue>();
Modified: trunk/extensions/dna-connector-store-jpa/src/test/resources/log4j.properties
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/resources/log4j.properties 2008-11-26
16:42:23 UTC (rev 649)
+++
trunk/extensions/dna-connector-store-jpa/src/test/resources/log4j.properties 2008-11-26
16:42:57 UTC (rev 650)
@@ -9,6 +9,8 @@
# Set up the default logging to be INFO level, then override specific units
log4j.logger.org.jboss.dna=INFO
+log4j.logger.org.jboss.dna.connector.store.jpa=INFO
+
# Hibernate
log4j.logger.org.hibernate=ERROR
# C3P0