Author: rhauch
Date: 2008-12-16 11:19:33 -0500 (Tue, 16 Dec 2008)
New Revision: 686
Added:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/ReferentialIntegrityException.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceId.java
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnection.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaSource.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/Model.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java
trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/ModelTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java
Log:
DNA-40
Added optional referential integrity support to the JPA connector.
References that are values in properties are now tracked in the database, where each
reference record maps the UUID of the node containing the reference to the UUID of the
node being referenced. Note that this table does not record which property contains the
reference, since that information is not required to manage dependencies.
Finally, these references are checked when the BasicRequestProcessor is closed, to catch any
operations that removed nodes that are still referenced by other nodes.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java 2008-12-12 23:00:10 UTC
(rev 685)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/Graph.java 2008-12-16 16:19:33 UTC
(rev 686)
@@ -1825,7 +1825,7 @@
return new On<BatchConjunction>() {
public BatchConjunction on( Location location ) {
UpdatePropertiesRequest request = new
UpdatePropertiesRequest(location, properties);
- queue().submit(request);
+ requestQueue.submit(request);
return nextRequests;
}
@@ -1902,7 +1902,7 @@
return new On<BatchConjunction>() {
public BatchConjunction on( Location location ) {
RemovePropertiesRequest request = new
RemovePropertiesRequest(location, propertyNames);
- queue().submit(request);
+ requestQueue.submit(request);
return nextRequests;
}
@@ -1944,7 +1944,7 @@
return new On<BatchConjunction>() {
public BatchConjunction on( Location location ) {
RemovePropertiesRequest request = new
RemovePropertiesRequest(location, names);
- queue().submit(request);
+ requestQueue.submit(request);
return nextRequests;
}
@@ -2115,7 +2115,7 @@
public BatchConjunction on( Location at ) {
ReadPropertyRequest request = new ReadPropertyRequest(at, name);
- queue().submit(request);
+ requestQueue.submit(request);
return Batch.this.nextRequests;
}
};
@@ -2136,7 +2136,7 @@
return new On<BatchConjunction>() {
public BatchConjunction on( Location location ) {
ReadAllPropertiesRequest request = new
ReadAllPropertiesRequest(location);
- queue().submit(request);
+ requestQueue.submit(request);
return Batch.this.nextRequests;
}
@@ -2199,7 +2199,7 @@
public BatchConjunction of( Location at ) {
ReadAllChildrenRequest request = new ReadAllChildrenRequest(at);
- queue().submit(request);
+ requestQueue.submit(request);
return Batch.this.nextRequests;
}
};
@@ -2221,7 +2221,7 @@
return new At<BatchConjunction>() {
public BatchConjunction at( Location location ) {
ReadBranchRequest request = new ReadBranchRequest(location, depth);
- queue().submit(request);
+ requestQueue.submit(request);
return Batch.this.nextRequests;
}
@@ -3106,7 +3106,7 @@
}
public void submit( Request request ) {
- if (request instanceof UpdatePropertiesRequest) {
+ if (!requests.isEmpty() && request instanceof
UpdatePropertiesRequest) {
// If the previous request was also an update, then maybe they can be
merged ...
Request previous = requests.getLast();
if (previous instanceof UpdatePropertiesRequest) {
Added:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/ReferentialIntegrityException.java
===================================================================
---
trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/ReferentialIntegrityException.java
(rev 0)
+++
trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/ReferentialIntegrityException.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -0,0 +1,116 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site:
http://www.fsf.org.
+ */
+package org.jboss.dna.graph.properties;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.jboss.dna.graph.Location;
+
+/**
+ * @author Randall Hauch
+ */
+public class ReferentialIntegrityException extends RuntimeException {
+
+ /**
+ */
+ private static final long serialVersionUID = -3703984046286975978L;
+
+ private final Map<Location, List<Reference>> invalidReferences;
+
+ /**
+ * @param location the location of the node containing the bad reference(s)
+ * @param invalidReferences the invalid references
+ */
+ public ReferentialIntegrityException( Location location,
+ Reference... invalidReferences ) {
+ this.invalidReferences = new HashMap<Location, List<Reference>>();
+ List<Reference> invalidRefList = null;
+ if (invalidReferences == null || invalidReferences.length == 0) {
+ invalidRefList = Collections.emptyList();
+ } else if (invalidReferences.length == 1) {
+ invalidRefList = Collections.singletonList(invalidReferences[0]);
+ } else {
+ invalidRefList = new ArrayList<Reference>();
+ for (Reference ref : invalidReferences) {
+ invalidRefList.add(ref);
+ }
+ }
+ this.invalidReferences.put(location, invalidRefList);
+ }
+
+ /**
+ * @param invalidReferences the map of locations to invalid references
+ */
+ public ReferentialIntegrityException( Map<Location, List<Reference>>
invalidReferences ) {
+ this.invalidReferences = invalidReferences;
+ }
+
+ /**
+ * @param invalidReferences the map of locations to invalid references
+ * @param message
+ */
+ public ReferentialIntegrityException( Map<Location, List<Reference>>
invalidReferences,
+ String message ) {
+ super(message);
+ this.invalidReferences = invalidReferences;
+ }
+
+ /**
+ * @param invalidReferences the map of locations to invalid references
+ * @param cause
+ */
+ public ReferentialIntegrityException( Map<Location, List<Reference>>
invalidReferences,
+ Throwable cause ) {
+ super(cause);
+ this.invalidReferences = invalidReferences;
+ }
+
+ /**
+ * @param invalidReferences the map of locations to invalid references
+ * @param message
+ * @param cause
+ */
+ public ReferentialIntegrityException( Map<Location, List<Reference>>
invalidReferences,
+ String message,
+ Throwable cause ) {
+ super(message, cause);
+ this.invalidReferences = invalidReferences;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public String toString() {
+ return super.toString();
+ }
+
+ /**
+ * @return invalidReferences
+ */
+ public Map<Location, List<Reference>> getInvalidReferences() {
+ return invalidReferences;
+ }
+}
Property changes on:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/properties/ReferentialIntegrityException.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnection.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnection.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnection.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -47,6 +47,7 @@
private final UUID rootNodeUuid;
private final long largeValueMinimumSizeInBytes;
private final boolean compressData;
+ private final boolean enforceReferentialIntegrity;
/*package*/JpaConnection( String sourceName,
CachePolicy cachePolicy,
@@ -54,7 +55,8 @@
Model model,
UUID rootNodeUuid,
long largeValueMinimumSizeInBytes,
- boolean compressData ) {
+ boolean compressData,
+ boolean enforceReferentialIntegrity ) {
assert sourceName != null;
assert entityManager != null;
assert model != null;
@@ -66,6 +68,7 @@
this.rootNodeUuid = rootNodeUuid;
this.largeValueMinimumSizeInBytes = largeValueMinimumSizeInBytes;
this.compressData = compressData;
+ this.enforceReferentialIntegrity = enforceReferentialIntegrity;
}
/**
@@ -125,7 +128,13 @@
public void execute( ExecutionContext context,
Request request ) throws RepositorySourceException {
long size = largeValueMinimumSizeInBytes;
- RequestProcessor proc = model.createRequestProcessor(name, context,
entityManager, rootNodeUuid, size, compressData);
+ RequestProcessor proc = model.createRequestProcessor(name,
+ context,
+ entityManager,
+ rootNodeUuid,
+ size,
+ compressData,
+
enforceReferentialIntegrity);
try {
proc.process(request);
} finally {
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -41,6 +41,7 @@
public static I18n unableToReadLargeValue;
public static I18n unableToMoveRootNode;
public static I18n locationShouldHavePathAndOrProperty;
+ public static I18n invalidReferences;
public static I18n basicModelDescription;
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaSource.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaSource.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/JpaSource.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -117,6 +117,7 @@
protected static final String MODEL_NAME = "modelName";
protected static final String LARGE_VALUE_SIZE_IN_BYTES =
"largeValueSizeInBytes";
protected static final String COMPRESS_DATA = "compressData";
+ protected static final String ENFORCE_REFERENTIAL_INTEGRITY =
"enforceReferentialIntegrity";
/**
* This source supports events.
@@ -148,6 +149,7 @@
private static final int DEFAULT_IDLE_TIME_IN_SECONDS_BEFORE_TESTING_CONNECTIONS = 60
* 3; // 3 minutes
private static final int DEFAULT_LARGE_VALUE_SIZE_IN_BYTES = 2 ^ 10; // 1 kilobyte
private static final boolean DEFAULT_COMPRESS_DATA = true;
+ private static final boolean DEFAULT_ENFORCE_REFERENTIAL_INTEGRITY = true;
/**
* The first serialized version of this source.
@@ -173,6 +175,7 @@
private int cacheTimeToLiveInMilliseconds = DEFAULT_CACHE_TIME_TO_LIVE_IN_SECONDS *
1000;
private long largeValueSizeInBytes = DEFAULT_LARGE_VALUE_SIZE_IN_BYTES;
private boolean compressData = DEFAULT_COMPRESS_DATA;
+ private boolean referentialIntegrityEnforced =
DEFAULT_ENFORCE_REFERENTIAL_INTEGRITY;
private final Capabilities capabilities = new Capabilities();
private transient Model model;
private String modelName;
@@ -577,6 +580,20 @@
}
/**
+ * @return referentialIntegrityEnforced
+ */
+ public boolean isReferentialIntegrityEnforced() {
+ return referentialIntegrityEnforced;
+ }
+
+ /**
+ * @param referentialIntegrityEnforced Sets referentialIntegrityEnforced to the
specified value.
+ */
+ public void setReferentialIntegrityEnforced( boolean referentialIntegrityEnforced )
{
+ this.referentialIntegrityEnforced = referentialIntegrityEnforced;
+ }
+
+ /**
* {@inheritDoc}
*
* @see
org.jboss.dna.graph.connectors.RepositorySource#initialize(org.jboss.dna.graph.connectors.RepositoryContext)
@@ -634,6 +651,7 @@
ref.add(new StringRefAddr(CACHE_TIME_TO_LIVE_IN_MILLISECONDS,
Integer.toString(getCacheTimeToLiveInMilliseconds())));
ref.add(new StringRefAddr(LARGE_VALUE_SIZE_IN_BYTES,
Long.toString(getLargeValueSizeInBytes())));
ref.add(new StringRefAddr(COMPRESS_DATA, Boolean.toString(isCompressData())));
+ ref.add(new StringRefAddr(ENFORCE_REFERENTIAL_INTEGRITY,
Boolean.toString(isReferentialIntegrityEnforced())));
if (getModel() != null) {
ref.add(new StringRefAddr(MODEL_NAME, getModel()));
}
@@ -680,6 +698,7 @@
String retryLimit = values.get(RETRY_LIMIT);
String largeModelSize = values.get(LARGE_VALUE_SIZE_IN_BYTES);
String compressData = values.get(COMPRESS_DATA);
+ String refIntegrity = values.get(ENFORCE_REFERENTIAL_INTEGRITY);
// Create the source instance ...
JpaSource source = new JpaSource();
@@ -703,6 +722,7 @@
if (modelName != null) source.setModel(modelName);
if (largeModelSize != null)
source.setLargeValueSizeInBytes(Long.parseLong(largeModelSize));
if (compressData != null)
source.setCompressData(Boolean.parseBoolean(compressData));
+ if (refIntegrity != null)
source.setReferentialIntegrityEnforced(Boolean.parseBoolean(refIntegrity));
return source;
}
return null;
@@ -807,7 +827,8 @@
if (entityManager == null) {
entityManager = entityManagerFactory.createEntityManager();
}
- return new JpaConnection(getName(), cachePolicy, entityManager, model, rootUuid,
largeValueSizeInBytes, compressData);
+ return new JpaConnection(getName(), cachePolicy, entityManager, model, rootUuid,
largeValueSizeInBytes, compressData,
+ referentialIntegrityEnforced);
}
/**
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/Model.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/Model.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/Model.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -79,7 +79,8 @@
EntityManager
entityManager,
UUID rootNodeUuid,
long
largeValueMinimumSizeInBytes,
- boolean comparessData );
+ boolean comparessData,
+ boolean
enforceReferentialIntegrity );
/**
* Configure the entity class that will be used by JPA to store information in the
database.
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModel.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -50,6 +50,7 @@
* possible to efficiently work with nodes containing large numbers of children, where
adding and removing child nodes is largely
* independent of the number of children. Also, working with properties is also
completely independent of the number of child
* nodes.</li>
+ * <li>ReferenceChanges - the references from one node to another</li>
* <li>Subgraph - a working area for efficiently computing the space of a subgraph;
see below</li>
* <li>Change log - a record of the changes that have been made to the repository.
This is used to distribute change events across
* multiple distributed processes, and to allow a recently-connected client to identify
the set of changes that have been made
@@ -98,7 +99,7 @@
* {@inheritDoc}
*
* @see
org.jboss.dna.connector.store.jpa.Model#createRequestProcessor(java.lang.String,
org.jboss.dna.graph.ExecutionContext,
- * javax.persistence.EntityManager, java.util.UUID, long, boolean)
+ * javax.persistence.EntityManager, java.util.UUID, long, boolean, boolean)
*/
@Override
public RequestProcessor createRequestProcessor( String sourceName,
@@ -106,9 +107,10 @@
EntityManager entityManager,
UUID rootNodeUuid,
long largeValueMinimumSizeInBytes,
- boolean compressData ) {
+ boolean compressData,
+ boolean enforceReferentialIntegrity )
{
return new BasicRequestProcessor(sourceName, context, entityManager,
rootNodeUuid, largeValueMinimumSizeInBytes,
- compressData);
+ compressData, enforceReferentialIntegrity);
}
/**
@@ -126,6 +128,8 @@
configurator.addAnnotatedClass(LargeValueId.class);
configurator.addAnnotatedClass(ChildEntity.class);
configurator.addAnnotatedClass(ChildId.class);
+ configurator.addAnnotatedClass(ReferenceEntity.class);
+ configurator.addAnnotatedClass(ReferenceId.class);
configurator.addAnnotatedClass(SubgraphQueryEntity.class);
configurator.addAnnotatedClass(SubgraphNodeEntity.class);
configurator.addAnnotatedClass(ChangeLogEntity.class);
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/BasicRequestProcessor.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -31,6 +31,7 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
@@ -68,8 +69,12 @@
import org.jboss.dna.graph.properties.PathNotFoundException;
import org.jboss.dna.graph.properties.Property;
import org.jboss.dna.graph.properties.PropertyType;
+import org.jboss.dna.graph.properties.Reference;
+import org.jboss.dna.graph.properties.ReferentialIntegrityException;
+import org.jboss.dna.graph.properties.UuidFactory;
import org.jboss.dna.graph.properties.ValueFactories;
import org.jboss.dna.graph.properties.ValueFactory;
+import org.jboss.dna.graph.properties.ValueFormatException;
import org.jboss.dna.graph.requests.CopyBranchRequest;
import org.jboss.dna.graph.requests.CreateNodeRequest;
import org.jboss.dna.graph.requests.DeleteBranchRequest;
@@ -95,6 +100,7 @@
protected final ValueFactory<String> stringFactory;
protected final PathFactory pathFactory;
protected final NameFactory nameFactory;
+ protected final UuidFactory uuidFactory;
protected final Namespaces namespaces;
protected final UUID rootNodeUuid;
protected final String rootNodeUuidString;
@@ -103,6 +109,8 @@
protected final boolean compressData;
protected final Logger logger;
protected final RequestProcessorCache cache;
+ protected final boolean enforceReferentialIntegrity;
+ private boolean referencesChanged;
/**
* @param sourceName
@@ -111,25 +119,30 @@
* @param rootNodeUuid
* @param largeValueMinimumSizeInBytes
* @param compressData
+ * @param enforceReferentialIntegrity
*/
public BasicRequestProcessor( String sourceName,
ExecutionContext context,
EntityManager entityManager,
UUID rootNodeUuid,
long largeValueMinimumSizeInBytes,
- boolean compressData ) {
+ boolean compressData,
+ boolean enforceReferentialIntegrity ) {
super(sourceName, context);
assert entityManager != null;
assert rootNodeUuid != null;
this.entities = entityManager;
- this.stringFactory = context.getValueFactories().getStringFactory();
- this.pathFactory = context.getValueFactories().getPathFactory();
- this.nameFactory = context.getValueFactories().getNameFactory();
+ ValueFactories valuesFactory = context.getValueFactories();
+ this.stringFactory = valuesFactory.getStringFactory();
+ this.pathFactory = valuesFactory.getPathFactory();
+ this.nameFactory = valuesFactory.getNameFactory();
+ this.uuidFactory = valuesFactory.getUuidFactory();
this.namespaces = new Namespaces(entityManager);
this.rootNodeUuid = rootNodeUuid;
this.rootNodeUuidString = this.rootNodeUuid.toString();
this.largeValueMinimumSizeInBytes = largeValueMinimumSizeInBytes;
this.compressData = compressData;
+ this.enforceReferentialIntegrity = enforceReferentialIntegrity;
this.serializer = new Serializer(context, true);
this.logger = getExecutionContext().getLogger(getClass());
this.cache = new RequestProcessorCache(this.pathFactory);
@@ -147,7 +160,6 @@
public void process( CreateNodeRequest request ) {
logger.trace(request.toString());
Location actualLocation = null;
- String childUuidString = null;
try {
// Create nodes have to be defined via a path ...
Location parentLocation = request.under();
@@ -165,7 +177,8 @@
}
}
if (uuidString == null) uuidString = UUID.randomUUID().toString();
- childUuidString = createProperties(uuidString, request.properties());
+ assert uuidString != null;
+ createProperties(uuidString, request.properties());
// Find or create the namespace for the child ...
Name childName = request.named();
@@ -222,14 +235,13 @@
}
// Create the new ChildEntity ...
- ChildId id = new ChildId(parentUuidString, childUuidString);
+ ChildId id = new ChildId(parentUuidString, uuidString);
ChildEntity entity = new ChildEntity(id, nextIndexInParent, ns,
childName.getLocalName(), nextSnsIndex);
entities.persist(entity);
// Set the actual path, regardless of the supplied path...
- assert childUuidString != null;
Path path = pathFactory.create(parentPath, childName, nextSnsIndex);
- actualLocation = new Location(path, UUID.fromString(childUuidString));
+ actualLocation = new Location(path, UUID.fromString(uuidString));
// Finally, update the cache with the information we know ...
if (childrenOfParent != null) {
@@ -278,17 +290,19 @@
boolean compressed = entity.isCompressed();
Collection<Property> properties = new
LinkedList<Property>();
byte[] data = entity.getData();
- LargeValueSerializer largeValues = new LargeValueSerializer(entity);
- ByteArrayInputStream bais = new ByteArrayInputStream(data);
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
- try {
- serializer.deserializeAllProperties(ois, properties, largeValues);
- for (Property property : properties) {
- request.addProperty(property);
+ if (data != null) {
+ LargeValueSerializer largeValues = new LargeValueSerializer(entity);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.deserializeAllProperties(ois, properties,
largeValues);
+ for (Property property : properties) {
+ request.addProperty(property);
+ }
+ } finally {
+ ois.close();
}
- } finally {
- ois.close();
}
} catch (NoResultException e) {
@@ -553,17 +567,19 @@
int propertyCount = entity.getPropertyCount();
Collection<Property> properties = new
ArrayList<Property>(propertyCount);
byte[] data = entity.getData();
- LargeValueSerializer largeValues = new LargeValueSerializer(entity);
- ByteArrayInputStream bais = new ByteArrayInputStream(data);
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
- try {
- serializer.deserializeAllProperties(ois, properties, largeValues);
- for (Property property : properties) {
- request.addProperty(property);
+ if (data != null) {
+ LargeValueSerializer largeValues = new LargeValueSerializer(entity);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.deserializeAllProperties(ois, properties, largeValues);
+ for (Property property : properties) {
+ request.addProperty(property);
+ }
+ } finally {
+ ois.close();
}
- } finally {
- ois.close();
}
} catch (NoResultException e) {
// there are no properties (probably not expected, but still okay) ...
@@ -613,18 +629,20 @@
int propertyCount = entity.getPropertyCount();
Collection<Property> properties = new
ArrayList<Property>(propertyCount);
byte[] data = entity.getData();
- LargeValueSerializer largeValues = new LargeValueSerializer(entity);
- ByteArrayInputStream bais = new ByteArrayInputStream(data);
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
- try {
- Serializer.LargeValues skippedLargeValues = Serializer.NO_LARGE_VALUES;
- serializer.deserializeSomeProperties(ois, properties, largeValues,
skippedLargeValues, propertyName);
- for (Property property : properties) {
- request.setProperty(property); // should be only one property
+ if (data != null) {
+ LargeValueSerializer largeValues = new LargeValueSerializer(entity);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ Serializer.LargeValues skippedLargeValues =
Serializer.NO_LARGE_VALUES;
+ serializer.deserializeSomeProperties(ois, properties, largeValues,
skippedLargeValues, propertyName);
+ for (Property property : properties) {
+ request.setProperty(property); // should be only one property
+ }
+ } finally {
+ ois.close();
}
- } finally {
- ois.close();
}
} catch (NoResultException e) {
// there are no properties (probably not expected, but still okay) ...
@@ -655,42 +673,88 @@
PropertiesEntity entity = null;
try {
entity = (PropertiesEntity)query.getSingleResult();
- final boolean hadLargeValues = !entity.getLargeValues().isEmpty();
// Prepare the streams so we can deserialize all existing properties and
reserialize the old and updated
// properties ...
boolean compressed = entity.isCompressed();
- ByteArrayInputStream bais = new ByteArrayInputStream(entity.getData());
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
+ byte[] originalData = entity.getData();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
OutputStream os = compressed ? new GZIPOutputStream(baos) : baos;
ObjectOutputStream oos = new ObjectOutputStream(os);
- int numProperties = 0;
- Set<String> largeValueHashesWritten = hadLargeValues ? new
HashSet<String>() : null;
- LargeValueSerializer largeValues = new LargeValueSerializer(entity,
largeValueHashesWritten);
- SkippedLargeValues removedValues = new SkippedLargeValues(largeValues);
- try {
- numProperties = serializer.reserializeProperties(ois, oos,
request.properties(), largeValues, removedValues);
- } finally {
+ int numProps = 0;
+ LargeValueSerializer largeValues = null;
+ Collection<Property> props = request.properties();
+ References refs = enforceReferentialIntegrity ? new References() : null;
+ if (originalData == null) {
+ largeValues = new LargeValueSerializer(entity);
+ numProps = props.size();
+ serializer.serializeProperties(oos, numProps, props, largeValues,
refs);
+ } else {
+ boolean hadLargeValues = !entity.getLargeValues().isEmpty();
+ Set<String> largeValueHashesWritten = hadLargeValues ? new
HashSet<String>() : null;
+ largeValues = new LargeValueSerializer(entity,
largeValueHashesWritten);
+ ByteArrayInputStream bais = new ByteArrayInputStream(originalData);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ SkippedLargeValues removedValues = new
SkippedLargeValues(largeValues);
try {
- ois.close();
+ Serializer.ReferenceValues refValues = refs != null ? refs :
Serializer.NO_REFERENCES_VALUES;
+ numProps = serializer.reserializeProperties(ois, oos, props,
largeValues, removedValues, refValues);
} finally {
- oos.close();
+ try {
+ ois.close();
+ } finally {
+ oos.close();
+ }
}
+ // The new large values were recorded and associated with the
properties entity during reserialization.
+ // However, any values no longer used now need to be removed ...
+ if (hadLargeValues) {
+ // Remove any large value from the 'skipped' list that
was also written ...
+ removedValues.skippedKeys.removeAll(largeValueHashesWritten);
+ for (String oldHexKey : removedValues.skippedKeys) {
+ LargeValueId id = new LargeValueId(oldHexKey);
+ entity.getLargeValues().remove(id);
+ }
+ }
+
+ if (refs != null) {
+ // Remove any existing references ...
+ if (refs.hasRemoved()) {
+ for (Reference reference : refs.getRemoved()) {
+ String toUuid = resolveToUuid(reference);
+ if (toUuid != null) {
+ ReferenceId id = new ReferenceId(actual.uuid,
toUuid);
+ ReferenceEntity refEntity =
entities.find(ReferenceEntity.class, id);
+ if (refEntity != null) {
+ entities.remove(refEntity);
+ referencesChanged = true;
+ }
+ }
+ }
+ }
+ }
}
- entity.setPropertyCount(numProperties);
+ entity.setPropertyCount(numProps);
entity.setData(baos.toByteArray());
entity.setCompressed(compressData);
- // The new large values were recorded and associated with the properties
entity during reserialization.
- // However, any values no longer used now need to be removed ...
- if (hadLargeValues) {
- // Remove any large value from the 'skipped' list that was
also written ...
- removedValues.skippedKeys.removeAll(largeValueHashesWritten);
- for (String oldHexKey : removedValues.skippedKeys) {
- LargeValueId id = new LargeValueId(oldHexKey);
- entity.getLargeValues().remove(id);
+ if (refs != null && refs.hasWritten()) {
+ // If there were references from the updated node ...
+ Set<Reference> newReferences = refs.getWritten();
+ // Remove any reference that was written (and not removed) ...
+ newReferences.removeAll(refs.getRead());
+ if (newReferences.size() != 0) {
+ // Now save the new references ...
+ for (Reference reference : newReferences) {
+ String toUuid = resolveToUuid(reference);
+ if (toUuid != null) {
+ ReferenceId id = new ReferenceId(actual.uuid, toUuid);
+ ReferenceEntity refEntity = new ReferenceEntity(id);
+ entities.persist(refEntity);
+ referencesChanged = true;
+ }
+ }
}
}
} catch (NoResultException e) {
@@ -777,15 +841,17 @@
properties.add(actualLocation.getIdProperty(DnaLexicon.UUID));
// Deserialize all the properties (except the UUID)...
byte[] data = props.getData();
- LargeValueSerializer largeValues = new LargeValueSerializer(props);
- ByteArrayInputStream bais = new ByteArrayInputStream(data);
- InputStream is = compressed ? new GZIPInputStream(bais) : bais;
- ObjectInputStream ois = new ObjectInputStream(is);
- try {
- serializer.deserializeAllProperties(ois, properties, largeValues);
- request.setProperties(nodeLocation, properties);
- } finally {
- ois.close();
+ if (data != null) {
+ LargeValueSerializer largeValues = new LargeValueSerializer(props);
+ ByteArrayInputStream bais = new ByteArrayInputStream(data);
+ InputStream is = compressed ? new GZIPInputStream(bais) : bais;
+ ObjectInputStream ois = new ObjectInputStream(is);
+ try {
+ serializer.deserializeAllProperties(ois, properties,
largeValues);
+ request.setProperties(nodeLocation, properties);
+ } finally {
+ ois.close();
+ }
}
}
@@ -946,45 +1012,137 @@
request.setActualLocations(actualOldLocation, actualNewLocation);
}
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.graph.requests.processor.RequestProcessor#close()
+ */
+ @Override
+ public void close() {
+ // Verify that the references are valid so far ...
+ verifyReferences();
+
+ // Now commit the transaction ...
+ EntityTransaction txn = entities.getTransaction();
+ if (txn != null) txn.commit();
+ super.close();
+ }
+
+ /**
+ * {@link ReferenceEntity Reference entities} are added and removed in the
appropriate <code>process(...)</code> methods.
+ * However, this method is typically called in {@link BasicRequestProcessor#close()}
and performs the following steps:
+ * <ol>
+ * <li>Remove all references that have a "from" node that is under
the versions branch.</li>
+ * <li>Verify that all remaining references have a valid and existing
"to" node</li>
+ * </ol>
+ */
+ protected void verifyReferences() {
+ if (!enforceReferentialIntegrity) return;
+ if (referencesChanged) {
+
+ // Remove all references that have a "from" node that doesn't
support referential integrity ...
+ ReferenceEntity.deleteUnenforcedReferences(entities);
+
+ // Verify that all references are resolved to existing nodes ...
+ int numUnresolved = ReferenceEntity.countAllReferencesResolved(entities);
+ if (numUnresolved != 0) {
+ List<ReferenceEntity> references =
ReferenceEntity.verifyAllReferencesResolved(entities);
+ ValueFactory<Reference> refFactory =
getExecutionContext().getValueFactories().getReferenceFactory();
+ Map<Location, List<Reference>> invalidRefs = new
HashMap<Location, List<Reference>>();
+ for (ReferenceEntity entity : references) {
+ UUID fromUuid = UUID.fromString(entity.getId().getFromUuidString());
+ Location location = new Location(fromUuid);
+ location = getActualLocation(location).location;
+ List<Reference> refs = invalidRefs.get(location);
+ if (refs == null) {
+ refs = new ArrayList<Reference>();
+ invalidRefs.put(location, refs);
+ }
+ UUID toUuid = UUID.fromString(entity.getId().getToUuidString());
+ refs.add(refFactory.create(toUuid));
+ }
+ String msg = JpaConnectorI18n.invalidReferences.text(getSourceName());
+ throw new ReferentialIntegrityException(invalidRefs, msg);
+ }
+
+ referencesChanged = false;
+ }
+ }
+
protected String createProperties( String uuidString,
Collection<Property> properties ) throws
IOException {
assert uuidString != null;
- if (properties.isEmpty()) return uuidString;
- if (properties.size() == 1 &&
properties.iterator().next().getName().equals(JcrLexicon.NAME)) return uuidString;
// Create the PropertiesEntity ...
NodeId nodeId = new NodeId(uuidString);
PropertiesEntity props = new PropertiesEntity(nodeId);
- LargeValueSerializer largeValues = new LargeValueSerializer(props);
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- OutputStream os = compressData ? new GZIPOutputStream(baos) : baos;
- ObjectOutputStream oos = new ObjectOutputStream(os);
- int numProperties = properties.size();
- try {
- serializer.serializeProperties(oos, numProperties, properties, largeValues);
- } finally {
- oos.close();
+ // If there are properties ...
+ boolean processProperties = true;
+ if (properties.isEmpty()) processProperties = false;
+ else if (properties.size() == 1 &&
properties.iterator().next().getName().equals(JcrLexicon.NAME)) processProperties =
false;
+
+ if (processProperties) {
+ References refs = enforceReferentialIntegrity ? new References() : null;
+ LargeValueSerializer largeValues = new LargeValueSerializer(props);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ OutputStream os = compressData ? new GZIPOutputStream(baos) : baos;
+ ObjectOutputStream oos = new ObjectOutputStream(os);
+ int numProperties = properties.size();
+ try {
+ Serializer.ReferenceValues refValues = refs != null ? refs :
Serializer.NO_REFERENCES_VALUES;
+ serializer.serializeProperties(oos, numProperties, properties,
largeValues, refValues);
+ } finally {
+ oos.close();
+ }
+
+ props.setData(baos.toByteArray());
+ props.setPropertyCount(numProperties);
+
+ // Record the changes to the references ...
+ if (refs != null && refs.hasWritten()) {
+ for (Reference reference : refs.getWritten()) {
+ String toUuid = resolveToUuid(reference);
+ if (toUuid != null) {
+ ReferenceId id = new ReferenceId(uuidString, toUuid);
+ ReferenceEntity refEntity = new ReferenceEntity(id);
+ entities.persist(refEntity);
+ referencesChanged = true;
+ }
+ }
+ }
+ } else {
+ props.setData(null);
+ props.setPropertyCount(0);
}
-
- props.setData(baos.toByteArray());
props.setCompressed(compressData);
- props.setPropertyCount(numProperties);
+ props.setReferentialIntegrityEnforced(true);
entities.persist(props);
+
+ // References will be persisted in the commit ...
return uuidString;
}
/**
- * {@inheritDoc}
+ * Attempt to resolve the reference.
*
- * @see org.jboss.dna.graph.requests.processor.RequestProcessor#close()
+ * @param reference the reference
+ * @return the UUID of the node to which the reference points, or null if the
reference could not be resolved
*/
- @Override
- public void close() {
- EntityTransaction txn = entities.getTransaction();
- if (txn != null) txn.commit();
- super.close();
+ protected String resolveToUuid( Reference reference ) {
+ // See if the reference is by UUID ...
+ try {
+ UUID uuid = uuidFactory.create(reference);
+ ActualLocation actualLocation = getActualLocation(new Location(uuid));
+ return actualLocation.uuid;
+ } catch (ValueFormatException e) {
+ // Unknown kind of reference, which we don't track
+ } catch (PathNotFoundException e) {
+ // Unable to resolve reference ...
+ }
+ // Unable to resolve reference ...
+ return null;
}
/**
@@ -1400,4 +1558,79 @@
return this.location.toString() + " (uuid=" + uuid + ") "
+ childEntity;
}
}
+
+ protected class References implements Serializer.ReferenceValues {
+ private Set<Reference> read;
+ private Set<Reference> removed;
+ private Set<Reference> written;
+
+ protected References() {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see
org.jboss.dna.connector.store.jpa.util.Serializer.ReferenceValues#read(org.jboss.dna.graph.properties.Reference)
+ */
+ public void read( Reference reference ) {
+ if (read == null) read = new HashSet<Reference>();
+ read.add(reference);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see
org.jboss.dna.connector.store.jpa.util.Serializer.ReferenceValues#remove(org.jboss.dna.graph.properties.Reference)
+ */
+ public void remove( Reference reference ) {
+ if (removed == null) removed = new HashSet<Reference>();
+ removed.add(reference);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see
org.jboss.dna.connector.store.jpa.util.Serializer.ReferenceValues#write(org.jboss.dna.graph.properties.Reference)
+ */
+ public void write( Reference reference ) {
+ if (written == null) written = new HashSet<Reference>();
+ written.add(reference);
+ }
+
+ public boolean hasRead() {
+ return read != null;
+ }
+
+ public boolean hasRemoved() {
+ return removed != null;
+ }
+
+ public boolean hasWritten() {
+ return written != null;
+ }
+
+ /**
+ * @return read
+ */
+ public Set<Reference> getRead() {
+ if (read != null) return read;
+ return Collections.emptySet();
+ }
+
+ /**
+ * @return removed
+ */
+ public Set<Reference> getRemoved() {
+ if (removed != null) return removed;
+ return Collections.emptySet();
+ }
+
+ /**
+ * @return written
+ */
+ public Set<Reference> getWritten() {
+ if (written != null) return written;
+ return Collections.emptySet();
+ }
+ }
}
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ChildId.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -24,6 +24,7 @@
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Embeddable;
+import net.jcip.annotations.Immutable;
import org.jboss.dna.common.util.HashCode;
/**
@@ -32,6 +33,8 @@
* @author Randall Hauch
*/
@Embeddable
+@Immutable
+@org.hibernate.annotations.Immutable
public class ChildId implements Serializable {
/**
@@ -48,22 +51,16 @@
public ChildId() {
}
- // public ChildId( UUID parentUuid,
- // UUID childUuid ) {
- // setParentUuid(parentUuid);
- // setChildUuid(childUuid);
- // }
-
public ChildId( NodeId parentId,
NodeId childId ) {
- if (parentId != null) setParentUuidString(parentId.getUuidString());
- if (childId != null) setChildUuidString(childId.getUuidString());
+ if (parentId != null) this.parentUuidString = parentId.getUuidString();
+ if (childId != null) this.childUuidString = childId.getUuidString();
}
public ChildId( String parentUuid,
String childUuid ) {
- setParentUuidString(parentUuid);
- setChildUuidString(childUuid);
+ this.parentUuidString = parentUuid;
+ this.childUuidString = childUuid;
}
/**
@@ -74,13 +71,6 @@
}
/**
- * @param parentUuidString Sets parentUuidString to the specified value.
- */
- public void setParentUuidString( String parentUuidString ) {
- this.parentUuidString = parentUuidString;
- }
-
- /**
* @return childUuidString
*/
public String getChildUuidString() {
@@ -88,13 +78,6 @@
}
/**
- * @param childUuidString Sets childUuidString to the specified value.
- */
- public void setChildUuidString( String childUuidString ) {
- this.childUuidString = childUuidString;
- }
-
- /**
* {@inheritDoc}
*
* @see java.lang.Object#hashCode()
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/PropertiesEntity.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -53,7 +53,7 @@
private NodeId id;
@Lob
- @Column( name = "DATA", nullable = false, unique = false )
+ @Column( name = "DATA", nullable = true, unique = false )
private byte[] data;
@Column( name = "NUM_PROPS", nullable = false )
@@ -65,6 +65,12 @@
@Column( name = "COMPRESSED", nullable = true )
private Boolean compressed;
+ /**
+ * Flag specifying whether referential integrity is enforced for references from this node.
+ */
+ @Column( name = "ENFORCEREFINTEG", nullable = false )
+ private boolean referentialIntegrityEnforced = true;
+
@org.hibernate.annotations.CollectionOfElements( fetch = FetchType.LAZY )
@JoinTable( name = "DNA_LARGEVALUE_USAGES", joinColumns = @JoinColumn( name
= "NODE_UUID" ) )
private Collection<LargeValueId> largeValues = new
HashSet<LargeValueId>();
@@ -148,6 +154,20 @@
}
/**
+ * @return referentialIntegrityEnforced
+ */
+ public boolean isReferentialIntegrityEnforced() {
+ return referentialIntegrityEnforced;
+ }
+
+ /**
+ * @param referentialIntegrityEnforced Sets referentialIntegrityEnforced to the
specified value.
+ */
+ public void setReferentialIntegrityEnforced( boolean referentialIntegrityEnforced )
{
+ this.referentialIntegrityEnforced = referentialIntegrityEnforced;
+ }
+
+ /**
* {@inheritDoc}
*
* @see java.lang.Object#hashCode()
Added:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceEntity.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceEntity.java
(rev 0)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceEntity.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -0,0 +1,176 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site:
http://www.fsf.org.
+ */
+package org.jboss.dna.connector.store.jpa.models.basic;
+
+import java.util.List;
+import javax.persistence.Entity;
+import javax.persistence.EntityManager;
+import javax.persistence.Id;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+import javax.persistence.NoResultException;
+import javax.persistence.Query;
+import javax.persistence.Table;
+import org.hibernate.annotations.Index;
+
+/**
+ * A record of a reference from one node to another.
+ *
+ * @author Randall Hauch
+ */
+@Entity
+@Table( name = "DNA_BASIC_REFERENCES" )
+@org.hibernate.annotations.Table( appliesTo = "DNA_BASIC_REFERENCES", indexes =
{
+ @Index( name = "REFINDEX_INX", columnNames = {"FROM_UUID",
"TO_UUID"} ),
+ @Index( name = "REFTOUUID_INX", columnNames = {"TO_UUID"} )} )
+@NamedQueries( {
+ @NamedQuery( name = "ReferenceEntity.removeReferencesFrom", query =
"delete ReferenceEntity where id.fromUuidString = :fromUuid" ),
+ @NamedQuery( name = "ReferenceEntity.removeNonEnforcedReferences", query =
"delete ReferenceEntity as ref where ref.id.fromUuidString not in ( select
props.id.uuidString from PropertiesEntity props where props.referentialIntegrityEnforced =
true )" ),
+ @NamedQuery( name = "ReferenceEntity.countUnresolveReferences", query =
"select count(*) from ReferenceEntity as ref where ref.id.toUuidString not in (
select props.id.uuidString from PropertiesEntity props where
props.referentialIntegrityEnforced = true )" ),
+ @NamedQuery( name = "ReferenceEntity.getUnresolveReferences", query =
"select ref from ReferenceEntity as ref where ref.id.toUuidString not in ( select
props.id.uuidString from PropertiesEntity props where props.referentialIntegrityEnforced =
true )" )} )
+public class ReferenceEntity {
+
+ @Id
+ private ReferenceId id;
+
+ /**
+ *
+ */
+ public ReferenceEntity() {
+ }
+
+ /**
+ * @param id the id
+ */
+ public ReferenceEntity( ReferenceId id ) {
+ this.id = id;
+ }
+
+ /**
+ * @return id
+ */
+ public ReferenceId getId() {
+ return id;
+ }
+
+ /**
+ * @param id Sets id to the specified value.
+ */
+ public void setId( ReferenceId id ) {
+ this.id = id;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#hashCode()
+ */
+ @Override
+ public int hashCode() {
+ return id.hashCode();
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#equals(java.lang.Object)
+ */
+ @Override
+ public boolean equals( Object obj ) {
+ if (obj == this) return true;
+ if (obj instanceof ReferenceEntity) {
+ ReferenceEntity that = (ReferenceEntity)obj;
+ if (this.getId().equals(that.getId())) return true;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return this.id.toString();
+ }
+
+ /**
+ * Delete all references that start from the node with the supplied UUID.
+ *
+ * @param uuid the UUID of the node from which the references start
+ * @param manager the manager; may not be null
+ * @return the number of deleted references
+ */
+ public static int deleteReferencesFrom( String uuid,
+ EntityManager manager ) {
+ assert manager != null;
+ Query delete =
manager.createNamedQuery("ReferenceEntity.removeReferencesFrom");
+ delete.setParameter("fromUuid", uuid);
+ int result = delete.executeUpdate();
+ manager.flush();
+ return result;
+ }
+
+ /**
+ * Delete all references that start from nodes that do not support enforced
referential integrity.
+ *
+ * @param manager the manager; may not be null
+ * @return the number of deleted references
+ */
+ public static int deleteUnenforcedReferences( EntityManager manager ) {
+ assert manager != null;
+ Query delete =
manager.createNamedQuery("ReferenceEntity.removeNonEnforcedReferences");
+ int result = delete.executeUpdate();
+ manager.flush();
+ return result;
+ }
+
+ /**
+ * Count the references that cannot be resolved to an existing node that enforces
referential integrity.
+ *
+ * @param manager the manager; may not be null
+ * @return the number of unresolved references
+ */
+ public static int countAllReferencesResolved( EntityManager manager ) {
+ assert manager != null;
+ Query query =
manager.createNamedQuery("ReferenceEntity.countUnresolveReferences");
+ try {
+ return ((Number)query.getSingleResult()).intValue();
+ } catch (NoResultException e) {
+ return 0;
+ }
+ }
+
+ /**
+ * Find all references that cannot be resolved to an existing node that enforces
referential integrity.
+ *
+ * @param manager the manager; may not be null
+ * @return the list of unresolved references; never null
+ */
+ @SuppressWarnings( "unchecked" )
+ public static List<ReferenceEntity> verifyAllReferencesResolved( EntityManager
manager ) {
+ assert manager != null;
+ Query query =
manager.createNamedQuery("ReferenceEntity.getUnresolveReferences");
+ return query.getResultList();
+ }
+}
Property changes on:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceEntity.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Added:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceId.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceId.java
(rev 0)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceId.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -0,0 +1,120 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2008, Red Hat Middleware LLC, and individual contributors
+ * as indicated by the @author tags. See the copyright.txt file in the
+ * distribution for a full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site:
http://www.fsf.org.
+ */
+package org.jboss.dna.connector.store.jpa.models.basic;
+
+import java.io.Serializable;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+import net.jcip.annotations.Immutable;
+import org.jboss.dna.common.util.HashCode;
+
+/**
+ * An identifier for a reference, comprised of a single {@link NodeId} of the node
containing the reference and a single
+ * {@link NodeId} of the node being referenced.
+ *
+ * @author Randall Hauch
+ */
+@Embeddable
+@Immutable
+@org.hibernate.annotations.Immutable
+public class ReferenceId implements Serializable {
+
+ /**
+ * Version {@value}
+ */
+ private static final long serialVersionUID = 1L;
+
+ @Column( name = "FROM_UUID", nullable = false, updatable = false, length =
36 )
+ private String fromUuidString;
+
+ @Column( name = "TO_UUID", nullable = false, updatable = false, length = 36
)
+ private String toUuidString;
+
+ public ReferenceId() {
+ }
+
+ public ReferenceId( String fromUuid,
+ String toUuid ) {
+ this.fromUuidString = fromUuid;
+ this.toUuidString = toUuid;
+ }
+
+ /**
+ * @return fromUuidString
+ */
+ public String getFromUuidString() {
+ return fromUuidString;
+ }
+
+ /**
+ * @return toUuidString
+ */
+ public String getToUuidString() {
+ return toUuidString;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#hashCode()
+ */
+ @Override
+ public int hashCode() {
+ return HashCode.compute(fromUuidString, toUuidString);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#equals(java.lang.Object)
+ */
+ @Override
+ public boolean equals( Object obj ) {
+ if (obj == this) return true;
+ if (obj instanceof ReferenceId) {
+ ReferenceId that = (ReferenceId)obj;
+ if (this.fromUuidString == null) {
+ if (that.fromUuidString != null) return false;
+ } else {
+ if (!this.fromUuidString.equals(that.fromUuidString)) return false;
+ }
+ if (this.toUuidString == null) {
+ if (that.toUuidString != null) return false;
+ } else {
+ if (!this.toUuidString.equals(that.toUuidString)) return false;
+ }
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "Reference from " + fromUuidString + " to " +
toUuidString;
+ }
+
+}
Property changes on:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/models/basic/ReferenceId.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/util/Serializer.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -55,6 +55,7 @@
public class Serializer {
public static final LargeValues NO_LARGE_VALUES = new NoLargeValues();
+ public static final ReferenceValues NO_REFERENCES_VALUES = new NoReferenceValues();
private final PropertyFactory propertyFactory;
private final ValueFactories valueFactories;
@@ -111,6 +112,30 @@
}
/**
+ * Interface used to record how Reference values are processed during serialization
and deserialization.
+ *
+ * @author Randall Hauch
+ */
+ public interface ReferenceValues {
+ void read( Reference reference );
+
+ void write( Reference reference );
+
+ void remove( Reference reference );
+ }
+
+ protected static class NoReferenceValues implements ReferenceValues {
+ public void read( Reference arg0 ) {
+ }
+
+ public void remove( Reference arg0 ) {
+ }
+
+ public void write( Reference arg0 ) {
+ }
+ }
+
+ /**
* Serialize the properties' values to the object stream.
* <p>
* If any of the property values are considered {@link LargeValues#getMinimumSize()
large}, the value's hash and length of the
@@ -129,22 +154,25 @@
* @param number the number of properties exposed by the supplied
<code>properties</code> iterator; must be 0 or positive
* @param properties the iterator over the properties that are to be serialized; may
not be null
* @param largeValues the interface to use for writing large values; may not be null
+ * @param references the interface to use for recording which {@link Reference}
values were found during serialization, or
+ * null if the references do not need to be accumulated
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
* @see #deserializeAllProperties(ObjectInputStream, Collection, LargeValues)
* @see #deserializeSomeProperties(ObjectInputStream, Collection, LargeValues,
LargeValues, Name...)
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
+ * @see #serializeProperty(ObjectOutputStream, Property, LargeValues,
ReferenceValues)
*/
public void serializeProperties( ObjectOutputStream stream,
int number,
Iterable<Property> properties,
- LargeValues largeValues ) throws IOException {
+ LargeValues largeValues,
+ ReferenceValues references ) throws IOException {
assert number >= 0;
assert properties != null;
assert largeValues != null;
stream.writeInt(number);
for (Property property : properties) {
if (property == null) continue;
- serializeProperty(stream, property, largeValues);
+ serializeProperty(stream, property, largeValues, references);
}
}
@@ -166,17 +194,21 @@
* @param stream the stream where the property's values are to be serialized; may
not be null
* @param property the property to be serialized; may not be null
* @param largeValues the interface to use for writing large values; may not be null
+ * @param references the interface to use for recording which {@link Reference}
values were found during serialization, or
+ * null if the references do not need to be accumulated
* @return true if the property was serialized, or false if it was not
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
- * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues,
LargeValues)
+ * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues,
ReferenceValues)
+ * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues,
LargeValues, ReferenceValues)
*/
public boolean serializeProperty( ObjectOutputStream stream,
Property property,
- LargeValues largeValues ) throws IOException {
+ LargeValues largeValues,
+ ReferenceValues references ) throws IOException {
assert stream != null;
assert property != null;
assert largeValues != null;
+ assert references != null;
final Name name = property.getName();
if (this.excludeUuidProperty && DnaLexicon.UUID.equals(name)) return
false;
// Write the name ...
@@ -243,7 +275,9 @@
stream.writeChar(c);
} else if (value instanceof Reference) {
stream.writeChar('R');
- stream.writeObject(((Reference)value).getString());
+ Reference ref = (Reference)value;
+ stream.writeObject(ref.getString());
+ references.write(ref);
} else if (value instanceof Binary) {
Binary binary = (Binary)value;
byte[] hash = null;
@@ -300,6 +334,8 @@
* @param updatedProperties the properties that are being updated (or removed, if
there are no values); may not be null
* @param largeValues the interface to use for writing large values; may not be null
* @param removedLargeValues the interface to use for recording the large values that
were removed; may not be null
+ * @param references the interface to use for recording which {@link Reference}
values were found during serialization, or
+ * null if the references do not need to be accumulated
* @return the number of properties
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not
be found
@@ -308,11 +344,13 @@
ObjectOutputStream output,
Collection<Property> updatedProperties,
LargeValues largeValues,
- LargeValues removedLargeValues ) throws
IOException, ClassNotFoundException {
+ LargeValues removedLargeValues,
+ ReferenceValues references ) throws IOException,
ClassNotFoundException {
assert input != null;
assert output != null;
assert updatedProperties != null;
assert largeValues != null;
+ assert references != null;
// Assemble a set of property names to skip deserializing
Set<Name> skipNames = new HashSet<Name>();
for (Property property : updatedProperties) {
@@ -330,10 +368,10 @@
assert name != null;
if (skipNames.contains(name)) {
// Deserialized, but don't materialize ...
- deserializePropertyValues(input, name, true, largeValues,
removedLargeValues);
+ deserializePropertyValues(input, name, true, largeValues,
removedLargeValues, references);
} else {
// Now read the property values ...
- Object[] values = deserializePropertyValues(input, name, false,
largeValues, removedLargeValues);
+ Object[] values = deserializePropertyValues(input, name, false,
largeValues, removedLargeValues, references);
// Add the property to the collection ...
Property property = propertyFactory.create(name, values);
assert property != null;
@@ -355,7 +393,7 @@
output.writeInt(numProperties);
for (Property property : allProperties.values()) {
if (property == null) continue;
- serializeProperty(output, property, largeValues);
+ serializeProperty(output, property, largeValues, references);
}
return numProperties;
}
@@ -368,8 +406,8 @@
* @param largeValues the interface to use for writing large values; may not be null
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not
be found
- * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues,
LargeValues)
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
+ * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues,
LargeValues, ReferenceValues)
+ * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues,
ReferenceValues)
*/
public void deserializeAllProperties( ObjectInputStream stream,
Collection<Property> properties,
@@ -395,8 +433,8 @@
* @param skippedLargeValues the interface to use for recording the large values that
were skipped; may not be null
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not
be found
- * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues,
LargeValues)
- * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues)
+ * @see #deserializePropertyValues(ObjectInputStream, Name, boolean, LargeValues,
LargeValues, ReferenceValues)
+ * @see #serializeProperties(ObjectOutputStream, int, Iterable, LargeValues,
ReferenceValues)
*/
public void deserializeSomeProperties( ObjectInputStream stream,
Collection<Property> properties,
@@ -431,14 +469,14 @@
read = name.equals(nameToRead) || (namesToRead != null &&
namesToRead.contains(name));
if (read) {
// Now read the property values ...
- Object[] values = deserializePropertyValues(stream, name, false,
skippedLargeValues, skippedLargeValues);
+ Object[] values = deserializePropertyValues(stream, name, false,
skippedLargeValues, skippedLargeValues, null);
// Add the property to the collection ...
Property property = propertyFactory.create(name, values);
assert property != null;
properties.add(property);
} else {
// Skip the property ...
- deserializePropertyValues(stream, name, true, largeValues,
skippedLargeValues);
+ deserializePropertyValues(stream, name, true, largeValues,
skippedLargeValues, null);
}
}
}
@@ -452,7 +490,7 @@
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not
be found
* @see #deserializeAllProperties(ObjectInputStream, Collection, LargeValues)
- * @see #serializeProperty(ObjectOutputStream, Property, LargeValues)
+ * @see #serializeProperty(ObjectOutputStream, Property, LargeValues,
ReferenceValues)
*/
public Property deserializeProperty( ObjectInputStream stream,
LargeValues largeValues ) throws IOException,
ClassNotFoundException {
@@ -461,7 +499,7 @@
Name name = valueFactories.getNameFactory().create(nameStr);
assert name != null;
// Now read the property values ...
- Object[] values = deserializePropertyValues(stream, name, false, largeValues,
largeValues);
+ Object[] values = deserializePropertyValues(stream, name, false, largeValues,
largeValues, null);
// Add the property to the collection ...
return propertyFactory.create(name, values);
}
@@ -474,19 +512,24 @@
* @param skip true if the values don't need to be read, or false if they are to
be read
* @param largeValues the interface to use for writing large values; may not be null
* @param skippedLargeValues the interface to use for recording the large values that
were skipped; may not be null
+ * @param references the interface to use for recording which {@link Reference}
values were found (and/or removed) during
+ * deserialization; may be null if reference values do not need to be tracked
* @return the deserialized property values, or an empty list if there are no values
* @throws IOException if there is an error writing to the
<code>stream</code> or <code>largeValues</code>
* @throws ClassNotFoundException if the class for the value's object could not
be found
* @see #deserializeAllProperties(ObjectInputStream, Collection, LargeValues)
- * @see #serializeProperty(ObjectOutputStream, Property, LargeValues)
+ * @see #serializeProperty(ObjectOutputStream, Property, LargeValues,
ReferenceValues)
*/
public Object[] deserializePropertyValues( ObjectInputStream stream,
Name propertyName,
boolean skip,
LargeValues largeValues,
- LargeValues skippedLargeValues ) throws
IOException, ClassNotFoundException {
+ LargeValues skippedLargeValues,
+ ReferenceValues references ) throws
IOException, ClassNotFoundException {
assert stream != null;
assert propertyName != null;
+ assert largeValues != null;
+ assert skippedLargeValues != null;
// Read the number of values ...
int size = stream.readInt();
Object[] values = skip ? null : new Object[size];
@@ -565,7 +608,16 @@
case 'R':
// Reference
String refValue = (String)stream.readObject();
- if (!skip) value =
valueFactories.getReferenceFactory().create(refValue);
+ Reference ref =
valueFactories.getReferenceFactory().create(refValue);
+ if (!skip || references != null) {
+ if (!skip) {
+ value = ref;
+ if (references != null) references.remove(ref);
+ } else {
+ assert references != null;
+ references.read(ref);
+ }
+ }
break;
case 'B':
// Binary
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/main/resources/org/jboss/dna/connector/store/jpa/JpaConnectorI18n.properties 2008-12-16
16:19:33 UTC (rev 686)
@@ -31,6 +31,6 @@
unableToReadLargeValue = Unable to read from {0} the large property with hash = {1}
unableToMoveRootNode = Unable to move the root node to another location in {0}
locationShouldHavePathAndOrProperty = The source {0} is unable to find a node without a
path or a {1} property
+invalidReferences = One or more references were invalid in {0}
-
basicModelDescription = Database model that stores node properties as opaque records and
children as transparent records. Large property values are stored separately.
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/JpaConnectionTest.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -70,6 +70,7 @@
private UUID rootNodeUuid;
private long largeValueSize;
private boolean compressData;
+ private boolean enforceReferentialIntegrity;
private Graph graph;
private String[] validLargeValues;
private int numPropsOnEach;
@@ -81,6 +82,7 @@
rootNodeUuid = UUID.randomUUID();
largeValueSize = 2 ^ 10; // 1 kilobyte
compressData = true;
+ enforceReferentialIntegrity = true;
numPropsOnEach = 0;
// Load in the large value ...
@@ -101,7 +103,8 @@
// Create the connection ...
cachePolicy = mock(CachePolicy.class);
- connection = new JpaConnection("source", cachePolicy, manager, model,
rootNodeUuid, largeValueSize, compressData);
+ connection = new JpaConnection("source", cachePolicy, manager, model,
rootNodeUuid, largeValueSize, compressData,
+ enforceReferentialIntegrity);
// And create the graph ...
graph = Graph.create(connection, context);
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/ModelTest.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/ModelTest.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/ModelTest.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -108,7 +108,8 @@
EntityManager entityManager,
UUID rootNodeUuid,
long
largeValueMinimumSizeInBytes,
- boolean compressData ) {
+ boolean compressData,
+ boolean
enforceReferentialIntegrity ) {
return requestProcessor;
}
}
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/models/basic/BasicModelTest.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -125,7 +125,13 @@
EntityManager manager = mock(EntityManager.class);
EntityTransaction txn = mock(EntityTransaction.class);
stub(manager.getTransaction()).toReturn(txn);
- RequestProcessor proc = model.createRequestProcessor("test source",
context, manager, UUID.randomUUID(), 100, false);
+ RequestProcessor proc = model.createRequestProcessor("test source",
+ context,
+ manager,
+ UUID.randomUUID(),
+ 100,
+ false,
+ false);
assertThat(proc, is(notNullValue()));
}
Modified:
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java
===================================================================
---
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java 2008-12-12
23:00:10 UTC (rev 685)
+++
trunk/extensions/dna-connector-store-jpa/src/test/java/org/jboss/dna/connector/store/jpa/util/SerializerTest.java 2008-12-16
16:19:33 UTC (rev 686)
@@ -45,6 +45,7 @@
import org.jboss.dna.common.util.CheckArg;
import org.jboss.dna.common.util.SecureHash;
import org.jboss.dna.common.util.StringUtil;
+import org.jboss.dna.connector.store.jpa.util.Serializer.ReferenceValues;
import org.jboss.dna.graph.BasicExecutionContext;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.properties.Binary;
@@ -66,6 +67,7 @@
private LargeValuesHolder largeValues;
private PropertyFactory propertyFactory;
private ValueFactories valueFactories;
+ private ReferenceValues references;
@Before
public void beforeEach() {
@@ -74,6 +76,7 @@
valueFactories = context.getValueFactories();
serializer = new Serializer(context, false);
largeValues = new LargeValuesHolder();
+ references = Serializer.NO_REFERENCES_VALUES;
}
@Test
@@ -274,7 +277,7 @@
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
- serializer.serializeProperty(oos, property, largeValues);
+ serializer.serializeProperty(oos, property, largeValues, references);
} finally {
oos.close();
}
@@ -301,7 +304,7 @@
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
- serializer.serializeProperties(oos, propertyList.size(), propertyList,
largeValues);
+ serializer.serializeProperties(oos, propertyList.size(), propertyList,
largeValues, references);
} finally {
oos.close();
}
@@ -344,7 +347,7 @@
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
try {
- serializer.serializeProperties(oos, initialProps.size(), initialProps,
largeValues);
+ serializer.serializeProperties(oos, initialProps.size(), initialProps,
largeValues, references);
} finally {
oos.close();
}
@@ -357,7 +360,7 @@
baos = new ByteArrayOutputStream();
oos = new ObjectOutputStream(baos);
try {
- serializer.reserializeProperties(ois, oos, updatedProps, largeValues,
removedLargeValues);
+ serializer.reserializeProperties(ois, oos, updatedProps, largeValues,
removedLargeValues, references);
} finally {
oos.close();
ois.close();