DNA SVN: r1509 - in trunk/dna-graph/src/main: java/org/jboss/dna/graph/connector and 1 other directories.
by dna-commits@lists.jboss.org
Author: bcarothers
Date: 2009-12-31 17:41:01 -0500 (Thu, 31 Dec 2009)
New Revision: 1509
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/connector/RepositoryConnectionPool.java
trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties
Log:
DNA-608 RepositoryConnectionPool.ConnectionWrapper.close Closes the Underlying Connection
Applied a patch that restores the closed flag and the related guard checks to the ConnectionWrapper class.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java 2009-12-31 21:36:01 UTC (rev 1508)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java 2009-12-31 22:41:01 UTC (rev 1509)
@@ -33,6 +33,7 @@
*/
public final class GraphI18n {
+ public static I18n closedConnectionMayNotBeUsed;
public static I18n errorConvertingIo;
public static I18n errorConvertingType;
public static I18n errorReadingPropertyValueBytes;
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/connector/RepositoryConnectionPool.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/connector/RepositoryConnectionPool.java 2009-12-31 21:36:01 UTC (rev 1508)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/connector/RepositoryConnectionPool.java 2009-12-31 22:41:01 UTC (rev 1509)
@@ -142,7 +142,7 @@
/**
* Flag specifying whether a connection should be validated before returning it from the {@link #getConnection()} method.
*/
- private final AtomicBoolean validateConnectionBeforeUse = new AtomicBoolean(true);
+ private final AtomicBoolean validateConnectionBeforeUse = new AtomicBoolean(false);
/**
* The time in nanoseconds that ping should wait before timing out and failing.
@@ -779,10 +779,8 @@
mainLock.lock();
// Remove the connection from the in-use set ...
boolean removed = this.inUseConnections.remove(wrapper);
+ assert removed;
- // This means that the wrapper was already closed at least once since the last time it was opened
- if (!removed) return;
-
// If we're shutting down the pool, then just close the connection ...
if (this.runState != RUNNING) {
wrapperToClose = wrapper;
@@ -934,6 +932,7 @@
private final RepositoryConnection original;
private final long timeCreated;
private long lastUsed;
+ private boolean closed = false;
protected ConnectionWrapper( RepositoryConnection connection ) {
assert connection != null;
@@ -973,6 +972,7 @@
* {@inheritDoc}
*/
public XAResource getXAResource() {
+ if (closed) throw new IllegalStateException(GraphI18n.closedConnectionMayNotBeUsed.text());
return this.original.getXAResource();
}
@@ -980,6 +980,7 @@
* {@inheritDoc}
*/
public CachePolicy getDefaultCachePolicy() {
+ if (closed) throw new IllegalStateException(GraphI18n.closedConnectionMayNotBeUsed.text());
return this.original.getDefaultCachePolicy();
}
@@ -991,6 +992,7 @@
*/
public void execute( ExecutionContext context,
Request request ) throws RepositorySourceException {
+ if (closed) throw new IllegalStateException(GraphI18n.closedConnectionMayNotBeUsed.text());
this.original.execute(context, request);
}
@@ -999,6 +1001,7 @@
*/
public boolean ping( long time,
TimeUnit unit ) throws InterruptedException {
+ if (closed) throw new IllegalStateException(GraphI18n.closedConnectionMayNotBeUsed.text());
return this.original.ping(time, unit);
}
@@ -1006,8 +1009,11 @@
* {@inheritDoc}
*/
public void close() {
- this.lastUsed = System.currentTimeMillis();
- returnConnection(this);
+ if (!closed) {
+ this.lastUsed = System.currentTimeMillis();
+ this.closed = true;
+ returnConnection(this);
+ }
}
}
Modified: trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties
===================================================================
--- trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties 2009-12-31 21:36:01 UTC (rev 1508)
+++ trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties 2009-12-31 22:41:01 UTC (rev 1509)
@@ -21,6 +21,7 @@
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA, or see the FSF site: http://www.fsf.org.
#
+closedConnectionMayNotBeUsed = The connection has been closed and may not be used
errorConvertingIo = Error converting {0} to a {1}
errorConvertingType = Error converting {0} to a {1}: {2}
errorReadingPropertyValueBytes = Error reading bytes
14 years, 3 months
DNA SVN: r1508 - in trunk: dna-jcr/src/main/java/org/jboss/dna/jcr/xpath and 6 other directories.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2009-12-31 16:36:01 -0500 (Thu, 31 Dec 2009)
New Revision: 1508
Added:
trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/query/HasValueQuery.java
Modified:
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrQueryManager.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/RepositoryQueryManager.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslator.java
trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java
trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java
trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslatorTest.java
trunk/dna-jcr/src/test/resources/tck/repositoryForTckTests.xml
trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/AbstractLuceneSearchEngine.java
trunk/extensions/dna-search-lucene/src/test/java/org/jboss/dna/search/lucene/LuceneSearchEngineTest.java
Log:
DNA-468 This commit includes several fixes for the XPath query functionality, some fixes for several tests, and some new unit tests. At this point there are still some outstanding issues with certain XPath tests, but these are being tracked under DNA-612 and DNA-613.
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrQueryManager.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrQueryManager.java 2009-12-31 18:53:06 UTC (rev 1507)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrQueryManager.java 2009-12-31 21:36:01 UTC (rev 1508)
@@ -297,6 +297,10 @@
this.variables = null;
}
+ protected QueryCommand getCommand() {
+ return query;
+ }
+
/**
* {@inheritDoc}
*
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/RepositoryQueryManager.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/RepositoryQueryManager.java 2009-12-31 18:53:06 UTC (rev 1507)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/RepositoryQueryManager.java 2009-12-31 21:36:01 UTC (rev 1508)
@@ -26,6 +26,7 @@
import java.io.File;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.jcr.RepositoryException;
@@ -287,6 +288,8 @@
};
this.queryEngine = new QueryEngine(planner, optimizer, processor);
+ // Index any existing content ...
+ reindexContent();
}
protected void process( Changes changes ) {
@@ -321,11 +324,13 @@
*/
@Override
public void reindexContent() {
- // Index the existing content ...
- Graph graph = Graph.create(sourceName, connectionFactory, context);
+ // Get the workspace names ...
+ Set<String> workspaces = Graph.create(sourceName, connectionFactory, context).getWorkspaces();
+
+ // Index the existing content (this obtains a connection and possibly locks the source) ...
SearchEngineIndexer indexer = new SearchEngineIndexer(context, searchEngine, connectionFactory);
try {
- for (String workspace : graph.getWorkspaces()) {
+ for (String workspace : workspaces) {
indexer.index(workspace);
}
} finally {
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslator.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslator.java 2009-12-31 18:53:06 UTC (rev 1507)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslator.java 2009-12-31 21:36:01 UTC (rev 1508)
@@ -30,7 +30,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.property.PropertyType;
import org.jboss.dna.graph.query.QueryBuilder;
import org.jboss.dna.graph.query.QueryBuilder.ConstraintBuilder;
@@ -38,7 +37,6 @@
import org.jboss.dna.graph.query.model.QueryCommand;
import org.jboss.dna.graph.query.model.TypeSystem;
import org.jboss.dna.graph.query.parse.InvalidQueryException;
-import org.jboss.dna.jcr.JcrNtLexicon;
import org.jboss.dna.jcr.xpath.XPath.And;
import org.jboss.dna.jcr.xpath.XPath.AttributeNameTest;
import org.jboss.dna.jcr.xpath.XPath.AxisStep;
@@ -153,12 +151,12 @@
AxisStep axis = (AxisStep)step;
NodeTest nodeTest = axis.getNodeTest();
if (nodeTest instanceof NameTest) {
- if (appliesToPathConstraint(axis.getPredicates())) {
- // Can go into the path constraint ...
+ NameTest nameTest = (NameTest)nodeTest;
+ if (!nameTest.isWildcard()) {
path.add(step);
- } else {
+ }
+ if (!appliesToPathConstraint(axis.getPredicates())) {
// The constraints are more complicated, so we need to define a new source/table ...
- // path.add(step);
tableName = translateSource(tableName, path, where);
translatePredicates(axis.getPredicates(), tableName, where);
path.clear();
@@ -302,19 +300,18 @@
ConstraintBuilder where ) {
if (path.size() == 0) {
// This is a query against the root node ...
- ExecutionContext context = new ExecutionContext();
String alias = newAlias();
- builder.from(JcrNtLexicon.BASE.getString(context.getNamespaceRegistry()) + " AS " + alias);
+ builder.from("nt:base AS " + alias);
where.path(alias).isEqualTo("/");
return alias;
}
String alias = newAlias();
if (tableName != null) {
// This is after some element(...) steps, so we need to join ...
- builder.joinAllNodesAs(alias);
+ builder.join("nt:base AS " + alias);
} else {
// This is the only part of the query ...
- builder.fromAllNodesAs(alias);
+ builder.from("nt:base AS " + alias);
}
tableName = alias;
if (path.size() == 1 && path.get(0).collapse() instanceof NameTest) {
@@ -341,7 +338,7 @@
NameTest typeName = elementTest.getTypeName();
if (typeName.isWildcard()) {
tableName = newAlias();
- builder.fromAllNodesAs(tableName);
+ builder.from("nt:base AS " + tableName);
} else {
if (typeName.getLocalTest() == null) {
throw new InvalidQueryException(
@@ -417,7 +414,7 @@
// This adds the criteria that the child node exists ...
NameTest childName = (NameTest)predicate;
String alias = newAlias();
- builder.joinAllNodesAs(alias).onChildNode(tableName, alias);
+ builder.join("nt:base AS " + alias).onChildNode(tableName, alias);
if (!childName.isWildcard()) where.nodeName(alias).isEqualTo(nameFrom(childName));
tableName = alias;
} else if (predicate instanceof Comparison) {
@@ -535,7 +532,7 @@
} else if (param1 instanceof NameTest) {
// refers to child node, so we need to add a join ...
String alias = newAlias();
- builder.joinAllNodesAs(alias).onChildNode(tableName, alias);
+ builder.join("nt:base AS " + alias).onChildNode(tableName, alias);
// Now add the criteria ...
where.search(alias, value);
tableName = alias;
@@ -583,7 +580,7 @@
// Special case where this is similar to '[a/@id]'
NameTest childName = (NameTest)firstStep;
String alias = newAlias();
- builder.joinAllNodesAs(alias).onChildNode(tableName, alias);
+ builder.join("nt:base AS " + alias).onChildNode(tableName, alias);
if (!childName.isWildcard()) {
where.nodeName(alias).isEqualTo(nameFrom(childName));
}
@@ -592,12 +589,12 @@
if (firstStep instanceof DescendantOrSelf) {
// Special case where this is similar to '[a/@id]'
String alias = newAlias();
- builder.joinAllNodesAs(alias).onDescendant(tableName, alias);
+ builder.join("nt:base AS " + alias).onDescendant(tableName, alias);
return translatePredicate(new PathExpression(true, steps.subList(1, steps.size())), alias, where);
}
// Add the join ...
String alias = newAlias();
- builder.joinAllNodesAs(alias).onDescendant(tableName, alias);
+ builder.join("nt:base AS " + alias).onDescendant(tableName, alias);
// Now add the criteria ...
translatePathExpressionConstraint(pathExpr, where, alias);
} else {
Modified: trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java
===================================================================
--- trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java 2009-12-31 18:53:06 UTC (rev 1507)
+++ trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java 2009-12-31 21:36:01 UTC (rev 1508)
@@ -44,6 +44,7 @@
import org.jboss.dna.graph.connector.inmemory.InMemoryRepositorySource;
import org.jboss.dna.graph.property.Name;
import org.jboss.dna.graph.property.Path.Segment;
+import org.jboss.dna.jcr.JcrQueryManager.JcrQuery;
import org.jboss.dna.jcr.JcrRepository.Option;
import org.jboss.dna.jcr.JcrRepository.QueryLanguage;
import org.junit.After;
@@ -79,6 +80,7 @@
private static JcrEngine engine;
private static JcrRepository repository;
private Session session;
+ private boolean print;
@BeforeClass
public static void beforeAll() throws Exception {
@@ -110,6 +112,13 @@
stream.close();
}
+ // Create a branch that contains some same-name-siblings ...
+ Node other = session.getRootNode().addNode("Other", "nt:unstructured");
+ other.addNode("NodeA", "nt:unstructured");
+ other.addNode("NodeA", "nt:unstructured");
+ other.addNode("NodeA", "nt:unstructured");
+ session.save();
+
// Prime creating a first XPath query and SQL query ...
session.getWorkspace().getQueryManager().createQuery("//element(*,nt:unstructured)", Query.XPATH);
session.getWorkspace().getQueryManager().createQuery("SELECT * FROM [nt:base]", JcrRepository.QueryLanguage.SQL);
@@ -131,6 +140,7 @@
@Before
public void beforeEach() throws Exception {
+ print = false;
// Obtain a session using the anonymous login capability, which we granted READ privilege
session = repository.login();
}
@@ -162,6 +172,21 @@
return result;
}
+ protected void assertResults( Query query,
+ QueryResult result,
+ long numberOfResults ) throws RepositoryException {
+ assertThat(query, is(notNullValue()));
+ assertThat(result, is(notNullValue()));
+ if (print) {
+ System.out.println();
+ System.out.println(query.getLanguage() + ": " + query.getStatement());
+ System.out.println(" --> : " + ((JcrQuery)query).getCommand());
+ System.out.println(result);
+ }
+ assertThat(result.getNodes().getSize(), is(numberOfResults));
+ assertThat(result.getRows().getSize(), is(numberOfResults));
+ }
+
protected void assertResultsHaveColumns( QueryResult result,
String... columnNames ) throws RepositoryException {
Set<String> expectedNames = new HashSet<String>();
@@ -187,7 +212,7 @@
assertThat(node.hasNode("Sports"), is(true));
assertThat(node.hasNode("Utility"), is(true));
assertThat(node.hasNode("Hybrid"), is(true));
- System.out.println(node.getNode("Hybrid").getNodes().nextNode().getPath());
+ // System.out.println(node.getNode("Hybrid").getNodes().nextNode().getPath());
assertThat(node.hasNode("Hybrid/Toyota Prius"), is(true));
assertThat(node.getPrimaryNodeType().getName(), is("nt:unstructured"));
}
@@ -207,8 +232,8 @@
assertThat(query, is(notNullValue()));
QueryResult result = query.execute();
assertThat(result, is(notNullValue()));
+ assertResults(query, result, 22);
assertResultsHaveColumns(result, "jcr:primaryType");
- // System.out.println(result);
}
// ----------------------------------------------------------------------------------------------------------------
@@ -219,6 +244,7 @@
public void shouldBeAbleToCreateXPathQuery() throws RepositoryException {
Query query = session.getWorkspace().getQueryManager().createQuery("//element(*,nt:unstructured)", Query.XPATH);
assertThat(query, is(notNullValue()));
+ assertResults(query, query.execute(), 21);
}
@Test
@@ -226,7 +252,7 @@
Query query = session.getWorkspace().getQueryManager().createQuery("//element(*,nt:base)", Query.XPATH);
assertThat(query, is(notNullValue()));
QueryResult result = query.execute();
- assertThat(result, is(notNullValue()));
+ assertResults(query, result, 22);
assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
}
@@ -235,17 +261,39 @@
Query query = session.getWorkspace().getQueryManager().createQuery("//element(*,nt:unstructured)", Query.XPATH);
assertThat(query, is(notNullValue()));
QueryResult result = query.execute();
+ assertResults(query, result, 21);
assertThat(result, is(notNullValue()));
assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
}
@Test
+ public void shouldBeAbleToExecuteXPathQueryToFindSameNameSiblingsByIndex() throws RepositoryException {
+ Query query = session.getWorkspace().getQueryManager().createQuery("/jcr:root/Other/NodeA", Query.XPATH);
+ assertThat(query, is(notNullValue()));
+ QueryResult result = query.execute();
+ print = true;
+ assertResults(query, result, 1);
+ assertThat(result, is(notNullValue()));
+ assertThat(result.getNodes().nextNode().getIndex(), is(1));
+ assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
+
+ query = session.getWorkspace().getQueryManager().createQuery("/jcr:root/Other/NodeA[2]", Query.XPATH);
+ assertThat(query, is(notNullValue()));
+ result = query.execute();
+ print = true;
+ assertResults(query, result, 1);
+ assertThat(result, is(notNullValue()));
+ assertThat(result.getNodes().nextNode().getIndex(), is(2));
+ assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
+ }
+
+ @Test
public void shouldBeAbleToExecuteXPathQueryToFindAllCarNodes() throws RepositoryException {
Query query = session.getWorkspace().getQueryManager().createQuery("//element(*,car:Car)", Query.XPATH);
assertThat(query, is(notNullValue()));
QueryResult result = query.execute();
assertThat(result, is(notNullValue()));
- // System.out.println(result);
+ assertResults(query, result, 12);
assertResultsHaveColumns(result,
"jcr:primaryType",
"jcr:path",
@@ -268,18 +316,27 @@
Query query = session.getWorkspace().getQueryManager().createQuery("/jcr:root", Query.XPATH);
assertThat(query, is(notNullValue()));
QueryResult result = query.execute();
+ assertResults(query, result, 1);
+ assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
+ }
+
+ @Test
+ public void shouldBeAbleToExecuteXPathQueryToFindChildOfRootNode() throws RepositoryException {
+ Query query = session.getWorkspace().getQueryManager().createQuery("/jcr:root/Cars", Query.XPATH);
+ assertThat(query, is(notNullValue()));
+ QueryResult result = query.execute();
assertThat(result, is(notNullValue()));
- // System.out.println(result);
+ assertResults(query, result, 1);
assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
}
- // @Test
- // public void shouldBeAbleToExecuteXPathQueryToFindChildOfRootNode() throws RepositoryException {
- // Query query = session.getWorkspace().getQueryManager().createQuery("/jcr:root/Cars", Query.XPATH);
- // assertThat(query, is(notNullValue()));
- // QueryResult result = query.execute();
- // assertThat(result, is(notNullValue()));
- // System.out.println(result);
- // assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
- // }
+ @Test
+ public void shouldBeAbleToExecuteXPathQueryToFindChildOfRootNodeWithTypeCriteria() throws RepositoryException {
+ Query query = session.getWorkspace().getQueryManager().createQuery("/jcr:root/Cars[@jcr:primaryType]", Query.XPATH);
+ assertThat(query, is(notNullValue()));
+ QueryResult result = query.execute();
+ assertThat(result, is(notNullValue()));
+ assertResults(query, result, 1);
+ assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
+ }
}
Modified: trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java
===================================================================
--- trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java 2009-12-31 18:53:06 UTC (rev 1507)
+++ trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java 2009-12-31 21:36:01 UTC (rev 1508)
@@ -177,7 +177,7 @@
addTestSuite(org.apache.jackrabbit.test.api.RepositoryLoginTest.class);
// These might not all be level one tests
- // addTestSuite(org.apache.jackrabbit.test.api.query.XPathPosIndexTest.class);
+ addTestSuite(org.apache.jackrabbit.test.api.query.XPathPosIndexTest.class);
// addTestSuite(org.apache.jackrabbit.test.api.query.XPathDocOrderTest.class);
// addTestSuite(org.apache.jackrabbit.test.api.query.XPathOrderByTest.class);
addTestSuite(org.apache.jackrabbit.test.api.query.XPathJcrPathTest.class);
@@ -188,7 +188,7 @@
addTestSuite(org.apache.jackrabbit.test.api.query.GetSupportedQueryLanguagesTest.class);
addTestSuite(org.apache.jackrabbit.test.api.query.GetPropertyNamesTest.class);
addTestSuite(org.apache.jackrabbit.test.api.query.PredicatesTest.class);
- // addTestSuite(org.apache.jackrabbit.test.api.query.SimpleSelectionTest.class);
+ addTestSuite(org.apache.jackrabbit.test.api.query.SimpleSelectionTest.class);
// The tests in this suite are level one
addTest(org.apache.jackrabbit.test.api.nodetype.TestAll.suite());
Modified: trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslatorTest.java
===================================================================
--- trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslatorTest.java 2009-12-31 18:53:06 UTC (rev 1507)
+++ trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslatorTest.java 2009-12-31 21:36:01 UTC (rev 1508)
@@ -65,12 +65,12 @@
@Test
public void shouldTranslateFromXPathOfAnyNode() {
- assertThat(xpath("//element(*)"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1"));
- assertThat(xpath("/jcr:root//element(*)"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1"));
- assertThat(xpath("//*"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1"));
- assertThat(xpath("/jcr:root//*"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1"));
- assertThat(xpath("//."), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1"));
- assertThat(xpath("/jcr:root//."), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1"));
+ assertThat(xpath("//element(*)"), isSql("SELECT * FROM [nt:base] AS nodeSet1"));
+ assertThat(xpath("/jcr:root//element(*)"), isSql("SELECT * FROM [nt:base] AS nodeSet1"));
+ assertThat(xpath("//*"), isSql("SELECT * FROM [nt:base] AS nodeSet1"));
+ assertThat(xpath("/jcr:root//*"), isSql("SELECT * FROM [nt:base] AS nodeSet1"));
+ assertThat(xpath("//."), isSql("SELECT * FROM [nt:base] AS nodeSet1"));
+ assertThat(xpath("/jcr:root//."), isSql("SELECT * FROM [nt:base] AS nodeSet1"));
}
@Test
@@ -80,69 +80,70 @@
@Test
public void shouldTranslateFromXPathContainingExplicitPath() {
- assertThat(xpath("/jcr:root/a"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a'"));
- assertThat(xpath("/jcr:root/a/b"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b'"));
- assertThat(xpath("/jcr:root/a/b/c"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b/c'"));
- assertThat(xpath("/jcr:root/a/b/c/d"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b/c/d'"));
+ assertThat(xpath("/jcr:root/a"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a'"));
+ assertThat(xpath("/jcr:root/a/b"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b'"));
+ assertThat(xpath("/jcr:root/a/b/c"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b/c'"));
+ assertThat(xpath("/jcr:root/a/b/c/d"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b/c/d'"));
}
@Test
public void shouldTranslateFromXPathContainingExplicitPathWithChildNumbers() {
- assertThat(xpath("/jcr:root/a[2]/b"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a[2]/b'"));
- assertThat(xpath("/jcr:root/a/b[3]"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b[3]'"));
- assertThat(xpath("/jcr:root/a[2]/b[3]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a[2]/b[3]'"));
+ assertThat(xpath("/jcr:root/a[2]/b"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a[2]/b'"));
+ assertThat(xpath("/jcr:root/a/b[3]"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b[3]'"));
+ assertThat(xpath("/jcr:root/a[2]/b[3]"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a[2]/b[3]'"));
}
@Test
public void shouldTranslateFromXPathContainingExplicitPathWithWildcardChildNumbers() {
- assertThat(xpath("/jcr:root/a[*]/b"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b'"));
- assertThat(xpath("/jcr:root/a/b[*]"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b'"));
- assertThat(xpath("/jcr:root/a[*]/b[*]"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b'"));
+ assertThat(xpath("/jcr:root/a[*]/b"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b'"));
+ assertThat(xpath("/jcr:root/a/b[*]"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b'"));
+ assertThat(xpath("/jcr:root/a[*]/b[*]"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b'"));
}
@Test
public void shouldTranslateFromXPathContainingPathWithDescendantOrSelf() {
assertThat(xpath("/jcr:root/a/b//c"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b/c' OR PATH(nodeSet1) LIKE '/a/b/%/c'"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b/c' OR PATH(nodeSet1) LIKE '/a/b/%/c'"));
assertThat(xpath("/jcr:root/a/b[2]//c"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b[2]/c' OR PATH(nodeSet1) LIKE '/a/b[2]/%/c'"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b[2]/c' OR PATH(nodeSet1) LIKE '/a/b[2]/%/c'"));
assertThat(xpath("/jcr:root/a/b//c[4]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b/c[4]' OR PATH(nodeSet1) LIKE '/a/b/%/c[4]'"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b/c[4]' OR PATH(nodeSet1) LIKE '/a/b/%/c[4]'"));
assertThat(xpath("/jcr:root/a/b[2]//c[4]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b[2]/c[4]' OR PATH(nodeSet1) LIKE '/a/b[2]/%/c[4]'"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b[2]/c[4]' OR PATH(nodeSet1) LIKE '/a/b[2]/%/c[4]'"));
}
@Test
public void shouldTranslateFromXPathContainingPathWithMultipleDescendantOrSelf() {
assertThat(xpath("/jcr:root/a/b//c//d"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE (((PATH(nodeSet1) = '/a/b/c/d' OR PATH(nodeSet1) LIKE '/a/b/%/c/d') OR PATH(nodeSet1) LIKE '/a/b/c/%/d') OR PATH(nodeSet1) LIKE '/a/b/%/c/%/d')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE (((PATH(nodeSet1) = '/a/b/c/d' OR PATH(nodeSet1) LIKE '/a/b/%/c/d') OR PATH(nodeSet1) LIKE '/a/b/c/%/d') OR PATH(nodeSet1) LIKE '/a/b/%/c/%/d')"));
}
@Test
public void shouldTranslateFromXPathContainingPredicatesUsingRelativePaths() {
assertThat(xpath("//element(*,my:type)[a/@id]"),
- isSql("SELECT * FROM [my:type] JOIN __ALLNODES__ as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) WHERE NAME(nodeSet1) = 'a' AND nodeSet1.id IS NOT NULL"));
+ isSql("SELECT * FROM [my:type] JOIN [nt:base] as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) WHERE NAME(nodeSet1) = 'a' AND nodeSet1.id IS NOT NULL"));
assertThat(xpath("//element(*,my:type)[a/b/@id]"),
- isSql("SELECT * FROM [my:type] JOIN __ALLNODES__ as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN __ALLNODES__ as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE (NAME(nodeSet1) = 'a' AND NAME(nodeSet2) = 'b') AND nodeSet2.id IS NOT NULL"));
+ isSql("SELECT * FROM [my:type] JOIN [nt:base] as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN [nt:base] as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE (NAME(nodeSet1) = 'a' AND NAME(nodeSet2) = 'b') AND nodeSet2.id IS NOT NULL"));
assertThat(xpath("//element(*,my:type)[a/b/((@id and @name) or not(@address))]"),
- isSql("SELECT * FROM [my:type] JOIN __ALLNODES__ as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN __ALLNODES__ as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE (NAME(nodeSet1) = 'a' AND NAME(nodeSet2) = 'b') AND ((nodeSet2.id IS NOT NULL and nodeSet2.name IS NOT NULL) OR (NOT(nodeSet2.address IS NOT NULL)))"));
+ isSql("SELECT * FROM [my:type] JOIN [nt:base] as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN [nt:base] as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE (NAME(nodeSet1) = 'a' AND NAME(nodeSet2) = 'b') AND ((nodeSet2.id IS NOT NULL and nodeSet2.name IS NOT NULL) OR (NOT(nodeSet2.address IS NOT NULL)))"));
assertThat(xpath("//element(*,my:type)[./a/b/((@id and @name) or not(@address))]"),
- isSql("SELECT * FROM [my:type] JOIN __ALLNODES__ as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN __ALLNODES__ as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE (NAME(nodeSet1) = 'a' AND NAME(nodeSet2) = 'b') AND ((nodeSet2.id IS NOT NULL and nodeSet2.name IS NOT NULL) OR (NOT(nodeSet2.address IS NOT NULL)))"));
+ isSql("SELECT * FROM [my:type] JOIN [nt:base] as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN [nt:base] as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE (NAME(nodeSet1) = 'a' AND NAME(nodeSet2) = 'b') AND ((nodeSet2.id IS NOT NULL and nodeSet2.name IS NOT NULL) OR (NOT(nodeSet2.address IS NOT NULL)))"));
assertThat(xpath("//element(*,my:type)[a/b/((@id and @name) or not(jcr:contains(@desc,'rock star')))]"),
- isSql("SELECT * FROM [my:type] JOIN __ALLNODES__ as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN __ALLNODES__ as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE (NAME(nodeSet1) = 'a' AND NAME(nodeSet2) = 'b') AND ((nodeSet2.id IS NOT NULL and nodeSet2.name IS NOT NULL) OR (NOT(CONTAINS(nodeSet2.desc,'rock star'))))"));
+ isSql("SELECT * FROM [my:type] JOIN [nt:base] as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN [nt:base] as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE (NAME(nodeSet1) = 'a' AND NAME(nodeSet2) = 'b') AND ((nodeSet2.id IS NOT NULL and nodeSet2.name IS NOT NULL) OR (NOT(CONTAINS(nodeSet2.desc,'rock star'))))"));
assertThat(xpath("//element(*,my:type)[*/@id]"),
- isSql("SELECT * FROM [my:type] JOIN __ALLNODES__ as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) WHERE nodeSet1.id IS NOT NULL"));
+ isSql("SELECT * FROM [my:type] JOIN [nt:base] as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) WHERE nodeSet1.id IS NOT NULL"));
assertThat(xpath("//element(*,my:type)[*/*/@id]"),
- isSql("SELECT * FROM [my:type] JOIN __ALLNODES__ as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN __ALLNODES__ as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE nodeSet2.id IS NOT NULL"));
+ isSql("SELECT * FROM [my:type] JOIN [nt:base] as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN [nt:base] as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE nodeSet2.id IS NOT NULL"));
assertThat(xpath("//element(*,my:type)[./*/*/@id]"),
- isSql("SELECT * FROM [my:type] JOIN __ALLNODES__ as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN __ALLNODES__ as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE nodeSet2.id IS NOT NULL"));
+ isSql("SELECT * FROM [my:type] JOIN [nt:base] as nodeSet1 ON ISCHILDNODE(nodeSet1,[my:type]) JOIN [nt:base] as nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE nodeSet2.id IS NOT NULL"));
assertThat(xpath("//element(*,my:type)[.//@id]"),
- isSql("SELECT * FROM [my:type] JOIN __ALLNODES__ as nodeSet1 ON ISDESCENDANTNODE(nodeSet1,[my:type]) WHERE nodeSet1.id IS NOT NULL"));
+ isSql("SELECT * FROM [my:type] JOIN [nt:base] as nodeSet1 ON ISDESCENDANTNODE(nodeSet1,[my:type]) WHERE nodeSet1.id IS NOT NULL"));
}
@Test
public void shouldTranslateFromXPathContainingPredicatesIdentifyingPropertiesThatMustHaveValues() {
+ assertThat(xpath("/jcr:root/testroot/serializationNode[@jcr:primaryType]"),
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/testroot/serializationNode' AND nodeSet1.[jcr:primaryType] IS NOT NULL"));
assertThat(xpath("//element(*,my:type)[@id]"), isSql("SELECT * FROM [my:type] WHERE id IS NOT NULL"));
assertThat(xpath("//element(*,my:type)[@id][@name]"),
isSql("SELECT * FROM [my:type] WHERE id IS NOT NULL AND name IS NOT NULL"));
@@ -191,8 +192,8 @@
assertThat(xpath("//element(*,my:type)/(@id union @name)"), isSql("SELECT id, name FROM [my:type]"));
assertThat(xpath("//element(*,my:type)/(@id union @name union @x:address)"),
isSql("SELECT id, name, [x:address] FROM [my:type]"));
- assertThat(xpath("//(@id|@name)"), isSql("SELECT nodeSet1.id, nodeSet1.name FROM __ALLNODES__ AS nodeSet1"));
- assertThat(xpath("//./(@id|@name)"), isSql("SELECT nodeSet1.id, nodeSet1.name FROM __ALLNODES__ AS nodeSet1"));
+ assertThat(xpath("//(@id|@name)"), isSql("SELECT nodeSet1.id, nodeSet1.name FROM [nt:base] AS nodeSet1"));
+ assertThat(xpath("//./(@id|@name)"), isSql("SELECT nodeSet1.id, nodeSet1.name FROM [nt:base] AS nodeSet1"));
}
@Test
@@ -207,68 +208,64 @@
@Test
public void shouldTranslateFromXPathOfAnyNodeWithName() {
- assertThat(xpath("//element(nodeName,*)"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName'"));
+ assertThat(xpath("//element(nodeName,*)"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName'"));
- assertThat(xpath("//element(nodeName,*)"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName'"));
+ assertThat(xpath("//element(nodeName,*)"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName'"));
- assertThat(xpath("//nodeName"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName'"));
+ assertThat(xpath("//nodeName"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName'"));
assertThat(xpath("/jcr:root//element(nodeName,*)"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName'"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName'"));
- assertThat(xpath("/jcr:root//nodeName"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName'"));
+ assertThat(xpath("/jcr:root//nodeName"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName'"));
}
@Test
public void shouldTranslateFromXPathOfNodeWithNameUnderRoot() {
assertThat(xpath("/jcr:root/element(nodeName,*)"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName' AND DEPTH(nodeSet1) = CAST(1 AS LONG)"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName' AND DEPTH(nodeSet1) = CAST(1 AS LONG)"));
- assertThat(xpath("/jcr:root/nodeName"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/nodeName'"));
+ assertThat(xpath("/jcr:root/nodeName"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/nodeName'"));
- assertThat(xpath("nodeName"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/nodeName'"));
+ assertThat(xpath("nodeName"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/nodeName'"));
}
@Test
public void shouldTranslateFromXPathOfAnyNodeUsingPredicate() {
assertThat(xpath("//.[jcr:contains(.,'bar')]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE CONTAINS(nodeSet1.*,'bar')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE CONTAINS(nodeSet1.*,'bar')"));
assertThat(xpath("//.[jcr:contains(a,'bar')]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 JOIN __ALLNODES__ AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE CONTAINS(nodeSet2.*,'bar')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 JOIN [nt:base] AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE CONTAINS(nodeSet2.*,'bar')"));
assertThat(xpath("//*[jcr:contains(.,'bar')]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE CONTAINS(nodeSet1.*,'bar')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE CONTAINS(nodeSet1.*,'bar')"));
assertThat(xpath("//*[jcr:contains(a,'bar')]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 JOIN __ALLNODES__ AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE CONTAINS(nodeSet2.*,'bar')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 JOIN [nt:base] AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE CONTAINS(nodeSet2.*,'bar')"));
assertThat(xpath("//*[jcr:contains(a/@b,'bar')]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 JOIN __ALLNODES__ AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet2.b,'bar')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 JOIN [nt:base] AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet2.b,'bar')"));
assertThat(xpath("//*[jcr:contains(a/*/@b,'bar')]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 JOIN __ALLNODES__ AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) JOIN __ALLNODES__ AS nodeSet3 ON ISCHILDNODE(nodeSet3,nodeSet2) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet3.b,'bar')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 JOIN [nt:base] AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) JOIN [nt:base] AS nodeSet3 ON ISCHILDNODE(nodeSet3,nodeSet2) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet3.b,'bar')"));
assertThat(xpath("/jcr:root//element(*)[jcr:contains(a/@b,'bar')]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 JOIN __ALLNODES__ AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet2.b,'bar')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 JOIN [nt:base] AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet2.b,'bar')"));
assertThat(xpath("/jcr:root//element(*)[jcr:contains(a/*/@b,'bar')]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 JOIN __ALLNODES__ AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) JOIN __ALLNODES__ AS nodeSet3 ON ISCHILDNODE(nodeSet3,nodeSet2) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet3.b,'bar')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 JOIN [nt:base] AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) JOIN [nt:base] AS nodeSet3 ON ISCHILDNODE(nodeSet3,nodeSet2) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet3.b,'bar')"));
assertThat(xpath("/jcr:root//*[jcr:contains(a/@b,'bar')]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 JOIN __ALLNODES__ AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet2.b,'bar')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 JOIN [nt:base] AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet2.b,'bar')"));
assertThat(xpath("/jcr:root//*[jcr:contains(a/*/@b,'bar')]"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 JOIN __ALLNODES__ AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) JOIN __ALLNODES__ AS nodeSet3 ON ISCHILDNODE(nodeSet3,nodeSet2) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet3.b,'bar')"));
+ isSql("SELECT * FROM [nt:base] AS nodeSet1 JOIN [nt:base] AS nodeSet2 ON ISCHILDNODE(nodeSet2,nodeSet1) JOIN [nt:base] AS nodeSet3 ON ISCHILDNODE(nodeSet3,nodeSet2) WHERE NAME(nodeSet2) = 'a' AND CONTAINS(nodeSet3.b,'bar')"));
}
- // @Test
- // public void shouldParseXPathExpressions() {
- // assertParsable("/jcr:root/a/b/c");
- // assertParsable("/jcr:root/a/b/c[*]");
- // assertParsable("/jcr:root/some[1]/element(nodes, my:type)[1]");
- // assertParsable("//element(*,my:type)");
- // assertParsable("//element(*,my:type)[@jcr:title='something' and @globalProperty='something else']");
- // assertParsable("//element(*,my:type)[@jcr:title | @globalProperty]");
- // assertParsable("//element(*, my:type) order by @my:title");
- // assertParsable("//element(*, my:type) [jcr:contains(., 'jcr')] order by jcr:score() descending");
- // assertParsable("//element(*, employee)[@secretary and @assistant]");
- // }
+ @Test
+ public void shouldParseXPathExpressions() {
+ xpath("/jcr:root/a/b/c");
+ xpath("/jcr:root/a/b/c[*]");
+ xpath("/jcr:root/some[1]/element(nodes, my:type)[1]");
+ xpath("//element(*,my:type)");
+ xpath("//element(*,my:type)[@jcr:title='something' and @globalProperty='something else']");
+ xpath("//element(*,my:type)[@jcr:title | @globalProperty]");
+ xpath("//element(*, my:type) order by @my:title");
+ xpath("//element(*, my:type) [jcr:contains(., 'jcr')] order by jcr:score() descending");
+ xpath("//element(*, employee)[@secretary and @assistant]");
+ }
// ----------------------------------------------------------------------------------------------------------------
// utility methods
Modified: trunk/dna-jcr/src/test/resources/tck/repositoryForTckTests.xml
===================================================================
--- trunk/dna-jcr/src/test/resources/tck/repositoryForTckTests.xml 2009-12-31 18:53:06 UTC (rev 1507)
+++ trunk/dna-jcr/src/test/resources/tck/repositoryForTckTests.xml 2009-12-31 21:36:01 UTC (rev 1508)
@@ -49,4 +49,13 @@
Cannot test \r character here on Windows as TCK XML Parser will replace it with \n and fail the comparison
-->
<nt:unstructured jcr:name="node4" multi-line-property="Line	1
Line 2"/>
+ <!--
+ XPathPosIndexTest requires multiple nodes named 'node1'
+ -->
+ <nt:unstructured jcr:name="node1" prop1="<foo&foo>">
+ <nt:unstructured jcr:name="jcr:xmltext" jcr:xmlcharacters="This is the text for node1[2]!" />
+ </nt:unstructured>
+ <nt:unstructured jcr:name="node1" prop1="<foo&foo>">
+ <nt:unstructured jcr:name="jcr:xmltext" jcr:xmlcharacters="This is the text for node1[3]!" />
+ </nt:unstructured>
</testroot>
\ No newline at end of file
Modified: trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/AbstractLuceneSearchEngine.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/AbstractLuceneSearchEngine.java 2009-12-31 18:53:06 UTC (rev 1507)
+++ trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/AbstractLuceneSearchEngine.java 2009-12-31 21:36:01 UTC (rev 1508)
@@ -25,6 +25,7 @@
import java.io.IOException;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
@@ -87,7 +88,6 @@
import org.jboss.dna.graph.query.model.PropertyExistence;
import org.jboss.dna.graph.query.model.PropertyValue;
import org.jboss.dna.graph.query.model.SameNode;
-import org.jboss.dna.graph.query.model.SelectorName;
import org.jboss.dna.graph.query.model.SetCriteria;
import org.jboss.dna.graph.query.model.StaticOperand;
import org.jboss.dna.graph.query.model.TypeSystem;
@@ -103,6 +103,8 @@
import org.jboss.dna.graph.search.SearchEngine;
import org.jboss.dna.graph.search.SearchEngineProcessor;
import org.jboss.dna.graph.search.SearchEngineWorkspace;
+import org.jboss.dna.search.lucene.query.HasValueQuery;
+import org.jboss.dna.search.lucene.query.MatchNoneQuery;
/**
* An abstract {@link SearchEngine} implementation that is set up to use the Lucene library. This provides an abstract
@@ -349,11 +351,22 @@
Query pushDownQuery = null;
Constraint postProcessConstraint = null;
try {
+ QueryFactory queryFactory = null;
for (Constraint andedConstraint : request.andedConstraints()) {
// Determine if it can be represented as a Lucene query ...
assert andedConstraint != null;
- Query constraintQuery = queryFactory(session, request.variables()).createQuery(andedConstraint);
+ if (queryFactory == null) queryFactory = queryFactory(session, request.variables());
+ Query constraintQuery = queryFactory.createQuery(andedConstraint);
if (constraintQuery != null) {
+ if (constraintQuery instanceof MatchAllDocsQuery) {
+ // This constraint includes all values, so we can just skip it ...
+ continue;
+ }
+ if (constraintQuery instanceof MatchNoneQuery) {
+ // This constraint invalidates all of the other AND-ed constraints ...
+ pushDownQuery = constraintQuery;
+ break;
+ }
// The AND-ed constraint _can_ be represented as a push-down Lucene query ...
if (pushDownQuery == null) {
// This must be the first query ...
@@ -398,47 +411,54 @@
// Get the results from Lucene ...
List<Object[]> tuples = null;
final Columns columns = request.resultColumns();
- try {
- // Execute the query against the content indexes ...
- IndexSearcher searcher = session.getContentSearcher();
- TupleCollector collector = session.createTupleCollector(columns);
- searcher.search(pushDownQuery, collector);
- tuples = collector.getTuples();
- } catch (IOException e) {
- // There was a problem executing the Lucene query ...
- request.setError(e);
- return;
+ if (pushDownQuery instanceof MatchNoneQuery) {
+ // There are no results ...
+ tuples = Collections.emptyList();
+ } else {
+ try {
+ // Execute the query against the content indexes ...
+ IndexSearcher searcher = session.getContentSearcher();
+ TupleCollector collector = session.createTupleCollector(columns);
+ searcher.search(pushDownQuery, collector);
+ tuples = collector.getTuples();
+ } catch (IOException e) {
+ // There was a problem executing the Lucene query ...
+ request.setError(e);
+ return;
+ }
}
- if (postProcessConstraint != null && !tuples.isEmpty()) {
- // Create a delegate processing component that will return the tuples we've already found ...
- final List<Object[]> allTuples = tuples;
- QueryContext queryContext = new QueryContext(request.schemata(), typeSystem, null, new SimpleProblems(),
- request.variables());
- ProcessingComponent tuplesProcessor = new ProcessingComponent(queryContext, columns) {
- @Override
- public List<Object[]> execute() {
- return allTuples;
- }
- };
- // Create a processing component that will apply these constraints to the tuples we already found ...
- SelectComponent selector = new SelectComponent(tuplesProcessor, postProcessConstraint, request.variables());
- tuples = selector.execute();
- }
+ if (!tuples.isEmpty()) {
+ if (postProcessConstraint != null) {
+ // Create a delegate processing component that will return the tuples we've already found ...
+ final List<Object[]> allTuples = tuples;
+ QueryContext queryContext = new QueryContext(request.schemata(), typeSystem, null, new SimpleProblems(),
+ request.variables());
+ ProcessingComponent tuplesProcessor = new ProcessingComponent(queryContext, columns) {
+ @Override
+ public List<Object[]> execute() {
+ return allTuples;
+ }
+ };
+ // Create a processing component that will apply these constraints to the tuples we already found ...
+ SelectComponent selector = new SelectComponent(tuplesProcessor, postProcessConstraint, request.variables());
+ tuples = selector.execute();
+ }
- // Limit the tuples ...
- Limit limit = request.limit();
- if (!limit.isUnlimited()) {
- int firstIndex = limit.getOffset();
- int maxRows = Math.min(tuples.size(), limit.getRowLimit());
- if (firstIndex > 0) {
- if (firstIndex > tuples.size()) {
- tuples.clear();
+ // Limit the tuples ...
+ Limit limit = request.limit();
+ if (!limit.isUnlimited()) {
+ int firstIndex = limit.getOffset();
+ int maxRows = Math.min(tuples.size(), limit.getRowLimit());
+ if (firstIndex > 0) {
+ if (firstIndex > tuples.size()) {
+ tuples.clear();
+ } else {
+ tuples = tuples.subList(firstIndex, maxRows);
+ }
} else {
- tuples = tuples.subList(firstIndex, maxRows);
+ tuples = tuples.subList(0, maxRows);
}
- } else {
- tuples = tuples.subList(0, maxRows);
}
}
@@ -518,7 +538,7 @@
}
if (constraint instanceof PropertyExistence) {
PropertyExistence existence = (PropertyExistence)constraint;
- return createQuery(existence.getSelectorName(), existence.getPropertyName());
+ return createQuery(existence);
}
if (constraint instanceof Between) {
Between between = (Between)constraint;
@@ -680,10 +700,13 @@
return valueFactory.create(stringFactory.create(value).toLowerCase());
}
- public Query createQuery( SelectorName selectorName,
- String propertyName ) {
- Term term = new Term(fieldNameFor(propertyName));
- return new TermQuery(term);
+ public Query createQuery( PropertyExistence existence ) {
+ String propertyName = existence.getPropertyName();
+ if ("jcr:primaryType".equals(propertyName)) {
+ // All nodes have a primary type, so therefore we can match all documents ...
+ return new MatchAllDocsQuery();
+ }
+ return new HasValueQuery(fieldNameFor(propertyName));
}
public Query createQuery( String fieldName,
Added: trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/query/HasValueQuery.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/query/HasValueQuery.java (rev 0)
+++ trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/query/HasValueQuery.java 2009-12-31 21:36:01 UTC (rev 1508)
@@ -0,0 +1,259 @@
+/*
+ * JBoss DNA (http://www.jboss.org/dna)
+ * See the COPYRIGHT.txt file distributed with this work for information
+ * regarding copyright ownership. Some portions may be licensed
+ * to Red Hat, Inc. under one or more contributor license agreements.
+ * See the AUTHORS.txt file in the distribution for a full listing of
+ * individual contributors.
+ *
+ * JBoss DNA is free software. Unless otherwise indicated, all code in JBoss DNA
+ * is licensed to you under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * JBoss DNA is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.search.lucene.query;
+
+import java.io.IOException;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FieldSelector;
+import org.apache.lucene.document.FieldSelectorResult;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Searcher;
+import org.apache.lucene.search.Similarity;
+import org.apache.lucene.search.Weight;
+
+/**
+ * A Lucene {@link Query} implementation that is satisfied if there is at least one value for a document.
+ */
+public class HasValueQuery extends Query {
+
+ private static final long serialVersionUID = 1L;
+
+ protected final String fieldName;
+ protected final FieldSelector fieldSelector;
+
+ /**
+ * Construct a {@link Query} implementation that scores nodes according to the supplied comparator.
+ *
+ * @param fieldName the name of the document field containing the value; may not be null
+ */
+ public HasValueQuery( final String fieldName ) {
+ this.fieldName = fieldName;
+ this.fieldSelector = new FieldSelector() {
+ private static final long serialVersionUID = 1L;
+
+ public FieldSelectorResult accept( String fieldName ) {
+ return HasValueQuery.this.fieldName.equals(fieldName) ? FieldSelectorResult.LOAD_AND_BREAK : FieldSelectorResult.NO_LOAD;
+ }
+ };
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.Query#clone()
+ */
+ @Override
+ public Object clone() {
+ return new HasValueQuery(fieldName);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.Query#createWeight(org.apache.lucene.search.Searcher)
+ */
+ @Override
+ public Weight createWeight( Searcher searcher ) {
+ return new ExistsWeight(searcher);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.Query#toString(java.lang.String)
+ */
+ @Override
+ public String toString( String field ) {
+ return fieldName + " exists";
+ }
+
+ protected boolean hasValue( IndexReader reader,
+ int docId ) throws IOException {
+ Document doc = reader.document(docId, fieldSelector);
+ String valueString = doc.get(fieldName);
+ return valueString != null;
+ }
+
+ /**
+ * Calculates query weights and builds query scores for our NOT queries.
+ */
+ protected class ExistsWeight extends Weight {
+ private static final long serialVersionUID = 1L;
+ private final Searcher searcher;
+
+ protected ExistsWeight( Searcher searcher ) {
+ this.searcher = searcher;
+ assert this.searcher != null;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.Weight#getQuery()
+ */
+ @Override
+ public Query getQuery() {
+ return HasValueQuery.this;
+ }
+
+ /**
+ * {@inheritDoc}
+ * <p>
+ * This implementation always returns a weight factor of 1.0.
+ * </p>
+ *
+ * @see org.apache.lucene.search.Weight#getValue()
+ */
+ @Override
+ public float getValue() {
+ return 1.0f; // weight factor of 1.0
+ }
+
+ /**
+ * {@inheritDoc}
+ * <p>
+ * This implementation always returns a normalization factor of 1.0.
+ * </p>
+ *
+ * @see org.apache.lucene.search.Weight#sumOfSquaredWeights()
+ */
+ @Override
+ public float sumOfSquaredWeights() {
+ return 1.0f; // normalization factor of 1.0
+ }
+
+ /**
+ * {@inheritDoc}
+ * <p>
+ * This implementation always does nothing, as there is nothing to normalize.
+ * </p>
+ *
+ * @see org.apache.lucene.search.Weight#normalize(float)
+ */
+ @Override
+ public void normalize( float norm ) {
+ // No need to do anything here
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.Weight#scorer(org.apache.lucene.index.IndexReader, boolean, boolean)
+ */
+ @Override
+ public Scorer scorer( IndexReader reader,
+ boolean scoreDocsInOrder,
+ boolean topScorer ) {
+ // Return a custom scorer ...
+ return new ExistsScorer(reader);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.Weight#explain(org.apache.lucene.index.IndexReader, int)
+ */
+ @Override
+ public Explanation explain( IndexReader reader,
+ int doc ) {
+ return new Explanation(getValue(), getQuery().toString());
+ }
+ }
+
+ /**
+ * A scorer for the Path query.
+ */
+ protected class ExistsScorer extends Scorer {
+ private int docId = -1;
+ private final int maxDocId;
+ private final IndexReader reader;
+
+ protected ExistsScorer( IndexReader reader ) {
+ // We don't care which Similarity we have, because we don't use it. So get the default.
+ super(Similarity.getDefault());
+ this.reader = reader;
+ assert this.reader != null;
+ this.maxDocId = this.reader.maxDoc() - 1;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.DocIdSetIterator#docID()
+ */
+ @Override
+ public int docID() {
+ return docId;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.DocIdSetIterator#nextDoc()
+ */
+ @Override
+ public int nextDoc() throws IOException {
+ do {
+ ++docId;
+ if (reader.isDeleted(docId)) {
+ // We should skip this document ...
+ continue;
+ }
+ if (hasValue(reader, docId)) return docId;
+ } while (docId < maxDocId);
+ return Scorer.NO_MORE_DOCS;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.DocIdSetIterator#advance(int)
+ */
+ @Override
+ public int advance( int target ) throws IOException {
+ if (target == Scorer.NO_MORE_DOCS) return target;
+ while (true) {
+ int doc = nextDoc();
+ if (doc >= target) return doc;
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ * <p>
+ * This method always returns a score of 1.0 for the current document, since only those documents that satisfy the NOT are
+ * scored by this scorer.
+ * </p>
+ *
+ * @see org.apache.lucene.search.Scorer#score()
+ */
+ @Override
+ public float score() {
+ return 1.0f;
+ }
+ }
+}
Property changes on: trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/query/HasValueQuery.java
___________________________________________________________________
Name: svn:keywords
+ Id Revision
Name: svn:eol-style
+ LF
Modified: trunk/extensions/dna-search-lucene/src/test/java/org/jboss/dna/search/lucene/LuceneSearchEngineTest.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/test/java/org/jboss/dna/search/lucene/LuceneSearchEngineTest.java 2009-12-31 18:53:06 UTC (rev 1507)
+++ trunk/extensions/dna-search-lucene/src/test/java/org/jboss/dna/search/lucene/LuceneSearchEngineTest.java 2009-12-31 21:36:01 UTC (rev 1508)
@@ -373,6 +373,14 @@
// ----------------------------------------------------------------------------------------------------------------
@Test
+ public void shouldFindAllNodesBySimpleQuery() {
+ indexWorkspace(workspaceName1);
+ String query = "SELECT [jcr:primaryType] FROM __ALLNODES__";
+ QueryResults results = query(workspaceName1, query);
+ assertRowCount(results, 18);
+ }
+
+ @Test
public void shouldFindNodesBySimpleQuery() {
indexWorkspace(workspaceName1);
String query = "SELECT model, maker FROM __ALLNODES__";
14 years, 3 months
DNA SVN: r1507 - in trunk: extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene and 1 other directories.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2009-12-31 13:53:06 -0500 (Thu, 31 Dec 2009)
New Revision: 1507
Removed:
trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/EncodingNamespaceRegistry.java
trunk/extensions/dna-search-lucene/src/test/java/org/jboss/dna/search/lucene/EncodingNamespaceRegistryTest.java
Modified:
trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java
trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java
trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/AbstractLuceneSearchEngine.java
trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchProcessor.java
trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchSession.java
trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchWorkspace.java
Log:
DNA-468 Simplified the design of the Lucene search indexes, which improves the performance significantly (the TCK unit tests run approximately 10% or more faster). The performance improvement should be even higher when there's a lot of content and the queries have more criteria.
Modified: trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java
===================================================================
--- trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java 2009-12-31 16:13:57 UTC (rev 1506)
+++ trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java 2009-12-31 18:53:06 UTC (rev 1507)
@@ -109,9 +109,16 @@
} finally {
stream.close();
}
+
+ // Prime creating a first XPath query and SQL query ...
+ session.getWorkspace().getQueryManager().createQuery("//element(*,nt:unstructured)", Query.XPATH);
+ session.getWorkspace().getQueryManager().createQuery("SELECT * FROM [nt:base]", JcrRepository.QueryLanguage.SQL);
} finally {
session.logout();
}
+
+ // Prime creating the schemata ...
+ repository.getRepositoryTypeManager().getRepositorySchemata();
}
@AfterClass
@@ -201,7 +208,7 @@
QueryResult result = query.execute();
assertThat(result, is(notNullValue()));
assertResultsHaveColumns(result, "jcr:primaryType");
- System.out.println(result);
+ // System.out.println(result);
}
// ----------------------------------------------------------------------------------------------------------------
@@ -238,7 +245,7 @@
assertThat(query, is(notNullValue()));
QueryResult result = query.execute();
assertThat(result, is(notNullValue()));
- System.out.println(result);
+ // System.out.println(result);
assertResultsHaveColumns(result,
"jcr:primaryType",
"jcr:path",
@@ -262,7 +269,7 @@
assertThat(query, is(notNullValue()));
QueryResult result = query.execute();
assertThat(result, is(notNullValue()));
- System.out.println(result);
+ // System.out.println(result);
assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
}
Modified: trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java
===================================================================
--- trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java 2009-12-31 16:13:57 UTC (rev 1506)
+++ trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java 2009-12-31 18:53:06 UTC (rev 1507)
@@ -288,7 +288,7 @@
// addTest(org.apache.jackrabbit.test.api.observation.TestAll.suite());
// addTest(org.apache.jackrabbit.test.api.version.TestAll.suite());
addTest(org.apache.jackrabbit.test.api.lock.TestAll.suite());
- // addTest(org.apache.jackrabbit.test.api.util.TestAll.suite());
+ addTest(org.apache.jackrabbit.test.api.util.TestAll.suite());
}
}
Modified: trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/AbstractLuceneSearchEngine.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/AbstractLuceneSearchEngine.java 2009-12-31 16:13:57 UTC (rev 1506)
+++ trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/AbstractLuceneSearchEngine.java 2009-12-31 18:53:06 UTC (rev 1507)
@@ -30,7 +30,6 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import net.jcip.annotations.NotThreadSafe;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery;
@@ -75,6 +74,7 @@
import org.jboss.dna.graph.query.model.FullTextSearch;
import org.jboss.dna.graph.query.model.FullTextSearchScore;
import org.jboss.dna.graph.query.model.Length;
+import org.jboss.dna.graph.query.model.Limit;
import org.jboss.dna.graph.query.model.Literal;
import org.jboss.dna.graph.query.model.LowerCase;
import org.jboss.dna.graph.query.model.NodeDepth;
@@ -425,6 +425,23 @@
SelectComponent selector = new SelectComponent(tuplesProcessor, postProcessConstraint, request.variables());
tuples = selector.execute();
}
+
+ // Limit the tuples ...
+ Limit limit = request.limit();
+ if (!limit.isUnlimited()) {
+ int firstIndex = limit.getOffset();
+ int maxRows = Math.min(tuples.size(), limit.getRowLimit());
+ if (firstIndex > 0) {
+ if (firstIndex > tuples.size()) {
+ tuples.clear();
+ } else {
+ tuples = tuples.subList(firstIndex, maxRows);
+ }
+ } else {
+ tuples = tuples.subList(0, maxRows);
+ }
+ }
+
executingNanos = System.nanoTime() - executingNanos;
Statistics stats = new Statistics(planningNanos, 0L, 0L, executingNanos);
request.setResults(tuples, stats);
@@ -753,17 +770,10 @@
*/
TupleCollector createTupleCollector( Columns columns );
- /**
- * Utility method to create a query to find all of the documents representing nodes with the supplied IDs.
- *
- * @param ids the IDs of the nodes that are to be found; may not be null
- * @return the query; never null
- * @throws IOException if there is a problem creating this query
- */
- Query findAllNodesWithIds( Set<String> ids ) throws IOException;
-
Query findAllNodesBelow( Path ancestorPath ) throws IOException;
+ Query findAllNodesAtOrBelow( Path ancestorPath ) throws IOException;
+
/**
* Return a query that can be used to find all of the documents that represent nodes that are children of the node at the
* supplied path.
@@ -845,6 +855,6 @@
*
* @return the tuples; never null
*/
- public abstract LinkedList<Object[]> getTuples();
+ public abstract List<Object[]> getTuples();
}
}
Deleted: trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/EncodingNamespaceRegistry.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/EncodingNamespaceRegistry.java 2009-12-31 16:13:57 UTC (rev 1506)
+++ trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/EncodingNamespaceRegistry.java 2009-12-31 18:53:06 UTC (rev 1507)
@@ -1,233 +0,0 @@
-/*
- * JBoss DNA (http://www.jboss.org/dna)
- * See the COPYRIGHT.txt file distributed with this work for information
- * regarding copyright ownership. Some portions may be licensed
- * to Red Hat, Inc. under one or more contributor license agreements.
- * See the AUTHORS.txt file in the distribution for a full listing of
- * individual contributors.
- *
- * JBoss DNA is free software. Unless otherwise indicated, all code in JBoss DNA
- * is licensed to you under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2.1 of
- * the License, or (at your option) any later version.
- *
- * JBoss DNA is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this software; if not, write to the Free
- * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
- * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
- */
-package org.jboss.dna.search.lucene;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-import net.jcip.annotations.GuardedBy;
-import net.jcip.annotations.ThreadSafe;
-import org.jboss.dna.common.text.NoOpEncoder;
-import org.jboss.dna.common.text.TextEncoder;
-import org.jboss.dna.graph.DnaLexicon;
-import org.jboss.dna.graph.JcrLexicon;
-import org.jboss.dna.graph.JcrMixLexicon;
-import org.jboss.dna.graph.JcrNtLexicon;
-import org.jboss.dna.graph.property.NamespaceRegistry;
-import org.jboss.dna.graph.property.basic.BasicNamespace;
-
-/**
- * A {@link NamespaceRegistry} implementation that uses encoded representations of the namespace URIs for the namespace prefixes.
- */
-@ThreadSafe
-class EncodingNamespaceRegistry implements NamespaceRegistry {
-
- public static final Set<String> DEFAULT_FIXED_NAMESPACES = Collections.unmodifiableSet(new HashSet<String>(
- Arrays.asList(new String[] {
- "",
- DnaLexicon.Namespace.URI,
- JcrLexicon.Namespace.URI,
- JcrNtLexicon.Namespace.URI,
- JcrMixLexicon.Namespace.URI})));
-
- private final NamespaceRegistry registry;
- private final TextEncoder encoder;
- private final ReadWriteLock lock = new ReentrantReadWriteLock();
- @GuardedBy( "lock" )
- private final Map<String, String> uriToEncodedPrefix = new HashMap<String, String>();
- @GuardedBy( "lock" )
- private final Map<String, String> encodedPrefixToUri = new HashMap<String, String>();
- private final Set<String> fixedNamespaceUris;
-
- /**
- * @param registry the original registry
- * @param encoder the encoder; may be null if no encoding should be used
- */
- EncodingNamespaceRegistry( NamespaceRegistry registry,
- TextEncoder encoder ) {
- this(registry, encoder, null);
- }
-
- /**
- * @param registry the original registry
- * @param encoder the encoder; may be null if no encoding should be used
- * @param fixedUris the set of URIs that is to be fixed and not encoded; or null if the default namespaces are to be fixed
- */
- EncodingNamespaceRegistry( NamespaceRegistry registry,
- TextEncoder encoder,
- Set<String> fixedUris ) {
- this.registry = registry;
- this.encoder = encoder != null ? encoder : new NoOpEncoder();
- this.fixedNamespaceUris = fixedUris != null ? Collections.unmodifiableSet(new HashSet<String>(fixedUris)) : DEFAULT_FIXED_NAMESPACES;
- assert this.registry != null;
- assert this.encoder != null;
- assert this.fixedNamespaceUris != null;
- }
-
- /**
- * @return fixedNamespaceUris
- */
- public Set<String> getFixedNamespaceUris() {
- return fixedNamespaceUris;
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.graph.property.NamespaceRegistry#getDefaultNamespaceUri()
- */
- public String getDefaultNamespaceUri() {
- return this.registry.getDefaultNamespaceUri();
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.graph.property.NamespaceRegistry#getNamespaceForPrefix(java.lang.String)
- */
- public String getNamespaceForPrefix( String prefix ) {
- // First look in the map ...
- String result = null;
- try {
- lock.readLock().lock();
- result = encodedPrefixToUri.get(prefix);
- if (result != null) return result;
- } finally {
- lock.readLock().unlock();
- }
-
- // Make sure we have encoded all the namespaces in the registry ...
- Set<Namespace> namespaces = new HashSet<Namespace>(this.registry.getNamespaces());
- Set<Namespace> encodedNamespaces = this.getNamespaces();
- namespaces.removeAll(encodedNamespaces);
- try {
- lock.writeLock().lock();
- for (Namespace namespace : namespaces) {
- String namespaceUri = namespace.getNamespaceUri();
- String encoded = fixedNamespaceUris.contains(namespaceUri) ? namespace.getPrefix() : encoder.encode(namespaceUri);
- uriToEncodedPrefix.put(namespaceUri, encoded);
- encodedPrefixToUri.put(encoded, namespaceUri);
- if (result == null && encoded.equals(prefix)) result = namespaceUri;
- }
- } finally {
- lock.writeLock().unlock();
- }
- if (result != null) return result;
-
- // There's nothing, so just delegate to the registry ...
- return this.registry.getNamespaceForPrefix(prefix);
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.graph.property.NamespaceRegistry#getRegisteredNamespaceUris()
- */
- public Set<String> getRegisteredNamespaceUris() {
- return this.registry.getRegisteredNamespaceUris();
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.graph.property.NamespaceRegistry#isRegisteredNamespaceUri(java.lang.String)
- */
- public boolean isRegisteredNamespaceUri( String namespaceUri ) {
- return this.registry.isRegisteredNamespaceUri(namespaceUri);
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.graph.property.NamespaceRegistry#getPrefixForNamespaceUri(java.lang.String, boolean)
- */
- public String getPrefixForNamespaceUri( String namespaceUri,
- boolean generateIfMissing ) {
- if (fixedNamespaceUris.contains(namespaceUri)) {
- return this.registry.getPrefixForNamespaceUri(namespaceUri, generateIfMissing);
- }
- String encoded = null;
- try {
- lock.readLock().lock();
- encoded = uriToEncodedPrefix.get(namespaceUri);
- } finally {
- lock.readLock().unlock();
- }
- if (encoded == null) {
- encoded = encoder.encode(namespaceUri);
- try {
- lock.writeLock().lock();
- uriToEncodedPrefix.put(namespaceUri, encoded);
- encodedPrefixToUri.put(encoded, namespaceUri);
- } finally {
- lock.writeLock().unlock();
- }
- }
- return encoded;
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.graph.property.NamespaceRegistry#getNamespaces()
- */
- public Set<Namespace> getNamespaces() {
- Set<Namespace> results = new HashSet<Namespace>();
- try {
- lock.readLock().lock();
- for (Map.Entry<String, String> entry : uriToEncodedPrefix.entrySet()) {
- String uri = entry.getKey();
- String prefix = entry.getValue();
- results.add(new BasicNamespace(prefix, uri));
- }
- } finally {
- lock.readLock().unlock();
- }
- return results;
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.graph.property.NamespaceRegistry#register(java.lang.String, java.lang.String)
- */
- public String register( String prefix,
- String namespaceUri ) {
- return this.registry.register(prefix, namespaceUri);
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.graph.property.NamespaceRegistry#unregister(java.lang.String)
- */
- public boolean unregister( String namespaceUri ) {
- return this.registry.unregister(namespaceUri);
- }
-}
Modified: trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchProcessor.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchProcessor.java 2009-12-31 16:13:57 UTC (rev 1506)
+++ trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchProcessor.java 2009-12-31 18:53:06 UTC (rev 1507)
@@ -29,24 +29,15 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import java.util.UUID;
import net.jcip.annotations.NotThreadSafe;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.NumericField;
import org.apache.lucene.queryParser.ParseException;
-import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
-import org.jboss.dna.common.text.SecureHashTextEncoder;
-import org.jboss.dna.common.text.TextEncoder;
import org.jboss.dna.common.util.Logger;
-import org.jboss.dna.common.util.SecureHash.Algorithm;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.Location;
import org.jboss.dna.graph.observe.Observer;
import org.jboss.dna.graph.property.DateTime;
import org.jboss.dna.graph.property.Path;
-import org.jboss.dna.graph.property.Property;
import org.jboss.dna.graph.query.QueryResults.Columns;
import org.jboss.dna.graph.query.QueryResults.Statistics;
import org.jboss.dna.graph.query.process.FullTextSearchResultColumns;
@@ -69,7 +60,6 @@
import org.jboss.dna.graph.search.SearchEngineProcessor;
import org.jboss.dna.graph.search.AbstractSearchEngine.Workspaces;
import org.jboss.dna.search.lucene.AbstractLuceneSearchEngine.AbstractLuceneProcessor;
-import org.jboss.dna.search.lucene.LuceneSearchWorkspace.PathIndex;
/**
* Abstract {@link SearchEngineProcessor} implementation for the {@link LuceneSearchEngine}.
@@ -77,15 +67,6 @@
@NotThreadSafe
public class LuceneSearchProcessor extends AbstractLuceneProcessor<LuceneSearchWorkspace, LuceneSearchSession> {
- protected static final TextEncoder NAMESPACE_ENCODER = new SecureHashTextEncoder(Algorithm.SHA_1, 10);
-
- protected static ExecutionContext contextWithEncodedNamespaces( ExecutionContext context ) {
- return context;
- // NamespaceRegistry encodingRegistry = new EncodingNamespaceRegistry(context.getNamespaceRegistry(), NAMESPACE_ENCODER);
- // ExecutionContext encodingContext = context.with(encodingRegistry);
- // return encodingContext;
- }
-
protected static final Columns FULL_TEXT_RESULT_COLUMNS = new FullTextSearchResultColumns();
protected LuceneSearchProcessor( String sourceName,
@@ -94,7 +75,7 @@
Observer observer,
DateTime now,
boolean readOnly ) {
- super(sourceName, contextWithEncodedNamespaces(context), workspaces, observer, now, readOnly);
+ super(sourceName, context, workspaces, observer, now, readOnly);
}
/**
@@ -117,15 +98,6 @@
return LuceneSearchWorkspace.FULL_TEXT_PREFIX + propertyName;
}
- protected void addIdProperties( Location location,
- Document doc ) {
- if (!location.hasIdProperties()) return;
- for (Property idProp : location.getIdProperties()) {
- String fieldValue = serializeProperty(idProp);
- doc.add(new Field(PathIndex.LOCATION_ID_PROPERTIES, fieldValue, Field.Store.YES, Field.Index.NOT_ANALYZED));
- }
- }
-
/**
* {@inheritDoc}
*
@@ -157,7 +129,7 @@
if (session == null) return;
request.setActualWorkspaceName(session.getWorkspaceName());
try {
- request.setActualRootLocation(session.getLocationFor(pathFactory.createRootPath()));
+ request.setActualRootLocation(session.getLocationForRoot());
} catch (IOException e) {
request.setError(e);
}
@@ -201,43 +173,13 @@
}
try {
- // Create a separate document for the path, which makes it easier to handle moves since the path can
- // be changed without changing any other content fields ...
- Document doc = new Document();
- String idStr = createPathDocument(location, doc);
- session.getPathsWriter().addDocument(doc);
-
- // Now set the content ...
- session.setOrReplaceProperties(idStr, request.properties());
+ session.setOrReplaceProperties(location, request.properties());
session.recordChange();
} catch (IOException e) {
request.setError(e);
}
}
- protected String createPathDocument( Location location,
- Document doc ) {
- UUID uuid = location.getUuid();
- if (uuid == null) uuid = UUID.randomUUID();
- Path path = location.getPath();
- String idStr = stringFactory.create(uuid);
- String pathStr = pathAsString(path);
- String nameStr = path.isRoot() ? "" : stringFactory.create(path.getLastSegment().getName());
- String localNameStr = path.isRoot() ? "" : path.getLastSegment().getName().getLocalName();
- int sns = path.isRoot() ? 1 : path.getLastSegment().getIndex();
-
- // Create a separate document for the path, which makes it easier to handle moves since the path can
- // be changed without changing any other content fields ...
- doc.add(new Field(PathIndex.PATH, pathStr, Field.Store.YES, Field.Index.NOT_ANALYZED));
- doc.add(new Field(PathIndex.NODE_NAME, nameStr, Field.Store.YES, Field.Index.NOT_ANALYZED));
- doc.add(new Field(PathIndex.LOCAL_NAME, localNameStr, Field.Store.YES, Field.Index.NOT_ANALYZED));
- doc.add(new NumericField(PathIndex.SNS_INDEX, Field.Store.YES, true).setIntValue(sns));
- doc.add(new Field(PathIndex.ID, idStr, Field.Store.YES, Field.Index.NOT_ANALYZED));
- doc.add(new NumericField(PathIndex.DEPTH, Field.Store.YES, true).setIntValue(path.size()));
- addIdProperties(location, doc);
- return idStr;
- }
-
/**
* {@inheritDoc}
*
@@ -251,25 +193,11 @@
Location location = request.getActualLocationOfNode();
assert location != null;
- UUID uuid = location.getUuid();
try {
- // If we're updating the root properties, make sure there is a document in the path index ...
- String idStr = null;
- if (location.getPath() != null && location.getPath().isRoot()) {
- Document doc = new Document();
- idStr = createPathDocument(location, doc);
- session.getPathsWriter().addDocument(doc);
- } else if (uuid != null) {
- idStr = stringFactory.create(uuid);
- } else {
- // Need to look up the id string ...
- idStr = session.getIdFor(location.getPath());
- }
-
// We make a big assumption here: the UpdatePropertiesRequest created by the SearchEngineProcessor have the
// actual locations set ...
- session.setOrReplaceProperties(idStr, request.properties().values());
+ session.setOrReplaceProperties(location, request.properties().values());
session.recordChange();
} catch (IOException e) {
request.setError(e);
@@ -289,21 +217,11 @@
Path path = request.at().getPath();
assert !readOnly;
try {
- Query query = null;
- int numChanges = 0;
- if (path.isRoot()) {
- query = new MatchAllDocsQuery();
- numChanges = LuceneSearchWorkspace.CHANGES_BEFORE_OPTIMIZATION + 100;
- } else {
- // Create a query to find all the nodes at or below the specified path ...
- Set<String> ids = session.getIdsForDescendantsOf(path, true);
- query = session.findAllNodesWithIds(ids);
- numChanges = ids.size();
- }
+ // Create a query to find all the nodes at or below the specified path (this efficiently handles the root path) ...
+ Query query = session.findAllNodesAtOrBelow(path);
// Now delete the documents from each index using this query, which we can reuse ...
- session.getPathsWriter().deleteDocuments(query);
session.getContentWriter().deleteDocuments(query);
- session.recordChanges(numChanges);
+ session.recordChanges(100);
} catch (FileNotFoundException e) {
// There are no index files yet, so nothing to delete ...
} catch (IOException e) {
@@ -322,7 +240,7 @@
if (workspace == null) return;
try {
LuceneSearchSession session = getSessionFor(request, workspace.getWorkspaceName());
- request.setActualRootLocation(session.getLocationFor(pathFactory.createRootPath()));
+ request.setActualRootLocation(session.getLocationForRoot());
workspace.destroy(getExecutionContext());
session.recordChanges(LuceneSearchWorkspace.CHANGES_BEFORE_OPTIMIZATION + 100);
} catch (IOException e) {
Modified: trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchSession.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchSession.java 2009-12-31 16:13:57 UTC (rev 1506)
+++ trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchSession.java 2009-12-31 18:53:06 UTC (rev 1507)
@@ -24,6 +24,7 @@
package org.jboss.dna.search.lucene;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
@@ -44,8 +45,6 @@
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.Collector;
-import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.NumericRangeQuery;
@@ -82,12 +81,10 @@
import org.jboss.dna.search.lucene.IndexRules.NumericRule;
import org.jboss.dna.search.lucene.IndexRules.Rule;
import org.jboss.dna.search.lucene.LuceneSearchWorkspace.ContentIndex;
-import org.jboss.dna.search.lucene.LuceneSearchWorkspace.PathIndex;
import org.jboss.dna.search.lucene.query.CompareLengthQuery;
import org.jboss.dna.search.lucene.query.CompareNameQuery;
import org.jboss.dna.search.lucene.query.ComparePathQuery;
import org.jboss.dna.search.lucene.query.CompareStringQuery;
-import org.jboss.dna.search.lucene.query.IdsQuery;
import org.jboss.dna.search.lucene.query.MatchNoneQuery;
import org.jboss.dna.search.lucene.query.NotQuery;
@@ -100,22 +97,11 @@
/**
* An immutable {@link FieldSelector} instance that accesses the UUID field.
*/
- protected static final FieldSelector DOC_ID_FIELD_SELECTOR = new FieldSelector() {
- private static final long serialVersionUID = 1L;
-
- public FieldSelectorResult accept( String fieldName ) {
- return ContentIndex.ID.equals(fieldName) ? FieldSelectorResult.LOAD_AND_BREAK : FieldSelectorResult.NO_LOAD;
- }
- };
-
- /**
- * An immutable {@link FieldSelector} instance that accesses the UUID field.
- */
protected static final FieldSelector LOCATION_FIELDS_SELECTOR = new FieldSelector() {
private static final long serialVersionUID = 1L;
public FieldSelectorResult accept( String fieldName ) {
- if (PathIndex.PATH.equals(fieldName) || PathIndex.LOCATION_ID_PROPERTIES.equals(fieldName)) {
+ if (ContentIndex.PATH.equals(fieldName) || ContentIndex.LOCATION_ID_PROPERTIES.equals(fieldName)) {
return FieldSelectorResult.LOAD;
}
return FieldSelectorResult.NO_LOAD;
@@ -129,11 +115,7 @@
private final LuceneSearchWorkspace workspace;
protected final LuceneSearchProcessor processor;
- private final Directory pathsIndexDirectory;
private final Directory contentIndexDirectory;
- private IndexReader pathsReader;
- private IndexWriter pathsWriter;
- private IndexSearcher pathsSearcher;
private IndexReader contentReader;
private IndexWriter contentWriter;
private IndexSearcher contentSearcher;
@@ -144,7 +126,6 @@
assert workspace != null;
assert processor != null;
this.workspace = workspace;
- this.pathsIndexDirectory = workspace.pathDirectory;
this.contentIndexDirectory = workspace.contentDirectory;
this.processor = processor;
}
@@ -165,21 +146,6 @@
return workspace;
}
- protected IndexReader getPathsReader() throws IOException {
- if (pathsReader == null) {
- try {
- pathsReader = IndexReader.open(pathsIndexDirectory, processor.readOnly);
- } catch (IOException e) {
- // try creating the workspace ...
- IndexWriter writer = new IndexWriter(pathsIndexDirectory, workspace.analyzer, MaxFieldLength.UNLIMITED);
- writer.close();
- // And try reading again ...
- pathsReader = IndexReader.open(pathsIndexDirectory, processor.readOnly);
- }
- }
- return pathsReader;
- }
-
protected IndexReader getContentReader() throws IOException {
if (contentReader == null) {
try {
@@ -195,15 +161,6 @@
return contentReader;
}
- protected IndexWriter getPathsWriter() throws IOException {
- assert !processor.readOnly;
- if (pathsWriter == null) {
- // Don't overwrite, but create if missing ...
- pathsWriter = new IndexWriter(pathsIndexDirectory, workspace.analyzer, MaxFieldLength.UNLIMITED);
- }
- return pathsWriter;
- }
-
protected IndexWriter getContentWriter() throws IOException {
assert !processor.readOnly;
if (contentWriter == null) {
@@ -213,13 +170,6 @@
return contentWriter;
}
- protected IndexSearcher getPathsSearcher() throws IOException {
- if (pathsSearcher == null) {
- pathsSearcher = new IndexSearcher(getPathsReader());
- }
- return pathsSearcher;
- }
-
public IndexSearcher getContentSearcher() throws IOException {
if (contentSearcher == null) {
contentSearcher = new IndexSearcher(getContentReader());
@@ -228,7 +178,7 @@
}
public boolean hasWriters() {
- return pathsWriter != null || contentWriter != null;
+ return contentWriter != null;
}
protected final void recordChange() {
@@ -261,47 +211,17 @@
IOException ioError = null;
RuntimeException runtimeError = null;
- if (pathsReader != null) {
+ if (contentReader != null) {
try {
- pathsReader.close();
+ contentReader.close();
} catch (IOException e) {
ioError = e;
} catch (RuntimeException e) {
runtimeError = e;
} finally {
- pathsReader = null;
- }
- }
- if (contentReader != null) {
- try {
- contentReader.close();
- } catch (IOException e) {
- if (ioError == null) ioError = e;
- } catch (RuntimeException e) {
- if (runtimeError == null) runtimeError = e;
- } finally {
contentReader = null;
}
}
- if (pathsWriter != null) {
- try {
- if (optimize) pathsWriter.optimize();
- } catch (IOException e) {
- if (ioError == null) ioError = e;
- } catch (RuntimeException e) {
- if (runtimeError == null) runtimeError = e;
- } finally {
- try {
- pathsWriter.close();
- } catch (IOException e) {
- if (ioError == null) ioError = e;
- } catch (RuntimeException e) {
- if (runtimeError == null) runtimeError = e;
- } finally {
- pathsWriter = null;
- }
- }
- }
if (contentWriter != null) {
try {
if (optimize) contentWriter.optimize();
@@ -339,45 +259,15 @@
numChanges = 0;
IOException ioError = null;
RuntimeException runtimeError = null;
- if (pathsReader != null) {
- try {
- pathsReader.close();
- } catch (IOException e) {
- ioError = e;
- } catch (RuntimeException e) {
- runtimeError = e;
- } finally {
- pathsReader = null;
- }
- }
if (contentReader != null) {
try {
contentReader.close();
} catch (IOException e) {
- if (ioError == null) ioError = e;
- } catch (RuntimeException e) {
- if (runtimeError == null) runtimeError = e;
- } finally {
- contentReader = null;
- }
- }
- if (pathsWriter != null) {
- try {
- pathsWriter.rollback();
- } catch (IOException e) {
ioError = e;
} catch (RuntimeException e) {
runtimeError = e;
} finally {
- try {
- pathsWriter.close();
- } catch (IOException e) {
- ioError = e;
- } catch (RuntimeException e) {
- runtimeError = e;
- } finally {
- pathsWriter = null;
- }
+ contentReader = null;
}
}
if (contentWriter != null) {
@@ -417,12 +307,10 @@
QueryParser parser = new QueryParser(Version.LUCENE_29, ContentIndex.FULL_TEXT, workspace.analyzer);
Query query = parser.parse(fullTextSearchExpression);
planningNanos = System.nanoTime() - planningNanos;
- TopDocs docs = getContentSearcher().search(query, maxRows + offset);
- // Collect the results ...
+ // Execute the search and place the results into the supplied list ...
+ TopDocs docs = getContentSearcher().search(query, maxRows + offset);
IndexReader contentReader = getContentReader();
- IndexReader pathReader = getPathsReader();
- IndexSearcher pathSearcher = getPathsSearcher();
ScoreDoc[] scoreDocs = docs.scoreDocs;
int numberOfResults = scoreDocs.length;
if (numberOfResults > offset) {
@@ -431,14 +319,8 @@
ScoreDoc result = scoreDocs[i];
int docId = result.doc;
// Find the UUID of the node (this UUID might be artificial, so we have to find the path) ...
- Document doc = contentReader.document(docId, DOC_ID_FIELD_SELECTOR);
- String id = doc.get(ContentIndex.ID);
- Document pathDoc = getPathDocument(id, pathReader, pathSearcher, LOCATION_FIELDS_SELECTOR);
- Location location = readLocation(pathDoc);
- if (location == null) {
- // No path record found ...
- continue;
- }
+ Document doc = contentReader.document(docId, LOCATION_FIELDS_SELECTOR);
+ Location location = readLocation(doc);
// Now add the location ...
results.add(new Object[] {location, result.score});
}
@@ -449,10 +331,10 @@
protected Location readLocation( Document doc ) {
// Read the path ...
- String pathString = doc.get(PathIndex.PATH);
+ String pathString = doc.get(ContentIndex.PATH);
Path path = processor.pathFactory.create(pathString);
// Look for the Location's ID properties ...
- String[] idProps = doc.getValues(PathIndex.LOCATION_ID_PROPERTIES);
+ String[] idProps = doc.getValues(ContentIndex.LOCATION_ID_PROPERTIES);
if (idProps.length == 0) {
return Location.create(path);
}
@@ -472,11 +354,33 @@
return properties.isEmpty() ? Location.create(path) : Location.create(path, properties);
}
- protected void setOrReplaceProperties( String idString,
+ protected void setOrReplaceProperties( Location location,
Iterable<Property> properties ) throws IOException {
// Create the document for the content (properties) ...
Document doc = new Document();
- doc.add(new Field(ContentIndex.ID, idString, Field.Store.YES, Field.Index.NOT_ANALYZED));
+
+ // Add the information every node has ...
+ Path path = location.getPath();
+ String pathStr = processor.pathAsString(path);
+ String nameStr = path.isRoot() ? "" : processor.stringFactory.create(path.getLastSegment().getName());
+ String localNameStr = path.isRoot() ? "" : path.getLastSegment().getName().getLocalName();
+ int sns = path.isRoot() ? 1 : path.getLastSegment().getIndex();
+
+ // Create a separate document for the path, which makes it easier to handle moves since the path can
+ // be changed without changing any other content fields ...
+ doc.add(new Field(ContentIndex.PATH, pathStr, Field.Store.YES, Field.Index.NOT_ANALYZED));
+ doc.add(new Field(ContentIndex.NODE_NAME, nameStr, Field.Store.YES, Field.Index.NOT_ANALYZED));
+ doc.add(new Field(ContentIndex.LOCAL_NAME, localNameStr, Field.Store.YES, Field.Index.NOT_ANALYZED));
+ doc.add(new NumericField(ContentIndex.SNS_INDEX, Field.Store.YES, true).setIntValue(sns));
+ doc.add(new NumericField(ContentIndex.DEPTH, Field.Store.YES, true).setIntValue(path.size()));
+ if (location.hasIdProperties()) {
+ for (Property idProp : location.getIdProperties()) {
+ String fieldValue = processor.serializeProperty(idProp);
+ doc.add(new Field(ContentIndex.LOCATION_ID_PROPERTIES, fieldValue, Field.Store.YES, Field.Index.NOT_ANALYZED));
+ }
+ }
+
+ // Index the properties
String stringValue = null;
StringBuilder fullTextSearchValue = null;
for (Property property : properties) {
@@ -559,193 +463,90 @@
if (fullTextSearchValue != null && fullTextSearchValue.length() != 0) {
doc.add(new Field(ContentIndex.FULL_TEXT, fullTextSearchValue.toString(), Field.Store.NO, Field.Index.ANALYZED));
}
- getContentWriter().updateDocument(new Term(ContentIndex.ID, idString), doc);
+ getContentWriter().updateDocument(new Term(ContentIndex.PATH, pathStr), doc);
}
- protected Document getPathDocument( String id,
- IndexReader pathReader,
- IndexSearcher pathSearcher,
- FieldSelector selector ) throws IOException {
- // Find the path for this node (is there a better way to do this than one search per ID?) ...
- TopDocs pathDocs = pathSearcher.search(new TermQuery(new Term(PathIndex.ID, id)), 1);
- if (pathDocs.scoreDocs.length < 1) {
- // No path record found ...
- return null;
- }
- return pathReader.document(pathDocs.scoreDocs[0].doc, selector);
- }
-
/**
- * Get the set of IDs for the children of the node at the given path.
+ * {@inheritDoc}
*
- * @param parentPath the path to the parent node; may not be null
- * @return the doc IDs of the child nodes; never null but possibly empty
- * @throws IOException if there is an error accessing the indexes
+ * @see org.jboss.dna.search.lucene.AbstractLuceneSearchEngine.WorkspaceSession#createTupleCollector(org.jboss.dna.graph.query.QueryResults.Columns)
*/
- protected Set<String> getIdsForChildrenOf( Path parentPath ) throws IOException {
- // Find the path of the parent ...
- String stringifiedPath = processor.pathAsString(parentPath);
- // Append a '/' to the parent path, so we'll only get decendants ...
- stringifiedPath = stringifiedPath + '/';
+ public TupleCollector createTupleCollector( Columns columns ) {
+ return new DualIndexTupleCollector(this, columns);
+ }
- // Create a query to find all the nodes below the parent path ...
- Query query = new PrefixQuery(new Term(PathIndex.PATH, stringifiedPath));
- // Include only the children ...
- int childrenDepth = parentPath.size() + 1;
- Query depthQuery = NumericRangeQuery.newIntRange(PathIndex.DEPTH, childrenDepth, childrenDepth, true, true);
- // And combine ...
- BooleanQuery combinedQuery = new BooleanQuery();
- combinedQuery.add(query, Occur.MUST);
- combinedQuery.add(depthQuery, Occur.MUST);
- query = combinedQuery;
+ public Location getLocationForRoot() throws IOException {
+ // Look for the root node ...
+ Query query = NumericRangeQuery.newIntRange(ContentIndex.DEPTH, 0, 0, true, true);
- // Now execute and collect the IDs ...
- IdCollector idCollector = new IdCollector();
- IndexSearcher searcher = getPathsSearcher();
- searcher.search(query, idCollector);
- return idCollector.getIds();
+ // Execute the search and place the results into the supplied list ...
+ List<Object[]> tuples = new ArrayList<Object[]>(1);
+ FullTextSearchTupleCollector collector = new FullTextSearchTupleCollector(this, tuples);
+ getContentSearcher().search(query, collector);
+
+ // Extract the location from the results ...
+ return tuples.isEmpty() ? Location.create(processor.pathFactory.createRootPath()) : (Location)tuples.get(0)[0];
}
- /**
- * Get the set of IDs for the nodes that are descendants of the node at the given path.
- *
- * @param parentPath the path to the parent node; may not be null and <i>may not be the root node</i>
- * @param includeParent true if the parent node should be included in the results, or false if only the descendants should be
- * included
- * @return the IDs of the nodes; never null but possibly empty
- * @throws IOException if there is an error accessing the indexes
- */
- protected Set<String> getIdsForDescendantsOf( Path parentPath,
- boolean includeParent ) throws IOException {
- assert !parentPath.isRoot();
-
+ public Query findAllNodesBelow( Path parentPath ) {
// Find the path of the parent ...
String stringifiedPath = processor.pathAsString(parentPath);
- if (!includeParent) {
- // Append a '/' to the parent path, and we'll only get decendants ...
- stringifiedPath = stringifiedPath + '/';
- }
+ // Append a '/' to the parent path, and we'll only get descendants ...
+ stringifiedPath = stringifiedPath + '/';
// Create a prefix query ...
- Query query = new PrefixQuery(new Term(PathIndex.PATH, stringifiedPath));
-
- // Now execute and collect the IDs ...
- IdCollector idCollector = new IdCollector();
- IndexSearcher searcher = getPathsSearcher();
- searcher.search(query, idCollector);
- return idCollector.getIds();
+ return new PrefixQuery(new Term(ContentIndex.PATH, stringifiedPath));
}
- /**
- * Get the set containing the single ID for the node at the given path.
- *
- * @param path the path to the node; may not be null
- * @return the ID of the supplied node; or null if the node cannot be found
- * @throws IOException if there is an error accessing the indexes
- */
- protected String getIdFor( Path path ) throws IOException {
- // Create a query to find all the nodes below the parent path ...
- IndexSearcher searcher = getPathsSearcher();
- Query query = null;
- if (path.isRoot()) {
- // Look for the query
- query = NumericRangeQuery.newIntRange(PathIndex.DEPTH, 0, 0, true, true);
- } else {
- String stringifiedPath = processor.pathAsString(path);
- query = new TermQuery(new Term(PathIndex.PATH, stringifiedPath));
+ public Query findAllNodesAtOrBelow( Path parentPath ) {
+ if (parentPath.isRoot()) {
+ return new MatchAllDocsQuery();
}
+ // Find the path of the parent ...
+ String stringifiedPath = processor.pathAsString(parentPath);
- // Now execute and collect the UUIDs ...
- TopDocs topDocs = searcher.search(query, 1);
- if (topDocs.totalHits == 0) return null;
- Document pathDoc = getPathsReader().document(topDocs.scoreDocs[0].doc);
- String idString = pathDoc.get(PathIndex.ID);
- assert idString != null;
- return idString;
+ // Create a prefix query ...
+ return new PrefixQuery(new Term(ContentIndex.PATH, stringifiedPath));
}
- protected Location getLocationFor( Path path ) throws IOException {
- // Create a query to find all the nodes below the parent path ...
- IndexSearcher searcher = getPathsSearcher();
- String stringifiedPath = processor.pathAsString(path);
- TermQuery query = new TermQuery(new Term(PathIndex.PATH, stringifiedPath));
-
- // Now execute and collect the UUIDs ...
- TopDocs topDocs = searcher.search(query, 1);
- if (topDocs.totalHits == 0) return null;
- Document pathDoc = getPathsReader().document(topDocs.scoreDocs[0].doc);
- return readLocation(pathDoc);
- }
-
/**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.search.lucene.AbstractLuceneSearchEngine.WorkspaceSession#createTupleCollector(org.jboss.dna.graph.query.QueryResults.Columns)
- */
- public TupleCollector createTupleCollector( Columns columns ) {
- return new DualIndexTupleCollector(this, columns);
- }
-
- public Query findAllNodesWithIds( Set<String> ids ) {
- if (ids.isEmpty()) {
- // There are no children, so return a null query ...
- return new MatchNoneQuery();
- }
- if (ids.size() == 1) {
- String id = ids.iterator().next();
- if (id == null) return new MatchNoneQuery();
- return new TermQuery(new Term(ContentIndex.ID, id));
- }
- if (ids.size() < 50) {
- // Create an OR boolean query for all the UUIDs, since this is probably more efficient ...
- BooleanQuery query = new BooleanQuery();
- for (String id : ids) {
- Query uuidQuery = new TermQuery(new Term(ContentIndex.ID, id));
- query.add(uuidQuery, Occur.SHOULD);
- }
- return query;
- }
- // Return a query that will always find all of the UUIDs ...
- return new IdsQuery(ContentIndex.ID, ids);
- }
-
- public Query findAllNodesBelow( Path ancestorPath ) throws IOException {
- if (ancestorPath.isRoot()) {
- return new MatchAllDocsQuery();
- }
- Set<String> ids = getIdsForDescendantsOf(ancestorPath, false);
- return findAllNodesWithIds(ids);
- }
-
- /**
* Return a query that can be used to find all of the documents that represent nodes that are children of the node at the
* supplied path.
*
* @param parentPath the path of the parent node.
* @return the query; never null
- * @throws IOException if there is an error finding the UUIDs of the child nodes
*/
- public Query findChildNodes( Path parentPath ) throws IOException {
- if (parentPath.isRoot()) {
- return new MatchAllDocsQuery();
- }
- Set<String> childIds = getIdsForChildrenOf(parentPath);
- return findAllNodesWithIds(childIds);
+ public Query findChildNodes( Path parentPath ) {
+ // Find the path of the parent ...
+ String stringifiedPath = processor.pathAsString(parentPath);
+ // Append a '/' to the parent path, so we'll only get descendants ...
+ stringifiedPath = stringifiedPath + '/';
+
+ // Create a query to find all the nodes below the parent path ...
+ Query query = new PrefixQuery(new Term(ContentIndex.PATH, stringifiedPath));
+ // Include only the children ...
+ int childrenDepth = parentPath.size() + 1;
+ Query depthQuery = NumericRangeQuery.newIntRange(ContentIndex.DEPTH, childrenDepth, childrenDepth, true, true);
+ // And combine ...
+ BooleanQuery combinedQuery = new BooleanQuery();
+ combinedQuery.add(query, Occur.MUST);
+ combinedQuery.add(depthQuery, Occur.MUST);
+ return combinedQuery;
}
/**
- * Create a query that can be used to find the one document (or node) that exists at the exact path supplied. This method
- * first queries the {@link PathIndex path index} to find the ID of the node at the supplied path, and then returns a query
- * that matches the ID.
+ * Create a query that can be used to find the one document (or node) that exists at the exact path supplied.
*
* @param path the path of the node
* @return the query; never null
- * @throws IOException if there is an error finding the ID for the supplied path
*/
- public Query findNodeAt( Path path ) throws IOException {
- String id = getIdFor(path);
- if (id == null) return null;
- return new TermQuery(new Term(ContentIndex.ID, id));
+ public Query findNodeAt( Path path ) {
+ if (path.isRoot()) {
+ // Look for the root node ...
+ return NumericRangeQuery.newIntRange(ContentIndex.DEPTH, 0, 0, true, true);
+ }
+ String stringifiedPath = processor.pathAsString(path);
+ return new TermQuery(new Term(ContentIndex.PATH, stringifiedPath));
}
public Query findNodesLike( String fieldName,
@@ -1037,7 +838,7 @@
Object upperValue,
boolean includesLower,
boolean includesUpper ) {
- return findNodesWithNumericRange(PathIndex.DEPTH, lowerValue, upperValue, includesLower, includesUpper);
+ return findNodesWithNumericRange(ContentIndex.DEPTH, lowerValue, upperValue, includesLower, includesUpper);
}
protected Query findNodesWithNumericRange( String field,
@@ -1085,7 +886,7 @@
public Query findNodesWith( NodePath nodePath,
Operator operator,
Object value,
- boolean caseSensitive ) throws IOException {
+ boolean caseSensitive ) {
if (!caseSensitive) value = processor.stringFactory.create(value).toLowerCase();
Path pathValue = operator != Operator.LIKE ? processor.pathFactory.create(value) : null;
Query query = null;
@@ -1096,44 +897,40 @@
return new NotQuery(findNodeAt(pathValue));
case LIKE:
String likeExpression = processor.stringFactory.create(value);
- query = findNodesLike(PathIndex.PATH, likeExpression, caseSensitive);
+ query = findNodesLike(ContentIndex.PATH, likeExpression, caseSensitive);
break;
case GREATER_THAN:
query = ComparePathQuery.createQueryForNodesWithPathGreaterThan(pathValue,
- PathIndex.PATH,
+ ContentIndex.PATH,
processor.valueFactories,
caseSensitive);
break;
case GREATER_THAN_OR_EQUAL_TO:
query = ComparePathQuery.createQueryForNodesWithPathGreaterThanOrEqualTo(pathValue,
- PathIndex.PATH,
+ ContentIndex.PATH,
processor.valueFactories,
caseSensitive);
break;
case LESS_THAN:
query = ComparePathQuery.createQueryForNodesWithPathLessThan(pathValue,
- PathIndex.PATH,
+ ContentIndex.PATH,
processor.valueFactories,
caseSensitive);
break;
case LESS_THAN_OR_EQUAL_TO:
query = ComparePathQuery.createQueryForNodesWithPathLessThanOrEqualTo(pathValue,
- PathIndex.PATH,
+ ContentIndex.PATH,
processor.valueFactories,
caseSensitive);
break;
}
- // Now execute and collect the IDs ...
- IdCollector idCollector = new IdCollector();
- IndexSearcher searcher = getPathsSearcher();
- searcher.search(query, idCollector);
- return findAllNodesWithIds(idCollector.getIds());
+ return query;
}
public Query findNodesWith( NodeName nodeName,
Operator operator,
Object value,
- boolean caseSensitive ) throws IOException {
+ boolean caseSensitive ) {
ValueFactories factories = processor.valueFactories;
String stringValue = processor.stringFactory.create(value);
if (!caseSensitive) stringValue = stringValue.toLowerCase();
@@ -1143,39 +940,41 @@
switch (operator) {
case EQUAL_TO:
BooleanQuery booleanQuery = new BooleanQuery();
- booleanQuery.add(new TermQuery(new Term(PathIndex.NODE_NAME, stringValue)), Occur.MUST);
- booleanQuery.add(NumericRangeQuery.newIntRange(PathIndex.SNS_INDEX, snsIndex, snsIndex, true, false), Occur.MUST);
+ booleanQuery.add(new TermQuery(new Term(ContentIndex.NODE_NAME, stringValue)), Occur.MUST);
+ booleanQuery.add(NumericRangeQuery.newIntRange(ContentIndex.SNS_INDEX, snsIndex, snsIndex, true, false),
+ Occur.MUST);
return booleanQuery;
case NOT_EQUAL_TO:
booleanQuery = new BooleanQuery();
- booleanQuery.add(new TermQuery(new Term(PathIndex.NODE_NAME, stringValue)), Occur.MUST);
- booleanQuery.add(NumericRangeQuery.newIntRange(PathIndex.SNS_INDEX, snsIndex, snsIndex, true, false), Occur.MUST);
+ booleanQuery.add(new TermQuery(new Term(ContentIndex.NODE_NAME, stringValue)), Occur.MUST);
+ booleanQuery.add(NumericRangeQuery.newIntRange(ContentIndex.SNS_INDEX, snsIndex, snsIndex, true, false),
+ Occur.MUST);
return new NotQuery(booleanQuery);
case GREATER_THAN:
query = CompareNameQuery.createQueryForNodesWithNameGreaterThan(segment,
- PathIndex.NODE_NAME,
- PathIndex.SNS_INDEX,
+ ContentIndex.NODE_NAME,
+ ContentIndex.SNS_INDEX,
factories,
caseSensitive);
break;
case GREATER_THAN_OR_EQUAL_TO:
query = CompareNameQuery.createQueryForNodesWithNameGreaterThanOrEqualTo(segment,
- PathIndex.NODE_NAME,
- PathIndex.SNS_INDEX,
+ ContentIndex.NODE_NAME,
+ ContentIndex.SNS_INDEX,
factories,
caseSensitive);
break;
case LESS_THAN:
query = CompareNameQuery.createQueryForNodesWithNameLessThan(segment,
- PathIndex.NODE_NAME,
- PathIndex.SNS_INDEX,
+ ContentIndex.NODE_NAME,
+ ContentIndex.SNS_INDEX,
factories,
caseSensitive);
break;
case LESS_THAN_OR_EQUAL_TO:
query = CompareNameQuery.createQueryForNodesWithNameLessThanOrEqualTo(segment,
- PathIndex.NODE_NAME,
- PathIndex.SNS_INDEX,
+ ContentIndex.NODE_NAME,
+ ContentIndex.SNS_INDEX,
factories,
caseSensitive);
break;
@@ -1187,7 +986,7 @@
String localNameExpression = likeExpression.substring(0, openBracketIndex);
String snsIndexExpression = likeExpression.substring(openBracketIndex);
Query localNameQuery = CompareStringQuery.createQueryForNodesWithFieldLike(localNameExpression,
- PathIndex.NODE_NAME,
+ ContentIndex.NODE_NAME,
factories,
caseSensitive);
Query snsQuery = createSnsIndexQuery(snsIndexExpression);
@@ -1213,120 +1012,98 @@
} else {
// There is no SNS expression ...
query = CompareStringQuery.createQueryForNodesWithFieldLike(likeExpression,
- PathIndex.NODE_NAME,
+ ContentIndex.NODE_NAME,
factories,
caseSensitive);
}
assert query != null;
break;
}
-
- // Now execute and collect the IDs ...
- IdCollector idCollector = new IdCollector();
- IndexSearcher searcher = getPathsSearcher();
- searcher.search(query, idCollector);
- return findAllNodesWithIds(idCollector.getIds());
+ return query;
}
public Query findNodesWith( NodeLocalName nodeName,
Operator operator,
Object value,
- boolean caseSensitive ) throws IOException {
+ boolean caseSensitive ) {
String nameValue = processor.stringFactory.create(value);
Query query = null;
switch (operator) {
case LIKE:
String likeExpression = processor.stringFactory.create(value);
- query = findNodesLike(PathIndex.LOCAL_NAME, likeExpression, caseSensitive);
+ query = findNodesLike(ContentIndex.LOCAL_NAME, likeExpression, caseSensitive);
break;
case EQUAL_TO:
query = CompareStringQuery.createQueryForNodesWithFieldEqualTo(nameValue,
- PathIndex.LOCAL_NAME,
+ ContentIndex.LOCAL_NAME,
processor.valueFactories,
caseSensitive);
break;
case NOT_EQUAL_TO:
query = CompareStringQuery.createQueryForNodesWithFieldEqualTo(nameValue,
- PathIndex.LOCAL_NAME,
+ ContentIndex.LOCAL_NAME,
processor.valueFactories,
caseSensitive);
query = new NotQuery(query);
break;
case GREATER_THAN:
query = CompareStringQuery.createQueryForNodesWithFieldGreaterThan(nameValue,
- PathIndex.LOCAL_NAME,
+ ContentIndex.LOCAL_NAME,
processor.valueFactories,
caseSensitive);
break;
case GREATER_THAN_OR_EQUAL_TO:
query = CompareStringQuery.createQueryForNodesWithFieldGreaterThanOrEqualTo(nameValue,
- PathIndex.LOCAL_NAME,
+ ContentIndex.LOCAL_NAME,
processor.valueFactories,
caseSensitive);
break;
case LESS_THAN:
query = CompareStringQuery.createQueryForNodesWithFieldLessThan(nameValue,
- PathIndex.LOCAL_NAME,
+ ContentIndex.LOCAL_NAME,
processor.valueFactories,
caseSensitive);
break;
case LESS_THAN_OR_EQUAL_TO:
query = CompareStringQuery.createQueryForNodesWithFieldLessThanOrEqualTo(nameValue,
- PathIndex.LOCAL_NAME,
+ ContentIndex.LOCAL_NAME,
processor.valueFactories,
caseSensitive);
break;
}
-
- // Now execute and collect the IDs ...
- IdCollector idCollector = new IdCollector();
- IndexSearcher searcher = getPathsSearcher();
- searcher.search(query, idCollector);
- return findAllNodesWithIds(idCollector.getIds());
+ return query;
}
public Query findNodesWith( NodeDepth depthConstraint,
Operator operator,
- Object value ) throws IOException {
+ Object value ) {
int depth = processor.valueFactories.getLongFactory().create(value).intValue();
- Query query = null;
switch (operator) {
case EQUAL_TO:
- query = NumericRangeQuery.newIntRange(PathIndex.DEPTH, depth, depth, true, true);
- break;
+ return NumericRangeQuery.newIntRange(ContentIndex.DEPTH, depth, depth, true, true);
case NOT_EQUAL_TO:
- query = NumericRangeQuery.newIntRange(PathIndex.DEPTH, depth, depth, true, true);
- query = new NotQuery(query);
- break;
+ Query query = NumericRangeQuery.newIntRange(ContentIndex.DEPTH, depth, depth, true, true);
+ return new NotQuery(query);
case GREATER_THAN:
- query = NumericRangeQuery.newIntRange(PathIndex.DEPTH, depth, MAX_DEPTH, false, true);
- break;
+ return NumericRangeQuery.newIntRange(ContentIndex.DEPTH, depth, MAX_DEPTH, false, true);
case GREATER_THAN_OR_EQUAL_TO:
- query = NumericRangeQuery.newIntRange(PathIndex.DEPTH, depth, MAX_DEPTH, true, true);
- break;
+ return NumericRangeQuery.newIntRange(ContentIndex.DEPTH, depth, MAX_DEPTH, true, true);
case LESS_THAN:
- query = NumericRangeQuery.newIntRange(PathIndex.DEPTH, MIN_DEPTH, depth, true, false);
- break;
+ return NumericRangeQuery.newIntRange(ContentIndex.DEPTH, MIN_DEPTH, depth, true, false);
case LESS_THAN_OR_EQUAL_TO:
- query = NumericRangeQuery.newIntRange(PathIndex.DEPTH, MIN_DEPTH, depth, true, true);
- break;
+ return NumericRangeQuery.newIntRange(ContentIndex.DEPTH, MIN_DEPTH, depth, true, true);
case LIKE:
// This is not allowed ...
return null;
}
-
- // Now execute and collect the IDs ...
- IdCollector idCollector = new IdCollector();
- IndexSearcher searcher = getPathsSearcher();
- searcher.search(query, idCollector);
- return findAllNodesWithIds(idCollector.getIds());
+ return null;
}
protected Query createLocalNameQuery( String likeExpression,
boolean caseSensitive ) {
if (likeExpression == null) return null;
return CompareStringQuery.createQueryForNodesWithFieldLike(likeExpression,
- PathIndex.LOCAL_NAME,
+ ContentIndex.LOCAL_NAME,
processor.valueFactories,
caseSensitive);
}
@@ -1358,16 +1135,16 @@
}
if (likeExpression.equals("_")) {
// The SNS expression can only be one digit ...
- return NumericRangeQuery.newIntRange(PathIndex.SNS_INDEX, MIN_SNS_INDEX, 9, true, true);
+ return NumericRangeQuery.newIntRange(ContentIndex.SNS_INDEX, MIN_SNS_INDEX, 9, true, true);
}
if (likeExpression.equals("%")) {
// The SNS expression can be any digits ...
- return NumericRangeQuery.newIntRange(PathIndex.SNS_INDEX, MIN_SNS_INDEX, MAX_SNS_INDEX, true, true);
+ return NumericRangeQuery.newIntRange(ContentIndex.SNS_INDEX, MIN_SNS_INDEX, MAX_SNS_INDEX, true, true);
}
if (likeExpression.indexOf('_') != -1) {
if (likeExpression.indexOf('%') != -1) {
// Contains both ...
- return findNodesLike(PathIndex.SNS_INDEX, likeExpression, true);
+ return findNodesLike(ContentIndex.SNS_INDEX, likeExpression, true);
}
// It presumably contains some numbers and at least one '_' character ...
int firstWildcardChar = likeExpression.indexOf('_');
@@ -1376,7 +1153,7 @@
int secondWildcardChar = likeExpression.indexOf('_', firstWildcardChar + 1);
if (secondWildcardChar != -1) {
// There are multiple '_' characters ...
- return findNodesLike(PathIndex.SNS_INDEX, likeExpression, true);
+ return findNodesLike(ContentIndex.SNS_INDEX, likeExpression, true);
}
}
// There's only one '_', so parse the lowermost value and uppermost value ...
@@ -1386,7 +1163,7 @@
// This SNS is just a number ...
int lowerSns = Integer.parseInt(lowerExpression);
int upperSns = Integer.parseInt(upperExpression);
- return NumericRangeQuery.newIntRange(PathIndex.SNS_INDEX, lowerSns, upperSns, true, true);
+ return NumericRangeQuery.newIntRange(ContentIndex.SNS_INDEX, lowerSns, upperSns, true, true);
} catch (NumberFormatException e) {
// It's not a number but it's in the SNS field, so there will be no results ...
return new MatchNoneQuery();
@@ -1394,13 +1171,13 @@
}
if (likeExpression.indexOf('%') != -1) {
// It presumably contains some numbers and at least one '%' character ...
- return findNodesLike(PathIndex.SNS_INDEX, likeExpression, true);
+ return findNodesLike(ContentIndex.SNS_INDEX, likeExpression, true);
}
// This is not a LIKE expression but an exact value specification and should be a number ...
try {
// This SNS is just a number ...
int sns = Integer.parseInt(likeExpression);
- return NumericRangeQuery.newIntRange(PathIndex.SNS_INDEX, sns, sns, true, true);
+ return NumericRangeQuery.newIntRange(ContentIndex.SNS_INDEX, sns, sns, true, true);
} catch (NumberFormatException e) {
// It's not a number but it's in the SNS field, so there will be no results ...
return new MatchNoneQuery();
@@ -1408,82 +1185,10 @@
}
/**
- * A {@link Collector} implementation that only captures the UUID of the documents returned by a query. Score information is
- * not recorded. This is often used when querying the {@link PathIndex} to collect the UUIDs of a set of nodes satisfying some
- * path constraint.
- *
- * @see LuceneSearchSession#findChildNodes(Path)
- */
- protected static class IdCollector extends Collector {
- private final Set<String> ids = new HashSet<String>();
- private String[] idsByDocId;
-
- // private int baseDocId;
-
- protected IdCollector() {
- }
-
- /**
- * Get the UUIDs that have been collected.
- *
- * @return the set of UUIDs; never null
- */
- public Set<String> getIds() {
- return ids;
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.apache.lucene.search.Collector#acceptsDocsOutOfOrder()
- */
- @Override
- public boolean acceptsDocsOutOfOrder() {
- return true;
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.apache.lucene.search.Collector#setScorer(org.apache.lucene.search.Scorer)
- */
- @Override
- public void setScorer( Scorer scorer ) {
- // we don't care about scoring
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.apache.lucene.search.Collector#collect(int)
- */
- @Override
- public void collect( int docId ) {
- assert docId >= 0;
- String idString = idsByDocId[docId];
- assert idString != null;
- ids.add(idString);
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.apache.lucene.search.Collector#setNextReader(org.apache.lucene.index.IndexReader, int)
- */
- @Override
- public void setNextReader( IndexReader reader,
- int docBase ) throws IOException {
- this.idsByDocId = FieldCache.DEFAULT.getStrings(reader, ContentIndex.ID); // same value as PathIndex.ID
- // this.baseDocId = docBase;
- }
- }
-
- /**
* This collector is responsible for loading the value for each of the columns into each tuple array.
*/
protected static class DualIndexTupleCollector extends TupleCollector {
private final LuceneSearchSession session;
- private final LuceneSearchProcessor processor;
private final LinkedList<Object[]> tuples = new LinkedList<Object[]>();
private final Columns columns;
private final int numValues;
@@ -1494,14 +1199,11 @@
private Scorer scorer;
private IndexReader currentReader;
private int docOffset;
- private boolean resolvedLocations = false;
protected DualIndexTupleCollector( LuceneSearchSession session,
Columns columns ) {
this.session = session;
- this.processor = session.processor;
this.columns = columns;
- assert this.processor != null;
assert this.columns != null;
this.numValues = this.columns.getTupleSize();
assert this.numValues >= 0;
@@ -1510,13 +1212,16 @@
this.locationIndex = this.columns.getLocationIndex(selectorName);
this.recordScore = this.columns.hasFullTextSearchScores();
this.scoreIndex = this.recordScore ? this.columns.getFullTextSearchScoreIndexFor(selectorName) : -1;
- final Set<String> columnNames = new HashSet<String>(this.columns.getColumnNames());
- columnNames.add(ContentIndex.ID); // add the UUID, which we'll put into the Location ...
+
+ // Create the set of field names that we need to load from the document ...
+ final Set<String> fieldNames = new HashSet<String>(this.columns.getColumnNames());
+ fieldNames.add(ContentIndex.LOCATION_ID_PROPERTIES); // add the UUID, which we'll put into the Location ...
+ fieldNames.add(ContentIndex.PATH); // add the path, which we'll put into the Location ...
this.fieldSelector = new FieldSelector() {
private static final long serialVersionUID = 1L;
public FieldSelectorResult accept( String fieldName ) {
- return columnNames.contains(fieldName) ? FieldSelectorResult.LOAD : FieldSelectorResult.NO_LOAD;
+ return fieldNames.contains(fieldName) ? FieldSelectorResult.LOAD : FieldSelectorResult.NO_LOAD;
}
};
}
@@ -1526,43 +1231,9 @@
*/
@Override
public LinkedList<Object[]> getTuples() {
- resolveLocations();
return tuples;
}
- protected void resolveLocations() {
- if (resolvedLocations) return;
- try {
- // The Location field in the tuples all contain the ID of the document, so we need to replace these
- // with the appropriate Location objects, using the content from the PathIndex ...
- IndexReader pathReader = session.getPathsReader();
- IndexSearcher pathSearcher = session.getPathsSearcher();
- for (Object[] tuple : tuples) {
- String id = (String)tuple[locationIndex];
- assert id != null;
- Location location = getLocationForDocument(id, pathReader, pathSearcher);
- if (location == null) continue;
- tuple[locationIndex] = location;
- }
- resolvedLocations = true;
- } catch (IOException e) {
- throw new LuceneException(e);
- }
- }
-
- protected Location getLocationForDocument( String id,
- IndexReader pathReader,
- IndexSearcher pathSearcher ) throws IOException {
- // Find the path for this node (is there a better way to do this than one search per ID?) ...
- TopDocs pathDocs = pathSearcher.search(new TermQuery(new Term(PathIndex.ID, id)), 1);
- if (pathDocs.scoreDocs.length < 1) {
- // No path record found ...
- return null;
- }
- Document pathDoc = pathReader.document(pathDocs.scoreDocs[0].doc);
- return session.readLocation(pathDoc);
- }
-
/**
* {@inheritDoc}
*
@@ -1626,10 +1297,88 @@
tuple[scoreIndex] = scorer.score();
}
- // Load the document ID (which is a stringified UUID) into the Location slot,
- // which will be replaced later with a real Location ...
- tuple[locationIndex] = document.get(ContentIndex.ID);
+ // Read the location ...
+ tuple[locationIndex] = session.readLocation(document);
tuples.add(tuple);
}
}
+
+ /**
+ * This collector is responsible for loading the value for each of the columns into each tuple array.
+ */
+ protected static class FullTextSearchTupleCollector extends TupleCollector {
+ private final List<Object[]> tuples;
+ private final FieldSelector fieldSelector;
+ private final LuceneSearchSession session;
+ private Scorer scorer;
+ private IndexReader currentReader;
+ private int docOffset;
+
+ protected FullTextSearchTupleCollector( LuceneSearchSession session,
+ List<Object[]> tuples ) {
+ assert session != null;
+ assert tuples != null;
+ this.session = session;
+ this.tuples = tuples;
+ this.fieldSelector = LOCATION_FIELDS_SELECTOR;
+ }
+
+ /**
+ * @return tuples
+ */
+ @Override
+ public List<Object[]> getTuples() {
+ return tuples;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.Collector#acceptsDocsOutOfOrder()
+ */
+ @Override
+ public boolean acceptsDocsOutOfOrder() {
+ return true;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.Collector#setNextReader(org.apache.lucene.index.IndexReader, int)
+ */
+ @Override
+ public void setNextReader( IndexReader reader,
+ int docBase ) {
+ this.currentReader = reader;
+ this.docOffset = docBase;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.Collector#setScorer(org.apache.lucene.search.Scorer)
+ */
+ @Override
+ public void setScorer( Scorer scorer ) {
+ this.scorer = scorer;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.lucene.search.Collector#collect(int)
+ */
+ @Override
+ public void collect( int doc ) throws IOException {
+ int docId = doc + docOffset;
+ Object[] tuple = new Object[2];
+ Document document = currentReader.document(docId, fieldSelector);
+ // Read the Location ...
+ tuple[0] = session.readLocation(document);
+ // And read the score ...
+ tuple[1] = scorer.score();
+ // And add the tuple ...
+ tuples.add(tuple);
+ }
+ }
}
Modified: trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchWorkspace.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchWorkspace.java 2009-12-31 16:13:57 UTC (rev 1506)
+++ trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchWorkspace.java 2009-12-31 18:53:06 UTC (rev 1507)
@@ -45,8 +45,7 @@
*/
protected static final int CHANGES_BEFORE_OPTIMIZATION = 1;
- protected static final String PATHS_INDEX_NAME = "paths";
- protected static final String CONTENT_INDEX_NAME = "content";
+ protected static final String INDEX_NAME = "content";
/**
* Given the name of a property field of the form "<namespace>:<local>" (where <namespace> can be zero-length), this
@@ -57,24 +56,16 @@
protected static final String FULL_TEXT_PREFIX = ":ft:";
/**
- * This index stores only these fields, so we can use the most obvious names and not worry about clashes.
- */
- static class PathIndex {
- public static final String PATH = "pth";
- public static final String NODE_NAME = "nam";
- public static final String LOCAL_NAME = "loc";
- public static final String SNS_INDEX = "sns";
- public static final String LOCATION_ID_PROPERTIES = "idp";
- public static final String ID = ContentIndex.ID;
- public static final String DEPTH = "dep";
- }
-
- /**
- * This index stores these two fields <i>plus</i> all properties. Therefore, we have to worry about name clashes, which is why
+ * This index stores these fields <i>plus</i> all properties. Therefore, we have to worry about name clashes, which is why
* these field names are prefixed with '::', which is something that does appear in property names as they are serialized.
*/
static class ContentIndex {
- public static final String ID = "::id";
+ public static final String PATH = "::pth";
+ public static final String NODE_NAME = "::nam";
+ public static final String LOCAL_NAME = "::loc";
+ public static final String SNS_INDEX = "::sns";
+ public static final String LOCATION_ID_PROPERTIES = "::idp";
+ public static final String DEPTH = "::dep";
public static final String FULL_TEXT = "::fts";
}
@@ -82,7 +73,6 @@
private final String workspaceDirectoryName;
protected final IndexRules rules;
private final LuceneConfiguration configuration;
- protected final Directory pathDirectory;
protected final Directory contentDirectory;
protected final Analyzer analyzer;
private final Lock changesLock = new ReentrantLock();
@@ -99,8 +89,7 @@
this.analyzer = analyzer != null ? analyzer : new StandardAnalyzer(Version.LUCENE_30);
this.rules = rules != null ? rules : LuceneSearchEngine.DEFAULT_RULES;
this.configuration = configuration;
- this.pathDirectory = this.configuration.getDirectory(workspaceDirectoryName, PATHS_INDEX_NAME);
- this.contentDirectory = this.configuration.getDirectory(workspaceDirectoryName, CONTENT_INDEX_NAME);
+ this.contentDirectory = this.configuration.getDirectory(workspaceDirectoryName, INDEX_NAME);
}
/**
@@ -118,8 +107,7 @@
* @see org.jboss.dna.graph.search.SearchEngineWorkspace#destroy(org.jboss.dna.graph.ExecutionContext)
*/
public void destroy( ExecutionContext context ) {
- configuration.destroyDirectory(workspaceDirectoryName, PATHS_INDEX_NAME);
- configuration.destroyDirectory(workspaceDirectoryName, CONTENT_INDEX_NAME);
+ configuration.destroyDirectory(workspaceDirectoryName, INDEX_NAME);
}
/**
Deleted: trunk/extensions/dna-search-lucene/src/test/java/org/jboss/dna/search/lucene/EncodingNamespaceRegistryTest.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/test/java/org/jboss/dna/search/lucene/EncodingNamespaceRegistryTest.java 2009-12-31 16:13:57 UTC (rev 1506)
+++ trunk/extensions/dna-search-lucene/src/test/java/org/jboss/dna/search/lucene/EncodingNamespaceRegistryTest.java 2009-12-31 18:53:06 UTC (rev 1507)
@@ -1,102 +0,0 @@
-/*
- * JBoss DNA (http://www.jboss.org/dna)
- * See the COPYRIGHT.txt file distributed with this work for information
- * regarding copyright ownership. Some portions may be licensed
- * to Red Hat, Inc. under one or more contributor license agreements.
- * See the AUTHORS.txt file in the distribution for a full listing of
- * individual contributors.
- *
- * JBoss DNA is free software. Unless otherwise indicated, all code in JBoss DNA
- * is licensed to you under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2.1 of
- * the License, or (at your option) any later version.
- *
- * JBoss DNA is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this software; if not, write to the Free
- * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
- * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
- */
-package org.jboss.dna.search.lucene;
-
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
-import java.util.Collection;
-import org.jboss.dna.common.text.SecureHashTextEncoder;
-import org.jboss.dna.common.text.TextEncoder;
-import org.jboss.dna.common.util.SecureHash.Algorithm;
-import org.jboss.dna.graph.ExecutionContext;
-import org.jboss.dna.graph.property.NamespaceRegistry;
-import org.jboss.dna.graph.property.Path;
-import org.jboss.dna.graph.property.NamespaceRegistry.Namespace;
-import org.junit.Before;
-import org.junit.Test;
-
-/**
- *
- */
-public class EncodingNamespaceRegistryTest {
-
- private ExecutionContext context;
- private NamespaceRegistry registry;
- private EncodingNamespaceRegistry encodedRegistry;
- private TextEncoder encoder;
- private ExecutionContext encodedContext;
-
- @Before
- public void beforeEach() {
- this.context = new ExecutionContext();
- this.registry = this.context.getNamespaceRegistry();
- this.encoder = new SecureHashTextEncoder(Algorithm.SHA_1, 10);
- this.encodedRegistry = new EncodingNamespaceRegistry(registry, encoder);
- this.encodedContext = context.with(encodedRegistry);
- }
-
- @Test
- public void shouldHaveEncodedPrefixesForAllRegisteredNamespacesExceptFixedOnes() {
- Collection<Namespace> namespaces = registry.getNamespaces();
- assertThat(namespaces.size() > 4, is(true));
- for (Namespace namespace : namespaces) {
- String uri = namespace.getNamespaceUri();
- String actualEncodedPrefix = encodedRegistry.getPrefixForNamespaceUri(uri, false);
- if (encodedRegistry.getFixedNamespaceUris().contains(uri)) {
- assertThat(actualEncodedPrefix, is(namespace.getPrefix()));
- } else {
- String expectedEncodedPrefix = encoder.encode(uri);
- assertThat(expectedEncodedPrefix, is(actualEncodedPrefix));
- }
- String actualUri = encodedRegistry.getNamespaceForPrefix(actualEncodedPrefix);
- assertThat(uri, is(actualUri));
- }
- }
-
- @Test
- public void shouldAllowPathConversionToAndFromString() {
- String uri1 = "http://acme.com/wabbler";
- String uri2 = "http://troublemakers.com/contixity";
- String uri3 = "http://example.com/infinitiy";
- String ns1 = "wab";
- String ns2 = "ctx";
- String ns3 = "inf";
- registry.register(ns1, uri1);
- registry.register(ns2, uri2);
- registry.register(ns3, uri3);
- String pathStr = "/wab:part1/wab:part2/ctx:part3/inf:part4/dna:part5";
- Path actualPath = context.getValueFactories().getPathFactory().create(pathStr);
- String actualPathStr = context.getValueFactories().getStringFactory().create(actualPath);
- assertThat(pathStr, is(actualPathStr));
- String encodedPathStr = encodedContext.getValueFactories().getStringFactory().create(actualPath);
- String encodedPrefix1 = encoder.encode(uri1);
- String encodedPrefix2 = encoder.encode(uri2);
- String encodedPrefix3 = encoder.encode(uri3);
- String expectedPathStr = "/" + encodedPrefix1 + ":part1/" + encodedPrefix1 + ":part2/" + encodedPrefix2 + ":part3/"
- + encodedPrefix3 + ":part4/dna:part5";
- assertThat(expectedPathStr, is(encodedPathStr));
- Path actualPath2 = encodedContext.getValueFactories().getPathFactory().create(encodedPathStr);
- assertThat(actualPath, is(actualPath2));
- }
-}
14 years, 3 months
DNA SVN: r1506 - trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/model/simple.
by dna-commits@lists.jboss.org
Author: bcarothers
Date: 2009-12-31 11:13:57 -0500 (Thu, 31 Dec 2009)
New Revision: 1506
Modified:
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/model/simple/SimpleRequestProcessor.java
Log:
Added assert
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/model/simple/SimpleRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/model/simple/SimpleRequestProcessor.java 2009-12-31 16:07:24 UTC (rev 1505)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/model/simple/SimpleRequestProcessor.java 2009-12-31 16:13:57 UTC (rev 1506)
@@ -11,7 +11,9 @@
import org.jboss.dna.graph.connector.map.MapNode;
import org.jboss.dna.graph.connector.map.MapRequestProcessor;
import org.jboss.dna.graph.observe.Observer;
+import org.jboss.dna.graph.property.Path;
import org.jboss.dna.graph.property.PathFactory;
+import org.jboss.dna.graph.property.PathNotFoundException;
import org.jboss.dna.graph.request.CloneWorkspaceRequest;
import org.jboss.dna.graph.request.CreateWorkspaceRequest;
import org.jboss.dna.graph.request.InvalidRequestException;
@@ -59,61 +61,69 @@
int maximumDepth = request.maximumDepth();
List<MapNode> branch = workspace.getBranch(request.at(), maximumDepth);
- if (!branch.isEmpty()) {
- Map<UUID, LocationWithDepth> locations = new HashMap<UUID, LocationWithDepth>(branch.size());
+ if (branch.isEmpty()) {
+ Path lowest = null;
- /*
- * Add the first (root) node to the request
- */
- MapNode root = branch.get(0);
- Location rootLocation = getActualLocation(request.at(), root);
- request.setActualLocationOfNode(rootLocation);
- locations.put(root.getUuid(), new LocationWithDepth(rootLocation, 0));
+ if (request.at().hasPath()) {
+ lowest = workspace.getLowestExistingPath(request.at().getPath());
+ }
+ request.setError(new PathNotFoundException(request.at(), lowest));
+ return;
+ }
- /*
- * The obvious thing to do here would be to call root.getChildren(), but that would
- * result in the JPA implementation running an extra query to load the collection of
- * children for the entity even though we've already loaded all of the children
- * with the call to workspace.getBranch(...) earlier.
- *
- * We'll build the list of children ourselves knowing that all children are in the result set.
- *
- * The concrete type is used in the variable declaration instead of the relevant interface
- * (Multimap<UUID, Location>) because we need to cast the result of a .get(UUID) operation
- * to a List<Location> below and the interface only guarantees a Collection<Location>.
- */
- LinkedListMultimap<UUID, Location> childrenByParentUuid = LinkedListMultimap.create();
+ Map<UUID, LocationWithDepth> locations = new HashMap<UUID, LocationWithDepth>(branch.size());
- /*
- * We don't want to process the root node (the first node) in this loop
- * as this would cause us to unnecessarily load the root node's parent node.
- */
- for (int i = 1; i < branch.size(); i++) {
- MapNode node = branch.get(i);
- UUID parentUuid = node.getParent().getUuid();
+ /*
+ * Add the first (root) node to the request
+ */
+ MapNode root = branch.get(0);
+ Location rootLocation = getActualLocation(request.at(), root);
+ request.setActualLocationOfNode(rootLocation);
+ locations.put(root.getUuid(), new LocationWithDepth(rootLocation, 0));
- LocationWithDepth parentLocation = locations.get(parentUuid);
- Location nodeLocation = locationFor(parentLocation.getLocation(), node);
- locations.put(node.getUuid(), new LocationWithDepth(nodeLocation, parentLocation.getDepth() + 1));
+ /*
+ * The obvious thing to do here would be to call root.getChildren(), but that would
+ * result in the JPA implementation running an extra query to load the collection of
+ * children for the entity even though we've already loaded all of the children
+ * with the call to workspace.getBranch(...) earlier.
+ *
+ * We'll build the list of children ourselves knowing that all children are in the result set.
+ *
+ * The concrete type is used in the variable declaration instead of the relevant interface
+ * (Multimap<UUID, Location>) because we need to cast the result of a .get(UUID) operation
+ * to a List<Location> below and the interface only guarantees a Collection<Location>.
+ */
+ LinkedListMultimap<UUID, Location> childrenByParentUuid = LinkedListMultimap.create();
- childrenByParentUuid.put(parentUuid, locationFor(locations.get(parentUuid).getLocation(), node));
- }
+ /*
+ * We don't want to process the root node (the first node) in this loop
+ * as this would cause us to unnecessarily load the root node's parent node.
+ */
+ for (int i = 1; i < branch.size(); i++) {
+ MapNode node = branch.get(i);
+ UUID parentUuid = node.getParent().getUuid();
- request.setChildren(rootLocation, childrenByParentUuid.get(root.getUuid()));
- request.setProperties(rootLocation, root.getProperties().values());
+ LocationWithDepth parentLocation = locations.get(parentUuid);
+ Location nodeLocation = locationFor(parentLocation.getLocation(), node);
+ locations.put(node.getUuid(), new LocationWithDepth(nodeLocation, parentLocation.getDepth() + 1));
- /*
- * Process the subsequent nodes
- */
- for (int i = 1; i < branch.size(); i++) {
- MapNode node = branch.get(i);
+ childrenByParentUuid.put(parentUuid, locationFor(locations.get(parentUuid).getLocation(), node));
+ }
- UUID nodeUuid = node.getUuid();
- LocationWithDepth nodeLocation = locations.get(nodeUuid);
- if (nodeLocation.getDepth() < maximumDepth) {
- request.setChildren(nodeLocation.getLocation(), childrenByParentUuid.get(nodeUuid));
- request.setProperties(nodeLocation.getLocation(), node.getProperties().values());
- }
+ request.setChildren(rootLocation, childrenByParentUuid.get(root.getUuid()));
+ request.setProperties(rootLocation, root.getProperties().values());
+
+ /*
+ * Process the subsequent nodes
+ */
+ for (int i = 1; i < branch.size(); i++) {
+ MapNode node = branch.get(i);
+
+ UUID nodeUuid = node.getUuid();
+ LocationWithDepth nodeLocation = locations.get(nodeUuid);
+ if (nodeLocation.getDepth() < maximumDepth) {
+ request.setChildren(nodeLocation.getLocation(), childrenByParentUuid.get(nodeUuid));
+ request.setProperties(nodeLocation.getLocation(), node.getProperties().values());
}
}
14 years, 3 months
DNA SVN: r1505 - in trunk: dna-graph/src/main/java/org/jboss/dna/graph/query/parse and 14 other directories.
by dna-commits@lists.jboss.org
Author: rhauch
Date: 2009-12-31 11:07:24 -0500 (Thu, 31 Dec 2009)
New Revision: 1505
Added:
trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java
trunk/dna-jcr/src/test/resources/cars.cnd
trunk/dna-jcr/src/test/resources/io/
trunk/dna-jcr/src/test/resources/io/cars-system-view.xml
trunk/docs/examples/gettingstarted/repositories/src/test/resources/log4j.properties
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/optimize/ReplaceViews.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/parse/SqlQueryParser.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/plan/CanonicalPlanner.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/plan/PlanUtil.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/process/SelectComponent.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/validate/Validator.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/search/SearchEngineIndexer.java
trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/optimize/RuleBasedOptimizerTest.java
trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/plan/CanonicalPlannerTest.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrContentHandler.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrPropertyDefinition.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrQueryManager.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrRepository.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrSession.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/NodeTypeSchemata.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/RepositoryQueryManager.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathQueryParser.java
trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslator.java
trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java
trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslatorTest.java
trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchProcessor.java
trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchSession.java
Log:
DNA-468 Further fixes and refinements for search and query support through the JCR API. We still have issues with certain XPath queries, and not all the XPath-related TCK tests are passing, but those will be addressed shortly.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/optimize/ReplaceViews.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/optimize/ReplaceViews.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/optimize/ReplaceViews.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -102,6 +102,7 @@
// Resolve the node to find the definition in the schemata ...
SelectorName tableName = sourceNode.getProperty(Property.SOURCE_NAME, SelectorName.class);
+ SelectorName tableAlias = sourceNode.getProperty(Property.SOURCE_ALIAS, SelectorName.class);
Table table = schemata.getTable(tableName);
if (table instanceof View) {
View view = (View)table;
@@ -120,6 +121,11 @@
// tables/views used by the view ...
PlanUtil.ColumnMapping viewMappings = PlanUtil.createMappingFor(view, viewPlan);
PlanUtil.replaceViewReferences(context, viewPlan, viewMappings);
+ if (tableAlias != null) {
+ // We also need to replace references to the alias for the view ...
+ PlanUtil.ColumnMapping aliasMappings = PlanUtil.createMappingForAliased(tableAlias, view, viewPlan);
+ PlanUtil.replaceViewReferences(context, viewPlan, aliasMappings);
+ }
if (viewPlan.is(Type.PROJECT)) {
// The PROJECT from the plan may actually not be needed if there is another PROJECT above it ...
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/parse/SqlQueryParser.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/parse/SqlQueryParser.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/parse/SqlQueryParser.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -493,6 +493,11 @@
throw new ParsingException(pos, msg);
}
}
+ } else {
+ // We expected SELECT ...
+ Position pos = tokens.nextPosition();
+ String msg = GraphI18n.unexpectedToken.text(tokens.consume(), pos.getLine(), pos.getColumn());
+ throw new ParsingException(pos, msg);
}
return command;
}
@@ -804,7 +809,7 @@
return new FullTextSearchParser().parse(expression);
} catch (ParsingException e) {
// Convert the position in the exception into a position in the query.
- Position queryPos = startOfExpression.add(e.getPosition());
+ Position queryPos = startOfExpression.add(e.getPosition());
throw new ParsingException(queryPos, e.getMessage());
}
}
@@ -1110,7 +1115,7 @@
}
// Otherwise the source should be a single named selector
if (source instanceof Selector) {
- selectorName = ((Selector)source).getName();
+ selectorName = ((Selector)source).getAliasOrName();
return new PropertyValue(selectorName, firstWord);
}
String msg = GraphI18n.mustBeScopedAtLineAndColumn.text(firstWord, pos.getLine(), pos.getColumn());
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/plan/CanonicalPlanner.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/plan/CanonicalPlanner.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/plan/CanonicalPlanner.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -391,14 +391,16 @@
if (columns.isEmpty()) {
columns = new LinkedList<Column>();
// SELECT *, so find all of the columns that are available from all the sources ...
- for (Table table : selectors.values()) {
+ for (Map.Entry<SelectorName, Table> entry : selectors.entrySet()) {
+ SelectorName tableName = entry.getKey();
+ Table table = entry.getValue();
// Add the selector that is being used ...
- projectNode.addSelector(table.getName());
+ projectNode.addSelector(tableName);
// Compute the columns from this selector ...
for (Schemata.Column column : table.getColumns()) {
String columnName = column.getName();
String propertyName = columnName;
- columns.add(new Column(table.getName(), propertyName, columnName));
+ columns.add(new Column(tableName, propertyName, columnName));
}
}
} else {
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/plan/PlanUtil.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/plan/PlanUtil.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/plan/PlanUtil.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -827,6 +827,30 @@
return mapping;
}
+ public static ColumnMapping createMappingForAliased( SelectorName viewAlias,
+ View view,
+ PlanNode viewPlan ) {
+ ColumnMapping mapping = new ColumnMapping(viewAlias);
+
+ // Find the PROJECT node in the view plan ...
+ PlanNode project = viewPlan.findAtOrBelow(Type.PROJECT);
+ assert project != null;
+
+ // Get the Columns from the PROJECT in the plan node ...
+ List<Column> projectedColumns = project.getPropertyAsList(Property.PROJECT_COLUMNS, Column.class);
+
+ // Get the Schemata columns defined by the view ...
+ List<org.jboss.dna.graph.query.validate.Schemata.Column> viewColumns = view.getColumns();
+ assert viewColumns.size() == projectedColumns.size();
+
+ for (int i = 0; i != viewColumns.size(); ++i) {
+ Column projectedColunn = projectedColumns.get(i);
+ String viewColumnName = viewColumns.get(i).getName();
+ mapping.map(viewColumnName, projectedColunn);
+ }
+ return mapping;
+ }
+
/**
* Defines how the view columns are mapped (or resolved) into the columns from the source tables.
*/
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/process/SelectComponent.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/process/SelectComponent.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/process/SelectComponent.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -26,6 +26,7 @@
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
@@ -44,6 +45,7 @@
import org.jboss.dna.graph.query.model.Or;
import org.jboss.dna.graph.query.model.PropertyExistence;
import org.jboss.dna.graph.query.model.SameNode;
+import org.jboss.dna.graph.query.model.SetCriteria;
import org.jboss.dna.graph.query.model.StaticOperand;
import org.jboss.dna.graph.query.model.TypeSystem;
import org.jboss.dna.graph.query.model.TypeSystem.TypeFactory;
@@ -234,7 +236,6 @@
* in perhaps an non-ideal manner
* @return the constraint evaluator; never null
*/
- @SuppressWarnings( "unchecked" )
protected ConstraintChecker createChecker( final TypeSystem types,
Schemata schemata,
Columns columns,
@@ -396,82 +397,122 @@
Comparison comparison = (Comparison)constraint;
// Create the correct dynamic operation ...
- final DynamicOperation dynamicOperation = createDynamicOperation(types, schemata, columns, comparison.getOperand1());
- final String expectedType = dynamicOperation.getExpectedType();
-
- // Determine the literal value ...
+ DynamicOperation dynamicOperation = createDynamicOperation(types, schemata, columns, comparison.getOperand1());
+ Operator operator = comparison.getOperator();
StaticOperand staticOperand = comparison.getOperand2();
- Object literalValue = null;
- if (staticOperand instanceof BindVariableName) {
- BindVariableName bindVariable = (BindVariableName)staticOperand;
- String variableName = bindVariable.getVariableName();
- literalValue = variables.get(variableName); // may be null
- } else {
- Literal literal = (Literal)staticOperand;
- literalValue = literal.getValue();
+ return createChecker(types, schemata, columns, dynamicOperation, operator, staticOperand);
+ }
+ if (constraint instanceof SetCriteria) {
+ SetCriteria setCriteria = (SetCriteria)constraint;
+ DynamicOperation dynamicOperation = createDynamicOperation(types, schemata, columns, setCriteria.getLeftOperand());
+ Operator operator = Operator.EQUAL_TO;
+ final List<ConstraintChecker> checkers = new LinkedList<ConstraintChecker>();
+ for (StaticOperand setValue : setCriteria.getRightOperands()) {
+ ConstraintChecker rightChecker = createChecker(types, schemata, columns, dynamicOperation, operator, setValue);
+ assert rightChecker != null;
+ checkers.add(rightChecker);
}
- // Create the correct comparator ...
- final TypeFactory<?> typeFactory = types.getTypeFactory(expectedType);
- assert typeFactory != null;
- final Comparator<Object> comparator = (Comparator<Object>)typeFactory.getComparator();
- assert comparator != null;
- // Create the correct operation ...
- final TypeFactory<?> literalFactory = types.getTypeFactory(expectedType);
- final Object rhs = literalFactory.create(literalValue);
- switch (comparison.getOperator()) {
- case EQUAL_TO:
- return new ConstraintChecker() {
- public boolean satisfiesConstraints( Object[] tuples ) {
- return comparator.compare(dynamicOperation.evaluate(tuples), rhs) == 0;
- }
- };
- case GREATER_THAN:
- return new ConstraintChecker() {
- public boolean satisfiesConstraints( Object[] tuples ) {
- return comparator.compare(dynamicOperation.evaluate(tuples), rhs) > 0;
- }
- };
- case GREATER_THAN_OR_EQUAL_TO:
- return new ConstraintChecker() {
- public boolean satisfiesConstraints( Object[] tuples ) {
- return comparator.compare(dynamicOperation.evaluate(tuples), rhs) >= 0;
- }
- };
- case LESS_THAN:
- return new ConstraintChecker() {
- public boolean satisfiesConstraints( Object[] tuples ) {
- return comparator.compare(dynamicOperation.evaluate(tuples), rhs) < 0;
- }
- };
- case LESS_THAN_OR_EQUAL_TO:
- return new ConstraintChecker() {
- public boolean satisfiesConstraints( Object[] tuples ) {
- return comparator.compare(dynamicOperation.evaluate(tuples), rhs) <= 0;
- }
- };
- case NOT_EQUAL_TO:
- return new ConstraintChecker() {
- public boolean satisfiesConstraints( Object[] tuples ) {
- return comparator.compare(dynamicOperation.evaluate(tuples), rhs) != 0;
- }
- };
- case LIKE:
- // Convert the LIKE expression to a regular expression
- final Pattern pattern = createRegexFromLikeExpression(types.asString(rhs));
- return new ConstraintChecker() {
- public boolean satisfiesConstraints( Object[] tuples ) {
- Object tupleValue = dynamicOperation.evaluate(tuples);
- if (tupleValue == null) return false;
- String value = types.asString(tupleValue);
- return pattern.matcher(value).matches();
- }
- };
+ if (checkers.isEmpty()) {
+ // Nothing will satisfy these constraints ...
+ return new ConstraintChecker() {
+ public boolean satisfiesConstraints( Object[] tuple ) {
+ return false;
+ }
+ };
}
+ return new ConstraintChecker() {
+ public boolean satisfiesConstraints( Object[] tuple ) {
+ for (ConstraintChecker checker : checkers) {
+ if (checker.satisfiesConstraints(tuple)) return true;
+ }
+ return false;
+ }
+ };
}
assert false;
return null;
}
+ @SuppressWarnings( "unchecked" )
+ protected ConstraintChecker createChecker( final TypeSystem types,
+ Schemata schemata,
+ Columns columns,
+ final DynamicOperation dynamicOperation,
+ Operator operator,
+ StaticOperand staticOperand ) {
+ final String expectedType = dynamicOperation.getExpectedType();
+
+ // Determine the literal value ...
+ Object literalValue = null;
+ if (staticOperand instanceof BindVariableName) {
+ BindVariableName bindVariable = (BindVariableName)staticOperand;
+ String variableName = bindVariable.getVariableName();
+ literalValue = variables.get(variableName); // may be null
+ } else {
+ Literal literal = (Literal)staticOperand;
+ literalValue = literal.getValue();
+ }
+ // Create the correct comparator ...
+ final TypeFactory<?> typeFactory = types.getTypeFactory(expectedType);
+ assert typeFactory != null;
+ final Comparator<Object> comparator = (Comparator<Object>)typeFactory.getComparator();
+ assert comparator != null;
+ // Create the correct operation ...
+ final TypeFactory<?> literalFactory = types.getTypeFactory(expectedType);
+ final Object rhs = literalFactory.create(literalValue);
+ switch (operator) {
+ case EQUAL_TO:
+ return new ConstraintChecker() {
+ public boolean satisfiesConstraints( Object[] tuples ) {
+ return comparator.compare(dynamicOperation.evaluate(tuples), rhs) == 0;
+ }
+ };
+ case GREATER_THAN:
+ return new ConstraintChecker() {
+ public boolean satisfiesConstraints( Object[] tuples ) {
+ return comparator.compare(dynamicOperation.evaluate(tuples), rhs) > 0;
+ }
+ };
+ case GREATER_THAN_OR_EQUAL_TO:
+ return new ConstraintChecker() {
+ public boolean satisfiesConstraints( Object[] tuples ) {
+ return comparator.compare(dynamicOperation.evaluate(tuples), rhs) >= 0;
+ }
+ };
+ case LESS_THAN:
+ return new ConstraintChecker() {
+ public boolean satisfiesConstraints( Object[] tuples ) {
+ return comparator.compare(dynamicOperation.evaluate(tuples), rhs) < 0;
+ }
+ };
+ case LESS_THAN_OR_EQUAL_TO:
+ return new ConstraintChecker() {
+ public boolean satisfiesConstraints( Object[] tuples ) {
+ return comparator.compare(dynamicOperation.evaluate(tuples), rhs) <= 0;
+ }
+ };
+ case NOT_EQUAL_TO:
+ return new ConstraintChecker() {
+ public boolean satisfiesConstraints( Object[] tuples ) {
+ return comparator.compare(dynamicOperation.evaluate(tuples), rhs) != 0;
+ }
+ };
+ case LIKE:
+ // Convert the LIKE expression to a regular expression
+ final Pattern pattern = createRegexFromLikeExpression(types.asString(rhs));
+ return new ConstraintChecker() {
+ public boolean satisfiesConstraints( Object[] tuples ) {
+ Object tupleValue = dynamicOperation.evaluate(tuples);
+ if (tupleValue == null) return false;
+ String value = types.asString(tupleValue);
+ return pattern.matcher(value).matches();
+ }
+ };
+ }
+ assert false;
+ return null;
+ }
+
protected static Pattern createRegexFromLikeExpression( String likeExpression ) {
return null;
}
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/validate/Validator.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/validate/Validator.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/query/validate/Validator.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -23,6 +23,7 @@
*/
package org.jboss.dna.graph.query.validate;
+import java.util.HashMap;
import java.util.Map;
import org.jboss.dna.common.collection.Problems;
import org.jboss.dna.graph.GraphI18n;
@@ -59,6 +60,7 @@
private final QueryContext context;
private final Problems problems;
+ private final Map<SelectorName, Table> selectorsByNameOrAlias;
private final Map<SelectorName, Table> selectorsByName;
/**
@@ -69,7 +71,11 @@
Map<SelectorName, Table> selectorsByName ) {
this.context = context;
this.problems = this.context.getProblems();
- this.selectorsByName = selectorsByName;
+ this.selectorsByNameOrAlias = selectorsByName;
+ this.selectorsByName = new HashMap<SelectorName, Table>();
+ for (Table table : selectorsByName.values()) {
+ this.selectorsByName.put(table.getName(), table);
+ }
}
/**
@@ -301,8 +307,17 @@
verify(obj.getSelector2Name());
}
+ protected Table tableWithNameOrAlias( SelectorName tableName ) {
+ Table table = selectorsByNameOrAlias.get(tableName);
+ if (table == null) {
+ // Try looking up the table by it's real name (if an alias were used) ...
+ table = selectorsByName.get(tableName);
+ }
+ return table;
+ }
+
protected Table verify( SelectorName selectorName ) {
- Table table = selectorsByName.get(selectorName);
+ Table table = tableWithNameOrAlias(selectorName);
if (table == null) {
problems.addError(GraphI18n.tableDoesNotExist, selectorName.getName());
}
@@ -310,7 +325,7 @@
}
protected Table verifyTable( SelectorName tableName ) {
- Table table = selectorsByName.get(tableName);
+ Table table = tableWithNameOrAlias(tableName);
if (table == null) {
problems.addError(GraphI18n.tableDoesNotExist, tableName.getName());
}
@@ -319,7 +334,7 @@
protected Schemata.Column verify( SelectorName selectorName,
String propertyName ) {
- Table table = selectorsByName.get(selectorName);
+ Table table = tableWithNameOrAlias(selectorName);
if (table == null) {
problems.addError(GraphI18n.tableDoesNotExist, selectorName.getName());
return null;
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/search/SearchEngineIndexer.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/search/SearchEngineIndexer.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/search/SearchEngineIndexer.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -35,8 +35,10 @@
import org.jboss.dna.common.util.CheckArg;
import org.jboss.dna.common.util.Logger;
import org.jboss.dna.common.util.NamedThreadFactory;
+import org.jboss.dna.graph.DnaLexicon;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.GraphI18n;
+import org.jboss.dna.graph.JcrLexicon;
import org.jboss.dna.graph.Location;
import org.jboss.dna.graph.connector.RepositoryConnectionFactory;
import org.jboss.dna.graph.connector.RepositorySourceException;
@@ -286,6 +288,11 @@
Location topNode = locationIter.next();
assert topNode.equals(startingLocation);
Map<Name, Property> properties = readSubgraph.getPropertiesFor(topNode);
+ if (startingLocation.getPath().isRoot()) {
+ // The properties of the root node generally don't include the primary type, but we need to add it here ...
+ Property rootPrimaryType = context.getPropertyFactory().create(JcrLexicon.PRIMARY_TYPE, DnaLexicon.ROOT);
+ properties.put(JcrLexicon.PRIMARY_TYPE, rootPrimaryType);
+ }
UpdatePropertiesRequest request = new UpdatePropertiesRequest(topNode, workspaceName, properties, true);
request.setActualLocationOfNode(topNode);
process(request);
Modified: trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/optimize/RuleBasedOptimizerTest.java
===================================================================
--- trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/optimize/RuleBasedOptimizerTest.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/optimize/RuleBasedOptimizerTest.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -184,6 +184,79 @@
}
@Test
+ public void shouldOptimizePlanForSimpleQueryWithSelectStarWithAlias() {
+ node = optimize("SELECT * FROM t1 AS x1");
+ // Create the expected plan ...
+ PlanNode expected = new PlanNode(Type.ACCESS, selector("x1"));
+ PlanNode project = new PlanNode(Type.PROJECT, expected, selector("x1"));
+ project.setProperty(Property.PROJECT_COLUMNS, columns(column("x1", "c11"), column("x1", "c12"), column("x1", "c13")));
+ PlanNode source = new PlanNode(Type.SOURCE, project, selector("x1"));
+ source.setProperty(Property.SOURCE_NAME, selector("t1"));
+ source.setProperty(Property.SOURCE_ALIAS, selector("x1"));
+ source.setProperty(Property.SOURCE_COLUMNS, context.getSchemata().getTable(selector("t1")).getColumns());
+ // Compare the expected and actual plan ...
+ assertThat(node.isSameAs(expected), is(true));
+ }
+
+ @Test
+ public void shouldOptimizePlanForSimpleQueryWithSelectStarFromTableWithAliasAndValueCriteria() {
+ node = optimize("SELECT * FROM t1 AS x1 WHERE c13 < CAST('3' AS LONG)");
+ // Create the expected plan ...
+ PlanNode expected = new PlanNode(Type.ACCESS, selector("x1"));
+ PlanNode project = new PlanNode(Type.PROJECT, expected, selector("x1"));
+ project.setProperty(Property.PROJECT_COLUMNS, columns(column("x1", "c11"), column("x1", "c12"), column("x1", "c13")));
+ PlanNode select = new PlanNode(Type.SELECT, project, selector("x1"));
+ select.setProperty(Property.SELECT_CRITERIA, new Comparison(new PropertyValue(selector("x1"), "c13"), Operator.LESS_THAN,
+ new Literal(3L)));
+ PlanNode source = new PlanNode(Type.SOURCE, select, selector("x1"));
+ source.setProperty(Property.SOURCE_NAME, selector("t1"));
+ source.setProperty(Property.SOURCE_ALIAS, selector("x1"));
+ source.setProperty(Property.SOURCE_COLUMNS, context.getSchemata().getTable(selector("t1")).getColumns());
+ // Compare the expected and actual plan ...
+ assertThat(node.isSameAs(expected), is(true));
+ }
+
+ @Test
+ public void shouldOptimizePlanForSimpleQueryWithSelectStarFromViewWithNoAliasAndValueCriteria() {
+ node = optimize("SELECT * FROM v1 WHERE c11 = 'value'");
+ // Create the expected plan ...
+ PlanNode expected = new PlanNode(Type.ACCESS, selector("t1"));
+ PlanNode project = new PlanNode(Type.PROJECT, expected, selector("t1"));
+ project.setProperty(Property.PROJECT_COLUMNS, columns(column("t1", "c11"), column("t1", "c12", "c2")));
+ PlanNode select1 = new PlanNode(Type.SELECT, project, selector("t1"));
+ select1.setProperty(Property.SELECT_CRITERIA, new Comparison(new PropertyValue(selector("t1"), "c11"), Operator.EQUAL_TO,
+ new Literal("value")));
+ PlanNode select2 = new PlanNode(Type.SELECT, select1, selector("t1"));
+ select2.setProperty(Property.SELECT_CRITERIA, new Comparison(new PropertyValue(selector("t1"), "c13"),
+ Operator.LESS_THAN, new Literal(3L)));
+ PlanNode source = new PlanNode(Type.SOURCE, select2, selector("t1"));
+ source.setProperty(Property.SOURCE_NAME, selector("t1"));
+ source.setProperty(Property.SOURCE_COLUMNS, context.getSchemata().getTable(selector("t1")).getColumns());
+ // Compare the expected and actual plan ...
+ assertThat(node.isSameAs(expected), is(true));
+ }
+
+ @Test
+ public void shouldOptimizePlanForSimpleQueryWithSelectStarFromViewWithAliasAndValueCriteria() {
+ node = optimize("SELECT * FROM v1 AS x1 WHERE c11 = 'value'");
+ // Create the expected plan ...
+ PlanNode expected = new PlanNode(Type.ACCESS, selector("t1"));
+ PlanNode project = new PlanNode(Type.PROJECT, expected, selector("t1"));
+ project.setProperty(Property.PROJECT_COLUMNS, columns(column("t1", "c11"), column("t1", "c12", "c2")));
+ PlanNode select1 = new PlanNode(Type.SELECT, project, selector("t1"));
+ select1.setProperty(Property.SELECT_CRITERIA, new Comparison(new PropertyValue(selector("t1"), "c11"), Operator.EQUAL_TO,
+ new Literal("value")));
+ PlanNode select2 = new PlanNode(Type.SELECT, select1, selector("t1"));
+ select2.setProperty(Property.SELECT_CRITERIA, new Comparison(new PropertyValue(selector("t1"), "c13"),
+ Operator.LESS_THAN, new Literal(3L)));
+ PlanNode source = new PlanNode(Type.SOURCE, select2, selector("t1"));
+ source.setProperty(Property.SOURCE_NAME, selector("t1"));
+ source.setProperty(Property.SOURCE_COLUMNS, context.getSchemata().getTable(selector("t1")).getColumns());
+ // Compare the expected and actual plan ...
+ assertThat(node.isSameAs(expected), is(true));
+ }
+
+ @Test
public void shouldOptimizePlanForSimpleQueryWithPropertyValueCriteria() {
node = optimize("SELECT c11, c12 FROM t1 WHERE c13 < CAST('3' AS LONG)");
// Create the expected plan ...
Modified: trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/plan/CanonicalPlannerTest.java
===================================================================
--- trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/plan/CanonicalPlannerTest.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/plan/CanonicalPlannerTest.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -26,7 +26,9 @@
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import java.util.Collection;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
import org.jboss.dna.common.collection.Problems;
import org.jboss.dna.common.collection.SimpleProblems;
import org.jboss.dna.graph.ExecutionContext;
@@ -73,6 +75,14 @@
return new SelectorName(name);
}
+ protected Set<SelectorName> selectors( String... names ) {
+ Set<SelectorName> selectors = new HashSet<SelectorName>();
+ for (String name : names) {
+ selectors.add(selector(name));
+ }
+ return selectors;
+ }
+
@SuppressWarnings( "unchecked" )
protected void assertProjectNode( PlanNode node,
String... columnNames ) {
@@ -161,6 +171,41 @@
}
@Test
+ public void shouldProducePlanWhenSelectingColumnsFromTableWithoutAlias() {
+ schemata = schemataBuilder.addTable("someTable", "column1", "column2", "column3").build();
+ query = builder.select("column1", "column2").from("someTable").where().path("someTable").isEqualTo(1L).end().query();
+ queryContext = new QueryContext(schemata, typeSystem, hints, problems);
+ plan = planner.createPlan(queryContext, query);
+ assertThat(problems.hasErrors(), is(false));
+ assertThat(plan.getType(), is(PlanNode.Type.PROJECT));
+ assertThat(plan.getSelectors(), is(selectors("someTable")));
+ }
+
+ @Test
+ public void shouldProducePlanWhenSelectingColumnsFromTableWithAlias() {
+ schemata = schemataBuilder.addTable("dna:someTable", "column1", "column2", "column3").build();
+ query = builder.select("column1", "column2").from("dna:someTable AS t1").where().path("t1").isEqualTo(1L).end().query();
+ queryContext = new QueryContext(schemata, typeSystem, hints, problems);
+ plan = planner.createPlan(queryContext, query);
+ assertThat(problems.hasErrors(), is(false));
+ System.out.println(plan);
+ assertThat(plan.getType(), is(PlanNode.Type.PROJECT));
+ assertThat(plan.getSelectors(), is(selectors("t1")));
+ }
+
+ @Test
+ public void shouldProducePlanWhenSelectingAllColumnsFromTableWithAlias() {
+ schemata = schemataBuilder.addTable("dna:someTable", "column1", "column2", "column3").build();
+ query = builder.selectStar().from("dna:someTable AS t1").where().path("t1").isEqualTo(1L).end().query();
+ queryContext = new QueryContext(schemata, typeSystem, hints, problems);
+ plan = planner.createPlan(queryContext, query);
+ assertThat(problems.hasErrors(), is(false));
+ System.out.println(plan);
+ assertThat(plan.getType(), is(PlanNode.Type.PROJECT));
+ assertThat(plan.getSelectors(), is(selectors("t1")));
+ }
+
+ @Test
public void shouldProduceErrorWhenFullTextSearchingTableWithNoSearchableColumns() {
schemata = schemataBuilder.addTable("someTable", "column1", "column2", "column3").build();
// Make sure the query without the search criteria does not have an error
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrContentHandler.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrContentHandler.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrContentHandler.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -291,7 +291,7 @@
private String currentPropName;
private int currentPropType;
- private StringBuffer valueBuffer;
+ private StringBuilder valueBuffer;
private final Map<String, List<Value>> currentProps;
/**
@@ -303,7 +303,6 @@
this.parentStack.push(currentNode);
this.currentProps = new HashMap<String, List<Value>>();
- this.valueBuffer = new StringBuffer();
this.svNameName = JcrSvLexicon.NAME.getString(namespaces());
this.svTypeName = JcrSvLexicon.TYPE.getString(namespaces());
@@ -320,6 +319,8 @@
String localName,
String name,
Attributes atts ) throws SAXException {
+ // Always create a new string buffer for the content value, because we're starting a new element ...
+ valueBuffer = new StringBuilder();
if ("node".equals(localName)) {
if (currentNodeName != null) {
addNodeIfPending();
@@ -346,7 +347,7 @@
if (rawUuid != null) {
assert rawUuid.size() == 1;
uuid = UUID.fromString(rawUuid.get(0).getString());
-
+
try {
// Deal with any existing node ...
AbstractJcrNode existingNodeWithUuid = cache().findJcrNode(Location.create(uuid));
@@ -363,20 +364,23 @@
throw new ConstraintViolationException(
JcrI18n.cannotRemoveParentNodeOfTarget.text(existingNodeWithUuid.getPath(),
uuid,
- parentStack.firstElement().getPath()));
+ parentStack.firstElement()
+ .getPath()));
}
existingNodeWithUuid.remove();
break;
case ImportUUIDBehavior.IMPORT_UUID_COLLISION_THROW:
throw new ItemExistsException(
JcrI18n.itemAlreadyExistsWithUuid.text(uuid,
- cache().session().workspace().getName(),
+ cache().session()
+ .workspace()
+ .getName(),
existingNodeWithUuid.getPath()));
}
} catch (ItemNotFoundException e) {
// there wasn't an existing item, so just continue
}
-
+
}
String typeName = currentProps.get(primaryTypeName).get(0).getString();
@@ -439,7 +443,6 @@
} catch (RepositoryException re) {
throw new EnclosingSAXException(re);
}
- valueBuffer = new StringBuffer();
}
}
@@ -453,7 +456,6 @@
int start,
int length ) {
valueBuffer.append(ch, start, length);
-
}
}
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrPropertyDefinition.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrPropertyDefinition.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrPropertyDefinition.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -31,6 +31,7 @@
import javax.jcr.Value;
import javax.jcr.nodetype.PropertyDefinition;
import net.jcip.annotations.Immutable;
+import org.jboss.dna.graph.DnaIntLexicon;
import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.Location;
import org.jboss.dna.graph.property.Binary;
@@ -56,6 +57,7 @@
private final String[] valueConstraints;
private final boolean multiple;
private final boolean fullTextSearchable;
+ private final boolean isPrivate;
private PropertyDefinitionId id;
private ConstraintChecker checker = null;
@@ -77,6 +79,7 @@
this.valueConstraints = valueConstraints;
this.multiple = multiple;
this.fullTextSearchable = fullTextSearchable;
+ this.isPrivate = name.getNamespaceUri().equals(DnaIntLexicon.Namespace.URI);
}
/**
@@ -128,6 +131,15 @@
return multiple;
}
+ /**
+ * Return whether this property definition is considered private.
+ *
+ * @return true if the definition is private, or false otherwise
+ */
+ public boolean isPrivate() {
+ return isPrivate;
+ }
+
public boolean isFullTextSearchable() {
return fullTextSearchable;
}
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrQueryManager.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrQueryManager.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrQueryManager.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -114,6 +114,10 @@
try {
// Parsing must be done now ...
QueryCommand command = parser.parseQuery(expression, typeSystem);
+ if (command == null) {
+ // The query is not well-formed and cannot be parsed ...
+ throw new InvalidQueryException(JcrI18n.queryCannotBeParsedUsingLanguage.text(language, expression));
+ }
PlanHints hints = new PlanHints();
// If using XPath, we need to add a few hints ...
if (Query.XPATH.equals(language)) {
@@ -420,6 +424,16 @@
final List<Object[]> tuples = results.getTuples();
return new QueryResultRowIterator(session, results.getColumns(), tuples.iterator(), numRows);
}
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return results.toString();
+ }
}
/**
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrRepository.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrRepository.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrRepository.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -321,6 +321,22 @@
*/
protected static final Map<Option, String> DEFAULT_OPTIONS;
+ public static final class QueryLanguage {
+ /**
+ * The standard JCR 1.0 XPath query language.
+ */
+ public static final String XPATH = Query.XPATH;
+ /**
+ * The SQL dialect that is based upon an enhanced version of the JCR 2.0 SQL query language.
+ */
+ public static final String SQL = SqlQueryParser.LANGUAGE;
+ /**
+ * The full-text search language defined as part of the abstract query model, in Section 6.7.19 of the JCR 2.0
+ * specification.
+ */
+ public static final String SEARCH = FullTextSearchParser.LANGUAGE;
+ }
+
static {
// Initialize the unmodifiable map of default options ...
EnumMap<Option, String> defaults = new EnumMap<Option, String>(Option.class);
@@ -932,7 +948,7 @@
if (this.federatedSource != null) {
this.federatedSource.close();
}
-
+
this.repositoryObservationManager.shutdown();
}
@@ -1192,7 +1208,7 @@
/**
* @param repositoryObservable the repository library observable this observer should register with
*/
- protected RepositoryObservationManager(Observable repositoryObservable) {
+ protected RepositoryObservationManager( Observable repositoryObservable ) {
this.repositoryObservable = repositoryObservable;
this.repositoryObservable.register(this);
}
@@ -1249,7 +1265,7 @@
this.observerService.shutdown();
}
}
-
+
/**
* {@inheritDoc}
*
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrSession.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrSession.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/JcrSession.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -65,6 +65,7 @@
import org.jboss.dna.graph.property.Path;
import org.jboss.dna.graph.property.PathFactory;
import org.jboss.dna.graph.property.ValueFactories;
+import org.jboss.dna.graph.request.InvalidWorkspaceException;
import org.jboss.dna.graph.session.GraphSession;
import org.jboss.dna.jcr.JcrContentHandler.EnclosingSAXException;
import org.jboss.dna.jcr.JcrContentHandler.SaveMode;
@@ -880,6 +881,29 @@
}
/**
+ * Crawl and index all of the content in this session's workspace.
+ *
+ * @throws InvalidWorkspaceException if this session's workspace no longer exists
+ */
+ public void reindexContent() {
+ repository().queryManager().reindexContent(workspace());
+ }
+
+ /**
+ * Crawl and index the content starting at the supplied path in this workspace, to the designated depth.
+ *
+ * @param path the path of the content to be indexed
+ * @param depth the depth of the content to be indexed
+ * @throws IllegalArgumentException if the path is null, or if the depth is less than 1
+ * @throws InvalidWorkspaceException if there is no workspace with the supplied name
+ */
+ public void reindexContent( String path,
+ int depth ) {
+ repository().queryManager().reindexContent(workspace(), path, depth);
+ }
+
+ /**
* Get a snapshot of the current session state. This snapshot is immutable and will not reflect any future state changes in
* the session.
*
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/NodeTypeSchemata.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/NodeTypeSchemata.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/NodeTypeSchemata.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -139,8 +139,10 @@
Set<String> fullTextSearchableNames = new HashSet<String>();
for (JcrPropertyDefinition defn : propertyDefinitions) {
if (defn.isResidual()) continue;
+ if (defn.isPrivate()) continue;
// if (defn.isMultiple()) continue;
Name name = defn.getInternalName();
+
String columnName = name.getString(registry);
if (first) {
builder.addTable(tableName, columnName);
@@ -229,12 +231,18 @@
for (JcrPropertyDefinition defn : defns) {
if (defn.isResidual()) continue;
if (defn.isMultiple()) continue;
+ if (defn.isPrivate()) continue;
Name name = defn.getInternalName();
+
String columnName = name.getString(registry);
if (first) first = false;
else viewDefinition.append(',');
viewDefinition.append('[').append(columnName).append(']');
}
+ if (first) {
+ // All the properties were skipped ...
+ return;
+ }
viewDefinition.append(" FROM ").append(AllNodes.ALL_NODES_NAME);
// The 'nt:base' node type will have every single object in it, so we don't need to add the type criteria ...
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/RepositoryQueryManager.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/RepositoryQueryManager.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/RepositoryQueryManager.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -43,6 +43,7 @@
import org.jboss.dna.graph.observe.Changes;
import org.jboss.dna.graph.observe.Observable;
import org.jboss.dna.graph.observe.Observer;
+import org.jboss.dna.graph.property.Path;
import org.jboss.dna.graph.query.QueryContext;
import org.jboss.dna.graph.query.QueryEngine;
import org.jboss.dna.graph.query.QueryResults;
@@ -62,8 +63,10 @@
import org.jboss.dna.graph.query.process.SelectComponent.Analyzer;
import org.jboss.dna.graph.query.validate.Schemata;
import org.jboss.dna.graph.request.AccessQueryRequest;
+import org.jboss.dna.graph.request.InvalidWorkspaceException;
import org.jboss.dna.graph.request.processor.RequestProcessor;
import org.jboss.dna.graph.search.SearchEngine;
+import org.jboss.dna.graph.search.SearchEngineIndexer;
import org.jboss.dna.graph.search.SearchEngineProcessor;
import org.jboss.dna.search.lucene.IndexRules;
import org.jboss.dna.search.lucene.LuceneConfiguration;
@@ -98,6 +101,42 @@
return workspace.graph().search(searchExpression, maxRowCount, offset);
}
+ /**
+ * Crawl and index the content in all workspaces of this repository source.
+ *
+ * @throws InvalidWorkspaceException if one of the workspaces no longer exists
+ */
+ public void reindexContent() {
+ // do nothing by default
+ }
+
+ /**
+ * Crawl and index the content in the named workspace.
+ *
+ * @param workspace the workspace
+ * @throws IllegalArgumentException if the workspace is null
+ * @throws InvalidWorkspaceException if there is no workspace with the supplied name
+ */
+ public void reindexContent( JcrWorkspace workspace ) {
+ // do nothing by default
+ }
+
+ /**
+ * Crawl and index the content starting at the supplied path in the named workspace, to the designated depth.
+ *
+ * @param workspace the workspace
+ * @param path the path of the content to be indexed
+ * @param depth the depth of the content to be indexed
+ * @throws IllegalArgumentException if the workspace or path is null, or if the depth is less than 1
+ * @throws InvalidWorkspaceException if there is no workspace with the supplied name
+ */
+ public void reindexContent( JcrWorkspace workspace,
+ String path,
+ int depth ) {
+ // do nothing by default
+ }
+
static class Disabled extends RepositoryQueryManager {
/**
@@ -138,6 +177,7 @@
private final Observer searchObserver;
private final ExecutorService service;
private final QueryEngine queryEngine;
+ private final RepositoryConnectionFactory connectionFactory;
SelfContained( ExecutionContext context,
String nameOfSourceToBeSearchable,
@@ -147,6 +187,7 @@
boolean updateIndexesSynchronously ) throws RepositoryException {
this.context = context;
this.sourceName = nameOfSourceToBeSearchable;
+ this.connectionFactory = connectionFactory;
// Define the configuration ...
TextEncoder encoder = new UrlEncoder();
if (indexDirectory != null) {
@@ -273,6 +314,59 @@
}
}
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.jcr.RepositoryQueryManager#reindexContent()
+ */
+ @Override
+ public void reindexContent() {
+ // Index the existing content ...
+ Graph graph = Graph.create(sourceName, connectionFactory, context);
+ SearchEngineIndexer indexer = new SearchEngineIndexer(context, searchEngine, connectionFactory);
+ try {
+ for (String workspace : graph.getWorkspaces()) {
+ indexer.index(workspace);
+ }
+ } finally {
+ indexer.close();
+ }
+
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.jcr.RepositoryQueryManager#reindexContent(org.jboss.dna.jcr.JcrWorkspace)
+ */
+ @Override
+ public void reindexContent( JcrWorkspace workspace ) {
+ SearchEngineIndexer indexer = new SearchEngineIndexer(context, searchEngine, connectionFactory);
+ try {
+ indexer.index(workspace.getName());
+ } finally {
+ indexer.close();
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.jcr.RepositoryQueryManager#reindexContent(org.jboss.dna.jcr.JcrWorkspace, java.lang.String, int)
+ */
+ @Override
+ public void reindexContent( JcrWorkspace workspace,
+ String path,
+ int depth ) {
+ Path at = workspace.context().getValueFactories().getPathFactory().create(path);
+ SearchEngineIndexer indexer = new SearchEngineIndexer(context, searchEngine, connectionFactory);
+ try {
+ indexer.index(workspace.getName(), at, depth);
+ } finally {
+ indexer.close();
+ }
+ }
+
protected class GraphQueryContext extends QueryContext {
private final RequestProcessor processor;
private final String workspaceName;
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathQueryParser.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathQueryParser.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathQueryParser.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -82,8 +82,6 @@
public QueryCommand parseQuery( String query,
TypeSystem typeSystem ) throws InvalidQueryException, ParsingException {
Component xpath = new XPathParser(typeSystem).parseXPath(query);
- System.out.println(query);
- System.out.println(" --> " + xpath);
// Convert the result into a QueryCommand ...
QueryCommand command = new XPathToQueryTranslator(typeSystem, query).createQuery(xpath);
return command;
Modified: trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslator.java
===================================================================
--- trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslator.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/main/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslator.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -30,6 +30,7 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+import org.jboss.dna.graph.ExecutionContext;
import org.jboss.dna.graph.property.PropertyType;
import org.jboss.dna.graph.query.QueryBuilder;
import org.jboss.dna.graph.query.QueryBuilder.ConstraintBuilder;
@@ -37,6 +38,7 @@
import org.jboss.dna.graph.query.model.QueryCommand;
import org.jboss.dna.graph.query.model.TypeSystem;
import org.jboss.dna.graph.query.parse.InvalidQueryException;
+import org.jboss.dna.jcr.JcrNtLexicon;
import org.jboss.dna.jcr.xpath.XPath.And;
import org.jboss.dna.jcr.xpath.XPath.AttributeNameTest;
import org.jboss.dna.jcr.xpath.XPath.AxisStep;
@@ -298,6 +300,14 @@
protected String translateSource( String tableName,
List<StepExpression> path,
ConstraintBuilder where ) {
+ if (path.size() == 0) {
+ // This is a query against the root node ...
+ ExecutionContext context = new ExecutionContext();
+ String alias = newAlias();
+ builder.from(JcrNtLexicon.BASE.getString(context.getNamespaceRegistry()) + " AS " + alias);
+ where.path(alias).isEqualTo("/");
+ return alias;
+ }
String alias = newAlias();
if (tableName != null) {
// This is after some element(...) steps, so we need to join ...
@@ -310,7 +320,7 @@
if (path.size() == 1 && path.get(0).collapse() instanceof NameTest) {
// Node immediately below root ...
NameTest nodeName = (NameTest)path.get(0).collapse();
- where.nodeName(alias).isEqualTo(nameFrom(nodeName)).and().depth(alias).isEqualTo(1);
+ where.path(alias).isEqualTo("/" + nameFrom(nodeName));
} else if (path.size() == 2 && path.get(0) instanceof DescendantOrSelf && path.get(1).collapse() instanceof NameTest) {
// Node anywhere ...
NameTest nodeName = (NameTest)path.get(1).collapse();
Added: trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java
===================================================================
--- trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java (rev 0)
+++ trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -0,0 +1,278 @@
+/*
+ * JBoss DNA (http://www.jboss.org/dna)
+ * See the COPYRIGHT.txt file distributed with this work for information
+ * regarding copyright ownership. Some portions may be licensed
+ * to Red Hat, Inc. under one or more contributor license agreements.
+ * See the AUTHORS.txt file in the distribution for a full listing of
+ * individual contributors.
+ *
+ * JBoss DNA is free software. Unless otherwise indicated, all code in JBoss DNA
+ * is licensed to you under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * JBoss DNA is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.jcr;
+
+import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNull.notNullValue;
+import static org.junit.Assert.assertThat;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import javax.jcr.ImportUUIDBehavior;
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.query.Query;
+import javax.jcr.query.QueryResult;
+import org.jboss.dna.graph.connector.inmemory.InMemoryRepositorySource;
+import org.jboss.dna.graph.property.Name;
+import org.jboss.dna.graph.property.Path.Segment;
+import org.jboss.dna.jcr.JcrRepository.Option;
+import org.jboss.dna.jcr.JcrRepository.QueryLanguage;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * This is a test suite that operates against a complete JcrRepository instance created and managed using the JcrEngine.
+ * Essentially this is an integration test, but it does test lower-level functionality of the implementation of the JCR interfaces
+ * related to querying. (It is simply more difficult to unit test these implementations because of the difficulty in mocking the
+ * many other components to replicate the same functionality.)
+ * <p>
+ * Also, because queries are read-only, the engine is set up once and used for the entire set of test methods.
+ * </p>
+ */
+public class JcrQueryManagerTest {
+
+ protected static URI resourceUri( String name ) throws URISyntaxException {
+ return resourceUrl(name).toURI();
+ }
+
+ protected static URL resourceUrl( String name ) {
+ return JcrQueryManagerTest.class.getClassLoader().getResource(name);
+ }
+
+ protected static InputStream resourceStream( String name ) {
+ return JcrQueryManagerTest.class.getClassLoader().getResourceAsStream(name);
+ }
+
+ private static JcrConfiguration configuration;
+ private static JcrEngine engine;
+ private static JcrRepository repository;
+ private Session session;
+
+ @BeforeClass
+ public static void beforeAll() throws Exception {
+ configuration = new JcrConfiguration();
+ configuration.repositorySource("car-source")
+ .usingClass(InMemoryRepositorySource.class)
+ .setDescription("The automobile content");
+ configuration.repository("cars")
+ .setSource("car-source")
+ .registerNamespace("car", "http://www.jboss.org/dna/examples/cars/1.0")
+ .addNodeTypes(resourceUrl("cars.cnd"))
+ .setOption(Option.ANONYMOUS_USER_ROLES,
+ JcrSession.DNA_READ_PERMISSION + "," + JcrSession.DNA_WRITE_PERMISSION);
+ engine = configuration.build();
+ engine.start();
+
+ // Start the repository ...
+ repository = engine.getRepository("cars");
+
+ // Use a session to load the contents ...
+ Session session = repository.login();
+ try {
+ InputStream stream = resourceStream("io/cars-system-view.xml");
+ try {
+ session.getWorkspace().importXML("/", stream, ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW);
+ } catch (Throwable t) {
+ t.printStackTrace();
+ } finally {
+ stream.close();
+ }
+ } finally {
+ session.logout();
+ }
+ }
+
+ @AfterClass
+ public static void afterAll() throws Exception {
+ engine.shutdown();
+ engine.awaitTermination(3, TimeUnit.SECONDS);
+ engine = null;
+ configuration = null;
+ }
+
+ @Before
+ public void beforeEach() throws Exception {
+ // Obtain a session using the anonymous login capability, to which we granted the READ privilege
+ session = repository.login();
+ }
+
+ @After
+ public void afterEach() throws Exception {
+ if (session != null) {
+ try {
+ session.logout();
+ } finally {
+ session = null;
+ }
+ }
+ }
+
+ protected Name name( String name ) {
+ return engine.getExecutionContext().getValueFactories().getNameFactory().create(name);
+ }
+
+ protected Segment segment( String segment ) {
+ return engine.getExecutionContext().getValueFactories().getPathFactory().createSegment(segment);
+ }
+
+ protected List<Segment> segments( String... segments ) {
+ List<Segment> result = new ArrayList<Segment>();
+ for (String segment : segments) {
+ result.add(segment(segment));
+ }
+ return result;
+ }
+
+ protected void assertResultsHaveColumns( QueryResult result,
+ String... columnNames ) throws RepositoryException {
+ Set<String> expectedNames = new HashSet<String>();
+ for (String name : columnNames) {
+ expectedNames.add(name);
+ }
+ Set<String> actualNames = new HashSet<String>();
+ for (String name : result.getColumnNames()) {
+ actualNames.add(name);
+ }
+ assertThat(actualNames, is(expectedNames));
+ }
+
+ @Test
+ public void shouldStartUp() {
+ assertThat(engine.getRepositoryService(), is(notNullValue()));
+ }
+
+ @Test
+ public void shouldHaveLoadedContent() throws RepositoryException {
+ Node node = session.getRootNode().getNode("Cars");
+ assertThat(node, is(notNullValue()));
+ assertThat(node.hasNode("Sports"), is(true));
+ assertThat(node.hasNode("Utility"), is(true));
+ assertThat(node.hasNode("Hybrid"), is(true));
+ System.out.println(node.getNode("Hybrid").getNodes().nextNode().getPath());
+ assertThat(node.hasNode("Hybrid/Toyota Prius"), is(true));
+ assertThat(node.getPrimaryNodeType().getName(), is("nt:unstructured"));
+ }
+
+ @Test
+ public void shouldReturnQueryManagerFromWorkspace() throws RepositoryException {
+ assertThat(session.getWorkspace().getQueryManager(), is(notNullValue()));
+ }
+
+ // ----------------------------------------------------------------------------------------------------------------
+ // JCR2-SQL Queries
+ // ----------------------------------------------------------------------------------------------------------------
+
+ @Test
+ public void shouldBeAbleToCreateAndExecuteSqlQuery() throws RepositoryException {
+ Query query = session.getWorkspace().getQueryManager().createQuery("SELECT * FROM [nt:base]", QueryLanguage.SQL);
+ assertThat(query, is(notNullValue()));
+ QueryResult result = query.execute();
+ assertThat(result, is(notNullValue()));
+ assertResultsHaveColumns(result, "jcr:primaryType");
+ System.out.println(result);
+ }
+
+ // ----------------------------------------------------------------------------------------------------------------
+ // XPath Queries
+ // ----------------------------------------------------------------------------------------------------------------
+
+ @Test
+ public void shouldBeAbleToCreateXPathQuery() throws RepositoryException {
+ Query query = session.getWorkspace().getQueryManager().createQuery("//element(*,nt:unstructured)", Query.XPATH);
+ assertThat(query, is(notNullValue()));
+ }
+
+ @Test
+ public void shouldBeAbleToExecuteXPathQueryToFindAllNodes() throws RepositoryException {
+ Query query = session.getWorkspace().getQueryManager().createQuery("//element(*,nt:base)", Query.XPATH);
+ assertThat(query, is(notNullValue()));
+ QueryResult result = query.execute();
+ assertThat(result, is(notNullValue()));
+ assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
+ }
+
+ @Test
+ public void shouldBeAbleToExecuteXPathQueryToFindAllUnstructuredNodes() throws RepositoryException {
+ Query query = session.getWorkspace().getQueryManager().createQuery("//element(*,nt:unstructured)", Query.XPATH);
+ assertThat(query, is(notNullValue()));
+ QueryResult result = query.execute();
+ assertThat(result, is(notNullValue()));
+ assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
+ }
+
+ @Test
+ public void shouldBeAbleToExecuteXPathQueryToFindAllCarNodes() throws RepositoryException {
+ Query query = session.getWorkspace().getQueryManager().createQuery("//element(*,car:Car)", Query.XPATH);
+ assertThat(query, is(notNullValue()));
+ QueryResult result = query.execute();
+ assertThat(result, is(notNullValue()));
+ System.out.println(result);
+ assertResultsHaveColumns(result,
+ "jcr:primaryType",
+ "jcr:path",
+ "jcr:score",
+ "car:mpgCity",
+ "car:userRating",
+ "car:mpgHighway",
+ "car:engine",
+ "car:model",
+ "car:year",
+ "car:maker",
+ "car:lengthInInches",
+ "car:valueRating",
+ "car:wheelbaseInInches",
+ "car:msrp");
+ }
+
+ @Test
+ public void shouldBeAbleToExecuteXPathQueryToFindRootNode() throws RepositoryException {
+ Query query = session.getWorkspace().getQueryManager().createQuery("/jcr:root", Query.XPATH);
+ assertThat(query, is(notNullValue()));
+ QueryResult result = query.execute();
+ assertThat(result, is(notNullValue()));
+ System.out.println(result);
+ assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
+ }
+
+ // @Test
+ // public void shouldBeAbleToExecuteXPathQueryToFindChildOfRootNode() throws RepositoryException {
+ // Query query = session.getWorkspace().getQueryManager().createQuery("/jcr:root/Cars", Query.XPATH);
+ // assertThat(query, is(notNullValue()));
+ // QueryResult result = query.execute();
+ // assertThat(result, is(notNullValue()));
+ // System.out.println(result);
+ // assertResultsHaveColumns(result, "jcr:primaryType", "jcr:path", "jcr:score");
+ // }
+}
Property changes on: trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrQueryManagerTest.java
___________________________________________________________________
Name: svn:keywords
+ Id Revision
Name: svn:eol-style
+ LF
Modified: trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java
===================================================================
--- trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/JcrTckTest.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -186,7 +186,7 @@
addTestSuite(org.apache.jackrabbit.test.api.query.GetPersistentQueryPathLevel1Test.class);
addTestSuite(org.apache.jackrabbit.test.api.query.GetStatementTest.class);
addTestSuite(org.apache.jackrabbit.test.api.query.GetSupportedQueryLanguagesTest.class);
- // addTestSuite(org.apache.jackrabbit.test.api.query.GetPropertyNamesTest.class);
+ addTestSuite(org.apache.jackrabbit.test.api.query.GetPropertyNamesTest.class);
addTestSuite(org.apache.jackrabbit.test.api.query.PredicatesTest.class);
// addTestSuite(org.apache.jackrabbit.test.api.query.SimpleSelectionTest.class);
Modified: trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslatorTest.java
===================================================================
--- trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslatorTest.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/dna-jcr/src/test/java/org/jboss/dna/jcr/xpath/XPathToQueryTranslatorTest.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -65,7 +65,6 @@
@Test
public void shouldTranslateFromXPathOfAnyNode() {
- assertThat(xpath("/jcr:root"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1"));
assertThat(xpath("//element(*)"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1"));
assertThat(xpath("/jcr:root//element(*)"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1"));
assertThat(xpath("//*"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1"));
@@ -75,9 +74,13 @@
}
@Test
+ public void shouldTranslateFromXPathContainingExplicitRootPath() {
+ assertThat(xpath("/jcr:root"), isSql("SELECT * FROM [nt:base] AS nodeSet1 WHERE PATH(nodeSet1) = '/'"));
+ }
+
+ @Test
public void shouldTranslateFromXPathContainingExplicitPath() {
- assertThat(xpath("/jcr:root/a"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE NAME(nodeSet1) = 'a' AND DEPTH(nodeSet1) = CAST(1 AS LONG)"));
+ assertThat(xpath("/jcr:root/a"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a'"));
assertThat(xpath("/jcr:root/a/b"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b'"));
assertThat(xpath("/jcr:root/a/b/c"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b/c'"));
assertThat(xpath("/jcr:root/a/b/c/d"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/a/b/c/d'"));
@@ -225,10 +228,9 @@
isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName' AND DEPTH(nodeSet1) = CAST(1 AS LONG)"));
assertThat(xpath("/jcr:root/nodeName"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName' AND DEPTH(nodeSet1) = CAST(1 AS LONG)"));
+ isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/nodeName'"));
- assertThat(xpath("nodeName"),
- isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE NAME(nodeSet1) = 'nodeName' AND DEPTH(nodeSet1) = CAST(1 AS LONG)"));
+ assertThat(xpath("nodeName"), isSql("SELECT * FROM __ALLNODES__ AS nodeSet1 WHERE PATH(nodeSet1) = '/nodeName'"));
}
@Test
Added: trunk/dna-jcr/src/test/resources/cars.cnd
===================================================================
--- trunk/dna-jcr/src/test/resources/cars.cnd (rev 0)
+++ trunk/dna-jcr/src/test/resources/cars.cnd 2009-12-31 16:07:24 UTC (rev 1505)
@@ -0,0 +1,50 @@
+/*
+ * JBoss DNA (http://www.jboss.org/dna)
+ * See the COPYRIGHT.txt file distributed with this work for information
+ * regarding copyright ownership. Some portions may be licensed
+ * to Red Hat, Inc. under one or more contributor license agreements.
+ * See the AUTHORS.txt file in the distribution for a full listing of
+ * individual contributors.
+ *
+ * JBoss DNA is free software. Unless otherwise indicated, all code in JBoss DNA
+ * is licensed to you under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * JBoss DNA is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+
+//------------------------------------------------------------------------------
+// N A M E S P A C E S
+//------------------------------------------------------------------------------
+<jcr='http://www.jcp.org/jcr/1.0'>
+<nt='http://www.jcp.org/jcr/nt/1.0'>
+<mix='http://www.jcp.org/jcr/mix/1.0'>
+<car='http://www.jboss.org/dna/examples/cars/1.0'>
+
+//------------------------------------------------------------------------------
+// N O D E T Y P E S
+//------------------------------------------------------------------------------
+
+[car:Car] > nt:unstructured
+ - car:maker (string)
+ - car:model (string)
+ - car:year (string) < '(19|20)\d{2}' // any 4 digit number starting with '19' or '20'
+ - car:msrp (string) < '[$]\d{1,3}[,]?\d{3}([.]\d{2})?' // of the form "$X,XXX.ZZ", "$XX,XXX.ZZ" or "$XXX,XXX.ZZ"
+ // where '.ZZ' is optional
+ - car:userRating (long) < '[1,5]' // any value from 1 to 5 (inclusive)
+ - car:valueRating (long) < '[1,5]' // any value from 1 to 5 (inclusive)
+ - car:mpgCity (long) < '(0,]' // any value greater than 0
+ - car:mpgHighway (long) < '(0,]' // any value greater than 0
+ - car:lengthInInches (double) < '(0,]' // any value greater than 0
+ - car:wheelbaseInInches (double) < '(0,]' // any value greater than 0
+ - car:engine (string)
+
Added: trunk/dna-jcr/src/test/resources/io/cars-system-view.xml
===================================================================
--- trunk/dna-jcr/src/test/resources/io/cars-system-view.xml (rev 0)
+++ trunk/dna-jcr/src/test/resources/io/cars-system-view.xml 2009-12-31 16:07:24 UTC (rev 1505)
@@ -0,0 +1,147 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<sv:node xmlns:jcr="http://www.jcp.org/jcr/1.0"
+ xmlns:nt="http://www.jcp.org/jcr/nt/1.0"
+ xmlns:sv="http://www.jcp.org/jcr/sv/1.0"
+ xmlns:car="http://www.jboss.org/dna/examples/cars/1.0"
+ sv:name="Cars">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>nt:unstructured</sv:value></sv:property>
+ <sv:node sv:name="Hybrid">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>nt:unstructured</sv:value></sv:property>
+ <sv:node sv:name="Toyota Prius">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Toyota</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>Prius</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$21,500</sv:value></sv:property>
+ <sv:property sv:name="car:userRating" sv:type="Long"><sv:value>4</sv:value></sv:property>
+ <sv:property sv:name="car:valueRating" sv:type="Long"><sv:value>5</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>48</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>45</sv:value></sv:property>
+ </sv:node>
+ <sv:node sv:name="Toyota Highlander">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Toyota</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>Highlander</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$34,200</sv:value></sv:property>
+ <sv:property sv:name="car:userRating" sv:type="Long"><sv:value>4</sv:value></sv:property>
+ <sv:property sv:name="car:valueRating" sv:type="Long"><sv:value>5</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>27</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>25</sv:value></sv:property>
+ </sv:node>
+ <sv:node sv:name="Nissan Altima">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Nissan</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>Altima</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$18,260</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>23</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>32</sv:value></sv:property>
+ </sv:node>
+ </sv:node>
+ <sv:node sv:name="Sports">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>nt:unstructured</sv:value></sv:property>
+ <sv:node sv:name="Aston Martin DB9">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Aston Martin</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>DB9</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$171,600</sv:value></sv:property>
+ <sv:property sv:name="car:userRating" sv:type="Long"><sv:value>5</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>12</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>19</sv:value></sv:property>
+ <sv:property sv:name="car:lengthInInches" sv:type="Double"><sv:value>185.5</sv:value></sv:property>
+ <sv:property sv:name="car:wheelbaseInInches" sv:type="Double"><sv:value>108.0</sv:value></sv:property>
+ <sv:property sv:name="car:engine" sv:type="String"><sv:value>5,935 cc 5.9 liters V 12</sv:value></sv:property>
+ </sv:node>
+ <sv:node sv:name="Infiniti G37">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Infiniti</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>G37</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$34,900</sv:value></sv:property>
+ <sv:property sv:name="car:userRating" sv:type="Long"><sv:value>3</sv:value></sv:property>
+ <sv:property sv:name="car:valueRating" sv:type="Long"><sv:value>4</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>18</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>24</sv:value></sv:property>
+ </sv:node>
+ </sv:node>
+ <sv:node sv:name="Luxury">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>nt:unstructured</sv:value></sv:property>
+ <sv:node sv:name="Cadillac DTS">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Cadillac</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>DTS</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:userRating" sv:type="Long"><sv:value>1</sv:value></sv:property>
+ <sv:property sv:name="car:engine" sv:type="String"><sv:value>3.6-liter V6</sv:value></sv:property>
+ </sv:node>
+ <sv:node sv:name="Bentley Continental">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Bentley</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>Continental</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$170,990</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>10</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>17</sv:value></sv:property>
+ </sv:node>
+ <sv:node sv:name="Lexus IS350">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Lexus</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>IS350</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$36,305</sv:value></sv:property>
+ <sv:property sv:name="car:userRating" sv:type="Long"><sv:value>4</sv:value></sv:property>
+ <sv:property sv:name="car:valueRating" sv:type="Long"><sv:value>5</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>18</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>25</sv:value></sv:property>
+ </sv:node>
+ </sv:node>
+ <sv:node sv:name="Utility">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>nt:unstructured</sv:value></sv:property>
+ <sv:node sv:name="Land Rover LR2">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Land Rover</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>LR2</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$33,985</sv:value></sv:property>
+ <sv:property sv:name="car:userRating" sv:type="Long"><sv:value>4</sv:value></sv:property>
+ <sv:property sv:name="car:valueRating" sv:type="Long"><sv:value>5</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>16</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>23</sv:value></sv:property>
+ </sv:node>
+ <sv:node sv:name="Land Rover LR3">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Land Rover</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>LR3</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$48,525</sv:value></sv:property>
+ <sv:property sv:name="car:userRating" sv:type="Long"><sv:value>5</sv:value></sv:property>
+ <sv:property sv:name="car:valueRating" sv:type="Long"><sv:value>2</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>12</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>17</sv:value></sv:property>
+ </sv:node>
+ <sv:node sv:name="Hummer H3">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Hummer</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>H3</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$30,595</sv:value></sv:property>
+ <sv:property sv:name="car:userRating" sv:type="Long"><sv:value>3</sv:value></sv:property>
+ <sv:property sv:name="car:valueRating" sv:type="Long"><sv:value>4</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>13</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>16</sv:value></sv:property>
+ </sv:node>
+ <sv:node sv:name="Ford F-150">
+ <sv:property sv:name="jcr:primaryType" sv:type="Name"><sv:value>car:Car</sv:value></sv:property>
+ <sv:property sv:name="car:maker" sv:type="String"><sv:value>Ford</sv:value></sv:property>
+ <sv:property sv:name="car:model" sv:type="String"><sv:value>F-150</sv:value></sv:property>
+ <sv:property sv:name="car:year" sv:type="String"><sv:value>2008</sv:value></sv:property>
+ <sv:property sv:name="car:msrp" sv:type="String"><sv:value>$23,910</sv:value></sv:property>
+ <sv:property sv:name="car:userRating" sv:type="Long"><sv:value>5</sv:value></sv:property>
+ <sv:property sv:name="car:valueRating" sv:type="Long"><sv:value>1</sv:value></sv:property>
+ <sv:property sv:name="car:mpgCity" sv:type="Long"><sv:value>14</sv:value></sv:property>
+ <sv:property sv:name="car:mpgHighway" sv:type="Long"><sv:value>20</sv:value></sv:property>
+ </sv:node>
+ </sv:node>
+</sv:node>
Property changes on: trunk/dna-jcr/src/test/resources/io/cars-system-view.xml
___________________________________________________________________
Name: svn:keywords
+ Id Revision
Name: svn:eol-style
+ LF
Added: trunk/docs/examples/gettingstarted/repositories/src/test/resources/log4j.properties
===================================================================
--- trunk/docs/examples/gettingstarted/repositories/src/test/resources/log4j.properties (rev 0)
+++ trunk/docs/examples/gettingstarted/repositories/src/test/resources/log4j.properties 2009-12-31 16:07:24 UTC (rev 1505)
@@ -0,0 +1,11 @@
+# Direct log messages to stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %m%n
+
+# Root logger option
+log4j.rootLogger=WARNING, stdout
+
+# Set up the default logging to be INFO level, then override specific units
+log4j.logger.org.jboss.dna=WARNING
Modified: trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchProcessor.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchProcessor.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchProcessor.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -45,7 +45,6 @@
import org.jboss.dna.graph.Location;
import org.jboss.dna.graph.observe.Observer;
import org.jboss.dna.graph.property.DateTime;
-import org.jboss.dna.graph.property.NamespaceRegistry;
import org.jboss.dna.graph.property.Path;
import org.jboss.dna.graph.property.Property;
import org.jboss.dna.graph.query.QueryResults.Columns;
@@ -81,9 +80,10 @@
protected static final TextEncoder NAMESPACE_ENCODER = new SecureHashTextEncoder(Algorithm.SHA_1, 10);
protected static ExecutionContext contextWithEncodedNamespaces( ExecutionContext context ) {
- NamespaceRegistry encodingRegistry = new EncodingNamespaceRegistry(context.getNamespaceRegistry(), NAMESPACE_ENCODER);
- ExecutionContext encodingContext = context.with(encodingRegistry);
- return encodingContext;
+ return context;
+ // NamespaceRegistry encodingRegistry = new EncodingNamespaceRegistry(context.getNamespaceRegistry(), NAMESPACE_ENCODER);
+ // ExecutionContext encodingContext = context.with(encodingRegistry);
+ // return encodingContext;
}
protected static final Columns FULL_TEXT_RESULT_COLUMNS = new FullTextSearchResultColumns();
Modified: trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchSession.java
===================================================================
--- trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchSession.java 2009-12-31 14:14:56 UTC (rev 1504)
+++ trunk/extensions/dna-search-lucene/src/main/java/org/jboss/dna/search/lucene/LuceneSearchSession.java 2009-12-31 16:07:24 UTC (rev 1505)
@@ -646,8 +646,14 @@
protected String getIdFor( Path path ) throws IOException {
// Create a query to find all the nodes below the parent path ...
IndexSearcher searcher = getPathsSearcher();
- String stringifiedPath = processor.pathAsString(path);
- TermQuery query = new TermQuery(new Term(PathIndex.PATH, stringifiedPath));
+ Query query = null;
+ if (path.isRoot()) {
+ // Look for the query
+ query = NumericRangeQuery.newIntRange(PathIndex.DEPTH, 0, 0, true, true);
+ } else {
+ String stringifiedPath = processor.pathAsString(path);
+ query = new TermQuery(new Term(PathIndex.PATH, stringifiedPath));
+ }
// Now execute and collect the UUIDs ...
TopDocs topDocs = searcher.search(query, 1);
14 years, 3 months
DNA SVN: r1504 - trunk/dna-repository/src/main/java/org/jboss/dna/repository.
by dna-commits@lists.jboss.org
Author: bcarothers
Date: 2009-12-31 09:14:56 -0500 (Thu, 31 Dec 2009)
New Revision: 1504
Modified:
trunk/dna-repository/src/main/java/org/jboss/dna/repository/DnaEngine.java
Log:
DNA-609 DnaEngine RepositoryConnectionFactory Returns Unpooled Connections
Applied a patch that removes DnaEngine's internal connection factory and replaces it with getRepositoryService().getRepositoryLibrary(). This will return connections that have been wrapped/pooled by a RepositoryConnectionPool.
Modified: trunk/dna-repository/src/main/java/org/jboss/dna/repository/DnaEngine.java
===================================================================
--- trunk/dna-repository/src/main/java/org/jboss/dna/repository/DnaEngine.java 2009-12-31 14:08:13 UTC (rev 1503)
+++ trunk/dna-repository/src/main/java/org/jboss/dna/repository/DnaEngine.java 2009-12-31 14:14:56 UTC (rev 1504)
@@ -46,7 +46,6 @@
import org.jboss.dna.graph.Location;
import org.jboss.dna.graph.Node;
import org.jboss.dna.graph.Subgraph;
-import org.jboss.dna.graph.connector.RepositoryConnection;
import org.jboss.dna.graph.connector.RepositoryConnectionFactory;
import org.jboss.dna.graph.connector.RepositoryContext;
import org.jboss.dna.graph.connector.RepositorySource;
@@ -84,8 +83,6 @@
private final ExecutorService executorService;
private final MimeTypeDetectors detectors;
- private final RepositoryConnectionFactory connectionFactory;
-
protected DnaEngine( ExecutionContext context,
DnaConfiguration.ConfigurationDefinition configuration ) {
this.problems = new SimpleProblems();
@@ -130,20 +127,9 @@
for (SequencerConfig sequencerConfig : scanner.getSequencingConfigurations()) {
sequencingService.addSequencer(sequencerConfig);
}
-
- // Set up the connection factory for this engine ...
- connectionFactory = new RepositoryConnectionFactory() {
- public RepositoryConnection createConnection( String sourceName ) throws RepositorySourceException {
- RepositorySource source = DnaEngine.this.getRepositorySource(sourceName);
- if (source == null) {
- throw new RepositorySourceException(sourceName);
- }
-
- return source.getConnection();
- }
- };
}
+
/**
* Get the problems that were encountered when setting up this engine from the configuration.
*
@@ -182,7 +168,7 @@
*/
public final RepositoryConnectionFactory getRepositoryConnectionFactory() {
checkRunning();
- return connectionFactory;
+ return repositoryService.getRepositoryLibrary();
}
/**
14 years, 3 months
DNA SVN: r1503 - in trunk: dna-graph/src/main/java/org/jboss/dna/graph/connector and 3 other directories.
by dna-commits@lists.jboss.org
Author: bcarothers
Date: 2009-12-31 09:08:13 -0500 (Thu, 31 Dec 2009)
New Revision: 1503
Modified:
trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java
trunk/dna-graph/src/main/java/org/jboss/dna/graph/connector/RepositoryConnectionPool.java
trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties
trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/optimize/RuleBasedOptimizerTest.java
trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/model/simple/SimpleJpaConnection.java
Log:
DNA-608 RepositoryConnectionPool.ConnectionWrapper.close Closes the Underlying Connection
Applied a patch that changes the ConnectionWrapper close behavior to not attempt to close the underlying connection. This allowed a graph message to be retired and forced compensating changes in SimpleJpaConnector.
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java 2009-12-31 13:47:17 UTC (rev 1502)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/GraphI18n.java 2009-12-31 14:08:13 UTC (rev 1503)
@@ -33,7 +33,6 @@
*/
public final class GraphI18n {
- public static I18n closedConnectionMayNotBeUsed;
public static I18n errorConvertingIo;
public static I18n errorConvertingType;
public static I18n errorReadingPropertyValueBytes;
Modified: trunk/dna-graph/src/main/java/org/jboss/dna/graph/connector/RepositoryConnectionPool.java
===================================================================
--- trunk/dna-graph/src/main/java/org/jboss/dna/graph/connector/RepositoryConnectionPool.java 2009-12-31 13:47:17 UTC (rev 1502)
+++ trunk/dna-graph/src/main/java/org/jboss/dna/graph/connector/RepositoryConnectionPool.java 2009-12-31 14:08:13 UTC (rev 1503)
@@ -779,8 +779,10 @@
mainLock.lock();
// Remove the connection from the in-use set ...
boolean removed = this.inUseConnections.remove(wrapper);
- assert removed;
+ // This means that the wrapper was already closed at least once since the last time it was opened
+ if (!removed) return;
+
// If we're shutting down the pool, then just close the connection ...
if (this.runState != RUNNING) {
wrapperToClose = wrapper;
@@ -932,7 +934,6 @@
private final RepositoryConnection original;
private final long timeCreated;
private long lastUsed;
- private boolean closed = false;
protected ConnectionWrapper( RepositoryConnection connection ) {
assert connection != null;
@@ -972,7 +973,6 @@
* {@inheritDoc}
*/
public XAResource getXAResource() {
- if (closed) throw new IllegalStateException(GraphI18n.closedConnectionMayNotBeUsed.text());
return this.original.getXAResource();
}
@@ -980,7 +980,6 @@
* {@inheritDoc}
*/
public CachePolicy getDefaultCachePolicy() {
- if (closed) throw new IllegalStateException(GraphI18n.closedConnectionMayNotBeUsed.text());
return this.original.getDefaultCachePolicy();
}
@@ -992,7 +991,6 @@
*/
public void execute( ExecutionContext context,
Request request ) throws RepositorySourceException {
- if (closed) throw new IllegalStateException(GraphI18n.closedConnectionMayNotBeUsed.text());
this.original.execute(context, request);
}
@@ -1001,7 +999,6 @@
*/
public boolean ping( long time,
TimeUnit unit ) throws InterruptedException {
- if (closed) throw new IllegalStateException(GraphI18n.closedConnectionMayNotBeUsed.text());
return this.original.ping(time, unit);
}
@@ -1009,12 +1006,8 @@
* {@inheritDoc}
*/
public void close() {
- if (!closed) {
- this.lastUsed = System.currentTimeMillis();
- this.original.close();
- this.closed = true;
- returnConnection(this);
- }
+ this.lastUsed = System.currentTimeMillis();
+ returnConnection(this);
}
}
Modified: trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties
===================================================================
--- trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties 2009-12-31 13:47:17 UTC (rev 1502)
+++ trunk/dna-graph/src/main/resources/org/jboss/dna/graph/GraphI18n.properties 2009-12-31 14:08:13 UTC (rev 1503)
@@ -21,7 +21,6 @@
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA, or see the FSF site: http://www.fsf.org.
#
-closedConnectionMayNotBeUsed = The connection has been closed an may not be used
errorConvertingIo = Error converting {0} to a {1}
errorConvertingType = Error converting {0} to a {1}: {2}
errorReadingPropertyValueBytes = Error reading bytes
Modified: trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/optimize/RuleBasedOptimizerTest.java
===================================================================
--- trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/optimize/RuleBasedOptimizerTest.java 2009-12-31 13:47:17 UTC (rev 1502)
+++ trunk/dna-graph/src/test/java/org/jboss/dna/graph/query/optimize/RuleBasedOptimizerTest.java 2009-12-31 14:08:13 UTC (rev 1503)
@@ -68,7 +68,7 @@
private List<Integer> ruleExecutionOrder;
private QueryContext context;
private PlanNode node;
- private boolean print = false;
+ private final boolean print = false;
@Before
public void beforeEach() {
@@ -139,7 +139,7 @@
public PlanNode execute( QueryContext context,
PlanNode plan,
LinkedList<OptimizerRule> ruleStack ) {
- context.getProblems().addError(GraphI18n.closedConnectionMayNotBeUsed);
+ context.getProblems().addError(GraphI18n.errorReadingPropertyValueBytes);
return plan;
}
});
Modified: trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/model/simple/SimpleJpaConnection.java
===================================================================
--- trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/model/simple/SimpleJpaConnection.java 2009-12-31 13:47:17 UTC (rev 1502)
+++ trunk/extensions/dna-connector-store-jpa/src/main/java/org/jboss/dna/connector/store/jpa/model/simple/SimpleJpaConnection.java 2009-12-31 14:08:13 UTC (rev 1503)
@@ -45,24 +45,20 @@
@NotThreadSafe
public class SimpleJpaConnection implements RepositoryConnection {
- private final SimpleJpaRepository repository;
+ private SimpleJpaRepository repository;
private final JpaSource source;
private EntityManager entityManager;
public SimpleJpaConnection( JpaSource source ) {
this.source = source;
- this.entityManager = source.getEntityManagers().checkout();
- this.entityManager.getTransaction().begin();
- this.repository = new SimpleJpaRepository(source.getName(), source.getRootUuid(), source.getDefaultWorkspaceName(),
- source.getPredefinedWorkspaceNames(), entityManager,
- source.getRepositoryContext().getExecutionContext(), source.isCompressData(),
- source.isCreatingWorkspacesAllowed(), source.getLargeValueSizeInBytes());
}
public boolean ping( long time,
TimeUnit unit ) {
- return entityManager != null && entityManager.isOpen();
+ // Most pings will occur before or after an execute() call, when there is no entityManger
+ // If there is no entity manager, the connection is still valid!
+ return entityManager == null || entityManager.isOpen();
}
public CachePolicy getDefaultCachePolicy() {
@@ -77,7 +73,18 @@
return null;
}
- public void close() {
+ private void acquireRepository() {
+ this.entityManager = source.getEntityManagers().checkout();
+ this.entityManager.getTransaction().begin();
+ this.repository = new SimpleJpaRepository(source.getName(), source.getRootUuid(), source.getDefaultWorkspaceName(),
+ source.getPredefinedWorkspaceNames(), entityManager,
+ source.getRepositoryContext().getExecutionContext(), source.isCompressData(),
+ source.isCreatingWorkspacesAllowed(), source.getLargeValueSizeInBytes());
+
+ }
+
+ private void releaseRepository() {
+ this.repository = null;
if (entityManager != null) {
try {
source.getEntityManagers().checkin(entityManager);
@@ -85,8 +92,12 @@
entityManager = null;
}
}
+
}
+ public void close() {
+ }
+
/**
* {@inheritDoc}
*
@@ -101,6 +112,9 @@
sw = new Stopwatch();
sw.start();
}
+
+ acquireRepository();
+
// Do any commands update/write?
Observer observer = this.source.getRepositoryContext().getObserver();
RequestProcessor processor = new SimpleRequestProcessor(context, this.repository, observer, source.areUpdatesAllowed());
@@ -139,6 +153,9 @@
}
}
}
+
+ releaseRepository();
+
if (logger.isTraceEnabled()) {
assert sw != null;
sw.stop();
14 years, 3 months
DNA SVN: r1502 - trunk/extensions/dna-connector-svn/src/test/java/org/jboss/dna/connector/svn.
by dna-commits@lists.jboss.org
Author: spagop
Date: 2009-12-31 08:47:17 -0500 (Thu, 31 Dec 2009)
New Revision: 1502
Modified:
trunk/extensions/dna-connector-svn/src/test/java/org/jboss/dna/connector/svn/SVNRespositoryConnectorWriteableTest.java
Log:
OPEN - issue DNA-607: Delete node/folder or rather item/file from the svn repository through dna svn connector
https://jira.jboss.org/jira/browse/DNA-607
Modified: trunk/extensions/dna-connector-svn/src/test/java/org/jboss/dna/connector/svn/SVNRespositoryConnectorWriteableTest.java
===================================================================
--- trunk/extensions/dna-connector-svn/src/test/java/org/jboss/dna/connector/svn/SVNRespositoryConnectorWriteableTest.java 2009-12-31 13:46:43 UTC (rev 1501)
+++ trunk/extensions/dna-connector-svn/src/test/java/org/jboss/dna/connector/svn/SVNRespositoryConnectorWriteableTest.java 2009-12-31 13:47:17 UTC (rev 1502)
@@ -24,6 +24,7 @@
package org.jboss.dna.connector.svn;
import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.junit.Assert.assertThat;
import java.io.ByteArrayOutputStream;
@@ -52,6 +53,7 @@
protected SVNNodeKind kind = null;
protected SVNProperties fileProperties = null;
protected ByteArrayOutputStream baos = null;
+
/**
* {@inheritDoc}
*
@@ -75,7 +77,7 @@
return source;
}
-
+
/**
* {@inheritDoc}
*
@@ -95,12 +97,12 @@
remoteRepos = null;
super.afterEach();
}
-
+
@Test( expected = RepositorySourceException.class )
public void shouldNotBeAbleToCreateInvalidTypeForRepository() {
graph.create("/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.UNSTRUCTURED).orReplace().and();
}
-
+
@Test( expected = RepositorySourceException.class )
public void shouldNotBeAbleToSetArbitraryProperties() {
graph.create("/testFile").with(JcrLexicon.MIXIN_TYPES, JcrMixLexicon.LOCKABLE).orReplace().and();
@@ -109,7 +111,6 @@
@Test
public void shouldBeAbleToCreateNodeFileWithContentLevel1() throws Exception {
-
// LEVEL 0
graph.create("/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
graph.create("/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
@@ -141,11 +142,11 @@
baos = new ByteArrayOutputStream();
remoteRepos.getFile("root/testFile", -1, fileProperties, baos);
assertContents(baos, TEST_CONTENT);
-
+
// LEVEL 2
graph.create("/root/a/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
graph.create("/root/a/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
- TEST_CONTENT.getBytes()).orReplace().and();
+ TEST_CONTENT.getBytes()).orReplace().and();
kind = remoteRepos.checkPath("root/a/testFile", -1);
assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
fileProperties = new SVNProperties();
@@ -153,22 +154,15 @@
remoteRepos.getFile("root/a/testFile", -1, fileProperties, baos);
assertContents(baos, TEST_CONTENT);
}
-
-
+
@Test
public void shouldRespectConflictBehaviorOnCreate() throws Exception {
graph.create("/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
- graph.create("/testFile/jcr:content")
- .with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE)
- .and(JcrLexicon.DATA, TEST_CONTENT.getBytes())
- .orReplace()
- .and();
+ graph.create("/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
+ TEST_CONTENT.getBytes()).orReplace().and();
- graph.create("/testFile/jcr:content")
- .with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE)
- .and(JcrLexicon.DATA, "Should not overwrite".getBytes())
- .ifAbsent()
- .and();
+ graph.create("/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
+ "Should not overwrite".getBytes()).ifAbsent().and();
kind = remoteRepos.checkPath("testFile", -1);
assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
@@ -177,7 +171,7 @@
remoteRepos.getFile("testFile", -1, fileProperties, baos);
assertContents(baos, TEST_CONTENT);
}
-
+
@Test
public void shouldBeAbleToCreateFileWithNoContent() throws Exception {
graph.create("/testEmptyFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
@@ -189,25 +183,25 @@
remoteRepos.getFile("testEmptyFile", -1, fileProperties, baos);
assertContents(baos, EMPTY_CONTENT);
}
-
+
@Test
public void shouldBeAbleToCreateFolder() throws Exception {
graph.create("/testFolder").orReplace().and();
kind = remoteRepos.checkPath("testFolder", -1);
assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
-
+
graph.create("/root/testFolder").orReplace().and();
kind = remoteRepos.checkPath("root/testFolder", -1);
assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
-
+
graph.create("/root/a/testFolder").orReplace().and();
kind = remoteRepos.checkPath("root/a/testFolder", -1);
assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
}
-
+
@Test
public void shouldBeAbleToAddChildrenToFolder() throws Exception {
graph.create("/testFolder").orReplace().and();
@@ -216,11 +210,8 @@
assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
graph.create("/testFolder/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
- graph.create("/testFolder/testFile/jcr:content")
- .with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE)
- .and(JcrLexicon.DATA, TEST_CONTENT.getBytes())
- .orReplace()
- .and();
+ graph.create("/testFolder/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
+ TEST_CONTENT.getBytes()).orReplace().and();
kind = remoteRepos.checkPath("testFolder/testFile", -1);
assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
@@ -228,18 +219,15 @@
baos = new ByteArrayOutputStream();
remoteRepos.getFile("testFolder/testFile", -1, fileProperties, baos);
assertContents(baos, TEST_CONTENT);
-
+
graph.create("/root/testFolder").orReplace().and();
kind = remoteRepos.checkPath("root/testFolder", -1);
assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
graph.create("/root/testFolder/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
- graph.create("/root/testFolder/testFile/jcr:content")
- .with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE)
- .and(JcrLexicon.DATA, TEST_CONTENT.getBytes())
- .orReplace()
- .and();
+ graph.create("/root/testFolder/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
+ TEST_CONTENT.getBytes()).orReplace().and();
kind = remoteRepos.checkPath("root/testFolder/testFile", -1);
assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
@@ -249,61 +237,210 @@
assertContents(baos, TEST_CONTENT);
}
-// @Test
-// public void shouldBeAbleToCopyFile() throws Exception {
-// graph.create("/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
-// graph.create("/testFile/jcr:content")
-// .with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE)
-// .and(JcrLexicon.DATA, TEST_CONTENT.getBytes())
-// .orReplace()
-// .and();
-//
-// kind = remoteRepos.checkPath("testFile", -1);
-// assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
-// fileProperties = new SVNProperties();
-// baos = new ByteArrayOutputStream();
-// remoteRepos.getFile("testFile", -1, fileProperties, baos);
-// assertContents(baos, TEST_CONTENT);
-//
-// graph.copy("/testFile").to("/copiedFile");
-// kind = remoteRepos.checkPath("copiedFile", -1);
-// assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
-// fileProperties = new SVNProperties();
-// baos = new ByteArrayOutputStream();
-// remoteRepos.getFile("copiedFile", -1, fileProperties, baos);
-// assertContents(baos, TEST_CONTENT);
-// }
-//
-// @Test
-// public void shouldBeAbleToCopyFolder() throws Exception {
-// graph.create("/testFolder").orReplace().and();
-// graph.create("/testFolder/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
-// graph.create("/testFolder/testFile/jcr:content")
-// .with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE)
-// .and(JcrLexicon.DATA, TEST_CONTENT.getBytes())
-// .orReplace()
-// .and();
-//
-// kind = remoteRepos.checkPath("testFolder/testFile", -1);
-// assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
-// fileProperties = new SVNProperties();
-// baos = new ByteArrayOutputStream();
-// remoteRepos.getFile("testFolder/testFile", -1, fileProperties, baos);
-// assertContents(baos, TEST_CONTENT);
-//
-//
-// graph.copy("/testFolder").to("/copiedFolder");
-// kind = remoteRepos.checkPath("copiedFolder", -1);
-// assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
-// fileProperties = new SVNProperties();
-// baos = new ByteArrayOutputStream();
-// remoteRepos.getFile("copiedFolder/testFile", -1, fileProperties, baos);
-// assertContents(baos, TEST_CONTENT);
-// }
+ // @Test
+ // public void shouldBeAbleToCopyFile() throws Exception {
+ // graph.create("/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
+ // graph.create("/testFile/jcr:content")
+ // .with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE)
+ // .and(JcrLexicon.DATA, TEST_CONTENT.getBytes())
+ // .orReplace()
+ // .and();
+ //
+ // kind = remoteRepos.checkPath("testFile", -1);
+ // assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+ // fileProperties = new SVNProperties();
+ // baos = new ByteArrayOutputStream();
+ // remoteRepos.getFile("testFile", -1, fileProperties, baos);
+ // assertContents(baos, TEST_CONTENT);
+ //
+ // graph.copy("/testFile").to("/copiedFile");
+ // kind = remoteRepos.checkPath("copiedFile", -1);
+ // assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+ // fileProperties = new SVNProperties();
+ // baos = new ByteArrayOutputStream();
+ // remoteRepos.getFile("copiedFile", -1, fileProperties, baos);
+ // assertContents(baos, TEST_CONTENT);
+ // }
+ //
+ // @Test
+ // public void shouldBeAbleToCopyFolder() throws Exception {
+ // graph.create("/testFolder").orReplace().and();
+ // graph.create("/testFolder/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
+ // graph.create("/testFolder/testFile/jcr:content")
+ // .with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE)
+ // .and(JcrLexicon.DATA, TEST_CONTENT.getBytes())
+ // .orReplace()
+ // .and();
+ //
+ // kind = remoteRepos.checkPath("testFolder/testFile", -1);
+ // assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+ // fileProperties = new SVNProperties();
+ // baos = new ByteArrayOutputStream();
+ // remoteRepos.getFile("testFolder/testFile", -1, fileProperties, baos);
+ // assertContents(baos, TEST_CONTENT);
+ //
+ //
+ // graph.copy("/testFolder").to("/copiedFolder");
+ // kind = remoteRepos.checkPath("copiedFolder", -1);
+ // assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
+ // fileProperties = new SVNProperties();
+ // baos = new ByteArrayOutputStream();
+ // remoteRepos.getFile("copiedFolder/testFile", -1, fileProperties, baos);
+ // assertContents(baos, TEST_CONTENT);
+ // }
+
+ @Test
+ public void shouldBeAbleToDeleteFolderWithContents() throws Exception {
+ graph.create("/testFolder").orReplace().and();
+ graph.create("/testFolder/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
+ graph.create("/testFolder/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
+ TEST_CONTENT.getBytes()).orReplace().and();
+
+ kind = remoteRepos.checkPath("testFolder", -1);
+ assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
+
+ kind = remoteRepos.checkPath("testFolder/testFile", -1);
+ assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+ fileProperties = new SVNProperties();
+ baos = new ByteArrayOutputStream();
+ remoteRepos.getFile("testFolder/testFile", -1, fileProperties, baos);
+ assertContents(baos, TEST_CONTENT);
+
+ graph.delete("/testFolder");
+
+ kind = remoteRepos.checkPath("testFolder", -1);
+ assertThat(kind == SVNNodeKind.NONE, is(Boolean.TRUE));
+
+
+ }
+
+ @Test
+ public void shouldBeAbleToDeleteFile() throws Exception {
+ graph.create("/testFolder").orReplace().and();
+ graph.create("/testFolder/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
+ graph.create("/testFolder/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
+ TEST_CONTENT.getBytes()).orReplace().and();
+
+ kind = remoteRepos.checkPath("testFolder", -1);
+ assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
+
+ kind = remoteRepos.checkPath("testFolder/testFile", -1);
+ assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+ fileProperties = new SVNProperties();
+ baos = new ByteArrayOutputStream();
+ remoteRepos.getFile("testFolder/testFile", -1, fileProperties, baos);
+ assertContents(baos, TEST_CONTENT);
+
+ graph.delete("/testFolder/testFile");
+
+ kind = remoteRepos.checkPath("testFolder/testFile", -1);
+ assertThat(kind == SVNNodeKind.NONE, is(Boolean.TRUE));
+
+
+ graph.create("/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
+ graph.create("/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
+ TEST_CONTENT.getBytes()).orReplace().and();
+ kind = remoteRepos.checkPath("testFile", -1);
+ assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+ fileProperties = new SVNProperties();
+ baos = new ByteArrayOutputStream();
+ remoteRepos.getFile("testFile", -1, fileProperties, baos);
+ assertContents(baos, TEST_CONTENT);
+
+ graph.delete("/testFile");
+
+ kind = remoteRepos.checkPath("testFile", -1);
+ assertThat(kind == SVNNodeKind.NONE, is(Boolean.TRUE));
+ }
+ @Test
+ public void shouldBeAbleToDeleteOnlyTheFileContent() throws Exception {
+ graph.create("/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
+ graph.create("/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
+ TEST_CONTENT.getBytes()).orReplace().and();
+ kind = remoteRepos.checkPath("testFile", -1);
+ assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+ fileProperties = new SVNProperties();
+ baos = new ByteArrayOutputStream();
+ remoteRepos.getFile("testFile", -1, fileProperties, baos);
+ assertContents(baos, TEST_CONTENT);
+
+ graph.delete("/testFile/jcr:content");
+
+ kind = remoteRepos.checkPath("testFile", -1);
+ assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+
+ fileProperties = new SVNProperties();
+ baos = new ByteArrayOutputStream();
+ remoteRepos.getFile("testFile", -1, fileProperties, baos);
+
+ assertEmptyContents(baos, TEST_CONTENT);
+
+ graph.create("/testFolder").orReplace().and();
+ graph.create("/testFolder/testFile").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
+ graph.create("/testFolder/testFile/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
+ TEST_CONTENT.getBytes()).orReplace().and();
+ kind = remoteRepos.checkPath("testFolder", -1);
+ assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
+
+ kind = remoteRepos.checkPath("testFolder/testFile", -1);
+ assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+ fileProperties = new SVNProperties();
+ baos = new ByteArrayOutputStream();
+ remoteRepos.getFile("testFolder/testFile", -1, fileProperties, baos);
+ assertContents(baos, TEST_CONTENT);
+
+ graph.delete("/testFolder/testFile/jcr:content");
+
+ kind = remoteRepos.checkPath("testFolder/testFile", -1);
+ assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+
+ fileProperties = new SVNProperties();
+ baos = new ByteArrayOutputStream();
+ remoteRepos.getFile("testFolder/testFile", -1, fileProperties, baos);
+
+ assertEmptyContents(baos, TEST_CONTENT);
+
+
+ graph.create("/testNode1").orReplace().and();
+ graph.create("/testNode1/testNode10").orReplace().and();
+ graph.create("/testNode1/testNode10/testItem0").with(JcrLexicon.PRIMARY_TYPE, JcrNtLexicon.FILE).orReplace().and();
+ graph.create("/testNode1/testNode10/testItem0/jcr:content").with(JcrLexicon.PRIMARY_TYPE, DnaLexicon.RESOURCE).and(JcrLexicon.DATA,
+ TEST_CONTENT.getBytes()).orReplace().and();
+ kind = remoteRepos.checkPath("testNode1", -1);
+ assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
+
+ kind = remoteRepos.checkPath("testNode1/testNode10", -1);
+ assertThat(kind == SVNNodeKind.DIR, is(Boolean.TRUE));
+
+ kind = remoteRepos.checkPath("testNode1/testNode10/testItem0", -1);
+ assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+ fileProperties = new SVNProperties();
+ baos = new ByteArrayOutputStream();
+ remoteRepos.getFile("testNode1/testNode10/testItem0", -1, fileProperties, baos);
+ assertContents(baos, TEST_CONTENT);
+
+ graph.delete("/testNode1/testNode10/testItem0/jcr:content");
+
+ kind = remoteRepos.checkPath("testNode1/testNode10/testItem0", -1);
+ assertThat(kind == SVNNodeKind.FILE, is(Boolean.TRUE));
+
+ fileProperties = new SVNProperties();
+ baos = new ByteArrayOutputStream();
+ remoteRepos.getFile("testNode1/testNode10/testItem0", -1, fileProperties, baos);
+
+ assertEmptyContents(baos, TEST_CONTENT);
+ }
+
protected void assertContents( ByteArrayOutputStream baos,
String contents ) {
assertThat(baos, notNullValue());
assertThat(baos.toString(), is(contents));
}
+
+ protected void assertEmptyContents( ByteArrayOutputStream baos,
+ String contents ) {
+ assertThat(baos, notNullValue());
+ assertThat(baos.toString(), not(contents));
+ }
}
14 years, 3 months
DNA SVN: r1501 - in trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector: svn and 1 other directories.
by dna-commits@lists.jboss.org
Author: spagop
Date: 2009-12-31 08:46:43 -0500 (Thu, 31 Dec 2009)
New Revision: 1501
Added:
trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/DeleteEntry.java
trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/UpdateFile.java
Removed:
trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/MergeFile.java
Modified:
trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/scm/ScmActionFactory.java
trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryConnectorI18n.java
trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryRequestProcessor.java
trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryUtil.java
trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/AddDirectory.java
Log:
OPEN - issue DNA-607: Delete node/folder or rather item/file from the svn repository through dna svn connector
https://jira.jboss.org/jira/browse/DNA-607
Modified: trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/scm/ScmActionFactory.java
===================================================================
--- trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/scm/ScmActionFactory.java 2009-12-31 13:45:57 UTC (rev 1500)
+++ trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/scm/ScmActionFactory.java 2009-12-31 13:46:43 UTC (rev 1501)
@@ -23,15 +23,13 @@
*/
package org.jboss.dna.connector.scm;
-/**
- */
public interface ScmActionFactory {
public ScmAction addFile( String path,
String file,
byte[] content );
- public ScmAction mergeFile( String rootPath,
+ public ScmAction updateFile( String rootPath,
String fileName,
byte[] oldData,
byte[] newData );
@@ -39,13 +37,6 @@
public ScmAction addDirectory( String root,
String path );
- public ScmAction copyDirectory( String path,
- String newPath,
- long revision );
+ public ScmAction deleteEntry( String path);
- public ScmAction deleteFile( String path,
- String file );
-
- public ScmAction deleteDirectory( String path );
-
}
Modified: trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryConnectorI18n.java
===================================================================
--- trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryConnectorI18n.java 2009-12-31 13:45:57 UTC (rev 1500)
+++ trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryConnectorI18n.java 2009-12-31 13:46:43 UTC (rev 1501)
@@ -63,6 +63,7 @@
public static I18n missingRequiredProperty;
public static I18n couldNotCreateFile;
public static I18n couldNotReadData;
+ public static I18n deleteFailed;
static {
try {
Modified: trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryRequestProcessor.java
===================================================================
--- trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryRequestProcessor.java 2009-12-31 13:45:57 UTC (rev 1500)
+++ trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryRequestProcessor.java 2009-12-31 13:46:43 UTC (rev 1501)
@@ -40,7 +40,8 @@
import org.jboss.dna.connector.scm.ScmActionFactory;
import org.jboss.dna.connector.svn.mgnt.AddDirectory;
import org.jboss.dna.connector.svn.mgnt.AddFile;
-import org.jboss.dna.connector.svn.mgnt.MergeFile;
+import org.jboss.dna.connector.svn.mgnt.DeleteEntry;
+import org.jboss.dna.connector.svn.mgnt.UpdateFile;
import org.jboss.dna.graph.DnaIntLexicon;
import org.jboss.dna.graph.DnaLexicon;
import org.jboss.dna.graph.ExecutionContext;
@@ -360,7 +361,7 @@
if (parentPath == null) return;
// svn connector does not support same name sibling
- sameNameSiblingIsNotSupported(parentPath, request);
+ sameNameSiblingIsNotSupported(parentPath);
SVNRepository workspaceRoot = getWorkspaceDirectory(request.inWorkspace());
assert workspaceRoot != null;
@@ -395,30 +396,32 @@
boolean skipWrite = false;
if (request.under().getPath().isRoot()) {
-
if (!accessData.getRepositoryRootUrl().equals(request.inWorkspace())) {
newChildPath = newName;
} else {
newChildPath = "/" + newName;
}
} else {
-
newChildPath = getPathAsString(request.under().getPath()) + "/" + newName;
-
if (!accessData.getRepositoryRootUrl().equals(request.inWorkspace())) {
newChildPath = newChildPath.substring(1);
}
}
// check if the new name already exist
- if (SVNRepositoryUtil.exists(workspaceRoot, newChildPath)) {
- if (request.conflictBehavior().equals(NodeConflictBehavior.APPEND)) {
- I18n msg = SVNRepositoryConnectorI18n.sameNameSiblingsAreNotAllowed;
- throw new InvalidRequestException(msg.text(this.getSourceName(), newName));
- } else if (request.conflictBehavior().equals(NodeConflictBehavior.DO_NOT_REPLACE)) {
- skipWrite = true;
+ try {
+ if (SVNRepositoryUtil.exists(workspaceRoot, newChildPath)) {
+ if (request.conflictBehavior().equals(NodeConflictBehavior.APPEND)) {
+ I18n msg = SVNRepositoryConnectorI18n.sameNameSiblingsAreNotAllowed;
+ throw new InvalidRequestException(msg.text("SVN Connector does not support Same Name Sibling"));
+ } else if (request.conflictBehavior().equals(NodeConflictBehavior.DO_NOT_REPLACE)) {
+ skipWrite = true;
+ }
}
+ } catch (SVNException e1) {
+ throw new RepositorySourceException(getSourceName(), e1.getMessage());
}
+
// Don't try to write if the node conflict behavior is DO_NOT_REPLACE
if (!skipWrite) {
// create a new, empty file
@@ -430,7 +433,7 @@
} else {
rootPath = getPathAsString(request.under().getPath());
}
- newFile(rootPath, newName, "".getBytes(), null, request.inWorkspace(), this.defaultWorkspace);
+ newFile(rootPath, newName, "".getBytes(), null, request.inWorkspace(), workspaceRoot);
} catch (SVNException e) {
I18n msg = SVNRepositoryConnectorI18n.couldNotCreateFile;
request.setError(new RepositorySourceException(getSourceName(),
@@ -480,7 +483,7 @@
if (parent != SVNNodeKind.NONE || parent != SVNNodeKind.UNKNOWN) {
if (request.conflictBehavior().equals(NodeConflictBehavior.APPEND)) {
I18n msg = SVNRepositoryConnectorI18n.sameNameSiblingsAreNotAllowed;
- throw new InvalidRequestException(msg.text(this.getSourceName(), newName));
+ throw new InvalidRequestException(msg.text("SVN Connector does not support Same Name Sibling"));
} else if (request.conflictBehavior().equals(NodeConflictBehavior.DO_NOT_REPLACE)) {
// TODO check if the file already has content
skipWrite = true;
@@ -521,13 +524,7 @@
rootPath = "";
}
- modifyFile(rootPath,
- fileName,
- oldData,
- binary.getBytes(),
- null,
- request.inWorkspace(),
- this.defaultWorkspace);
+ modifyFile(rootPath, fileName, oldData, binary.getBytes(), null, request.inWorkspace(), workspaceRoot);
}
} catch (SVNException e) {
I18n msg = SVNRepositoryConnectorI18n.couldNotReadData;
@@ -600,7 +597,7 @@
for (Path.Segment segment : path) {
if (segment.getIndex() > 1) {
I18n msg = SVNRepositoryConnectorI18n.sameNameSiblingsAreNotAllowed;
- throw new RepositorySourceException(getSourceName(), msg.text(getSourceName(), request));
+ throw new RepositorySourceException(getSourceName(), msg.text("SVN Connector does not support Same Name Sibling"));
}
}
@@ -625,13 +622,12 @@
return kind;
}
- private void sameNameSiblingIsNotSupported( Path path,
- CreateNodeRequest request ) {
+ protected void sameNameSiblingIsNotSupported( Path path ) {
for (Path.Segment segment : path) {
// Verify the segment is valid ...
if (segment.getIndex() > 1) {
I18n msg = SVNRepositoryConnectorI18n.sameNameSiblingsAreNotAllowed;
- throw new RepositorySourceException(getSourceName(), msg.text(getSourceName(), request));
+ throw new RepositorySourceException(getSourceName(), msg.text("SVN Connector does not support Same Name Sibling"));
}
}
}
@@ -674,7 +670,67 @@
*/
@Override
public void process( DeleteBranchRequest request ) {
- updatesAllowed(request);
+ logger.trace(request.toString());
+ if (!updatesAllowed(request)) return;
+
+ SVNRepository workspaceRoot = getWorkspaceDirectory(request.inWorkspace());
+ assert workspaceRoot != null;
+
+ NamespaceRegistry registry = getExecutionContext().getNamespaceRegistry();
+
+ Path requestedPath = request.at().getPath();
+ // svn connector does not support same name sibling
+ sameNameSiblingIsNotSupported(requestedPath);
+
+ if (!requestedPath.isRoot() && JcrLexicon.CONTENT.equals(requestedPath.getLastSegment().getName())) {
+ Path p = requestedPath.getAncestor(1);
+ if(p != null) {
+ String itemPath = getPathAsString(p);
+ if (itemPath.equals("") || itemPath.equals("/")) {
+ return;
+ }
+ String filePath = null;
+ if (!accessData.getRepositoryRootUrl().equals(request.inWorkspace())) {
+ filePath = itemPath.substring(1);
+ }
+ try {
+ //check if the file exist
+ if (!SVNRepositoryUtil.exists(workspaceRoot, filePath)) return;
+
+ //update the file
+ SVNProperties fileProperties = new SVNProperties();
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ workspaceRoot.getFile(filePath, -1, fileProperties, baos);
+
+ String rootPath = getPathAsString(p.getAncestor(1));
+ String fileName = p.getLastSegment().getString(registry);
+ modifyFile(rootPath, fileName, baos.toByteArray(), "".getBytes(), null, request.inWorkspace(), workspaceRoot);
+
+ } catch (SVNException e) {
+ throw new RepositorySourceException(getSourceName(),
+ SVNRepositoryConnectorI18n.deleteFailed.text(itemPath, getSourceName()));
+
+ }
+ }
+
+
+ } else {
+
+ String nodePath = getPathAsString(requestedPath);
+
+ if (!accessData.getRepositoryRootUrl().equals(request.inWorkspace())) {
+ nodePath = nodePath.substring(1);
+ }
+
+ try {
+ if (!SVNRepositoryUtil.exists(workspaceRoot, nodePath)) return;
+ eraseEntry(nodePath, null, request.inWorkspace(), workspaceRoot);
+ } catch (SVNException e) {
+ throw new RepositorySourceException(getSourceName(),
+ SVNRepositoryConnectorI18n.deleteFailed.text(nodePath, getSourceName()));
+
+ }
+ }
}
/**
@@ -822,102 +878,6 @@
}
/**
- * Verify if change is allowed on a specific source.
- *
- * @throws RepositorySourceException if change on that repository source is not allowed.
- */
- protected void verifyUpdatesAllowed() {
- if (!updatesAllowed) {
- throw new InvalidRequestException(SVNRepositoryConnectorI18n.sourceIsReadOnly.text(getSourceName()));
- }
- }
-
- protected boolean updatesAllowed( Request request ) {
- if (!updatesAllowed) {
- request.setError(new InvalidRequestException(SVNRepositoryConnectorI18n.sourceIsReadOnly.text(getSourceName())));
- }
- return !request.hasError();
- }
-
- /**
- * Factory for sample name.
- *
- * @return the name factory
- */
- protected NameFactory nameFactory() {
- return getExecutionContext().getValueFactories().getNameFactory();
- }
-
- /**
- * Factory for path creation.
- *
- * @return a path factory.
- */
- protected PathFactory pathFactory() {
- return getExecutionContext().getValueFactories().getPathFactory();
- }
-
- /**
- * Factory for property creation.
- *
- * @return the property factory.
- */
- protected PropertyFactory propertyFactory() {
- return getExecutionContext().getPropertyFactory();
- }
-
- /**
- * Factory for date creation.
- *
- * @return the date factory.
- */
- protected DateTimeFactory dateFactory() {
- return getExecutionContext().getValueFactories().getDateFactory();
- }
-
- /**
- * Factory for binary creation.
- *
- * @return the binary factory..
- */
- protected ValueFactory<Binary> binaryFactory() {
- return getExecutionContext().getValueFactories().getBinaryFactory();
- }
-
- /**
- * Get the path for a locarion and check if the path is null or not.
- *
- * @param location - the location.
- * @param request - the requested path.
- * @return the path.
- * @throws RepositorySourceException if the path of a location is null.
- */
- protected Path getPathFor( Location location,
- Request request ) {
- Path path = location.getPath();
- if (path == null) {
- I18n msg = SVNRepositoryConnectorI18n.locationInRequestMustHavePath;
- throw new RepositorySourceException(getSourceName(), msg.text(getSourceName(), request));
- }
- return path;
- }
-
- /**
- * Get the content of a file.
- *
- * @param path - the path to that file.
- * @param properties - the properties of the file.
- * @param os - the output stream where to store the content.
- * @throws SVNException - throws if such path is not at that revision or in case of a connection problem.
- */
- protected void getData( String path,
- SVNProperties properties,
- OutputStream os ) throws SVNException {
- getDefaultWorkspace().getFile(path, -1, properties, os);
-
- }
-
- /**
* Get the repository driver.
*
* @return repository
@@ -927,72 +887,6 @@
}
/**
- * Validate the kind of node and throws an exception if necessary.
- *
- * @param repos
- * @param requestedPath
- * @return the kind.
- */
- protected SVNNodeKind validateNodeKind( SVNRepository repos,
- Path requestedPath ) {
- SVNNodeKind kind;
- String myPath;
- if (getPathAsString(requestedPath).trim().equals("/")) {
- myPath = getPathAsString(requestedPath);
- } else if (requestedPath.endsWith(JcrLexicon.CONTENT)) {
- myPath = getPathAsString(requestedPath.getParent());
- } else {
- // directory and file
- myPath = getPathAsString(requestedPath);
- }
-
- try {
-
- kind = repos.checkPath(myPath, -1);
- if (kind == SVNNodeKind.NONE) {
- // node does not exist or requested node is not correct.
- throw new PathNotFoundException(Location.create(requestedPath), null,
- SVNRepositoryConnectorI18n.nodeDoesNotExist.text(myPath));
- } else if (kind == SVNNodeKind.UNKNOWN) {
- // node is unknown
- throw new PathNotFoundException(Location.create(requestedPath), null,
- SVNRepositoryConnectorI18n.nodeIsActuallyUnknow.text(myPath));
- }
- } catch (SVNException e) {
- throw new RepositorySourceException(
- getSourceName(),
- SVNRepositoryConnectorI18n.connectingFailureOrUserAuthenticationProblem.text(getSourceName()));
- }
-
- return kind;
- }
-
- private String getPathAsString( Path path ) {
- return path.getString(getExecutionContext().getNamespaceRegistry());
- }
-
- /**
- * Get some important informations of a path
- *
- * @param repos
- * @param path - the path
- * @return - the {@link SVNDirEntry}, or null if there is no such entry
- */
- protected SVNDirEntry getEntryInfo( SVNRepository repos,
- String path ) {
- assert path != null;
- SVNDirEntry entry = null;
- try {
- entry = repos.info(path, -1);
- } catch (SVNException e) {
- throw new RepositorySourceException(
- getSourceName(),
- SVNRepositoryConnectorI18n.connectingFailureOrUserAuthenticationProblem.text(getSourceName()));
- }
- return entry;
- }
-
- /**
* Get the last revision.
*
* @param repos
@@ -1068,7 +962,7 @@
* @param currentRepository
* @throws SVNException
*/
- public void newFile( String rootDirPath,
+ private void newFile( String rootDirPath,
String childFilePath,
byte[] content,
String comment,
@@ -1153,7 +1047,7 @@
throw new SVNException(err);
}
- ScmAction modifyFileAction = mergeFile(rootPath, fileName, oldData, newData);
+ ScmAction modifyFileAction = updateFile(rootPath, fileName, oldData, newData);
SVNActionExecutor executor = new SVNActionExecutor(currentRepository);
comment = comment == null ? "modify the " + fileName : comment;
executor.execute(modifyFileAction, comment);
@@ -1166,6 +1060,39 @@
}
/**
+ * Delete entry from the repository
+ *
+ * @param path
+ * @param comment
+ * @param inWorkspace
+ * @param currentRepository
+ * @throws SVNException
+ */
+ private void eraseEntry( String path,
+ String comment,
+ String inWorkspace,
+ SVNRepository currentRepository ) throws SVNException {
+ assert path != null;
+ assert inWorkspace != null;
+ if (path.equals("/") || path.equals("")) {
+ SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.BAD_URL, "The root directory cannot be deleted");
+ throw new SVNException(err);
+ }
+
+ try {
+ ScmAction deleteEntryAction = deleteEntry(path);
+ SVNActionExecutor executor = new SVNActionExecutor(currentRepository);
+ comment = comment == null ? "Delete the " + path : comment;
+ executor.execute(deleteEntryAction, comment);
+ } catch (SVNException e) {
+ SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.UNKNOWN,
+ "unknown error during delete action: {0}",
+ e.getMessage());
+ throw new SVNException(err);
+ }
+ }
+
+ /**
* {@inheritDoc}
*
* @see org.jboss.dna.connector.scm.ScmActionFactory#addDirectory(java.lang.String, java.lang.String)
@@ -1189,73 +1116,31 @@
/**
* {@inheritDoc}
*
- * @see org.jboss.dna.connector.scm.ScmActionFactory#mergeFile(java.lang.String, java.lang.String, byte[], byte[])
+ * @see org.jboss.dna.connector.scm.ScmActionFactory#updateFile(java.lang.String, java.lang.String, byte[], byte[])
*/
- public ScmAction mergeFile( String rootPath,
- String fileName,
- byte[] oldData,
- byte[] newData ) {
- return new MergeFile(rootPath, fileName, oldData, newData);
+ public ScmAction updateFile( String rootPath,
+ String fileName,
+ byte[] oldData,
+ byte[] newData ) {
+ return new UpdateFile(rootPath, fileName, oldData, newData);
}
/**
* {@inheritDoc}
*
- * @see org.jboss.dna.connector.scm.ScmActionFactory#copyDirectory(java.lang.String, java.lang.String, long)
+ * @see org.jboss.dna.connector.scm.ScmActionFactory#deleteEntry(java.lang.String)
*/
- public ScmAction copyDirectory( String path,
- String newPath,
- long revision ) {
- return null;
+ public ScmAction deleteEntry( String path ) {
+ return new DeleteEntry(path);
}
- /**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.connector.scm.ScmActionFactory#deleteDirectory(java.lang.String)
- */
- public ScmAction deleteDirectory( String path ) {
- return null;
- }
-
- /**
- * {@inheritDoc}
- *
- * @see org.jboss.dna.connector.scm.ScmActionFactory#deleteFile(java.lang.String, java.lang.String)
- */
- public ScmAction deleteFile( String path,
- String file ) {
- return null;
- }
-
- @SuppressWarnings( "unused" )
- private byte[] getContent( Object[] objs ) {
- byte[] content = null;
- for (Object object : objs) {
- if (object != null && object instanceof Binary) {
- Binary buf = (Binary)object;
- content = buf.getBytes();
- }
- }
- return content;
- }
-
- @SuppressWarnings( "unused" )
- private Object[] values( Collection<Property> childNodeProperties ) {
- Set<Object> result = new HashSet<Object>();
- for (Property property : childNodeProperties) {
- result.add(property.getFirstValue());
- }
- return result.toArray();
- }
-
- private void checkThePath( Path path,
- Request request ) {
+ protected void checkThePath( Path path,
+ Request request ) {
for (Path.Segment segment : path) {
// Verify the segment is valid ...
if (segment.getIndex() > 1) {
I18n msg = SVNRepositoryConnectorI18n.sameNameSiblingsAreNotAllowed;
- throw new RepositorySourceException(getSourceName(), msg.text(getSourceName(), request));
+ throw new RepositorySourceException(getSourceName(), msg.text("SVN Connector does not support Same Name Sibling"));
}
}
}
@@ -1329,4 +1214,166 @@
SVNRepositoryConnectorI18n.invalidPropertyNames.text(invalidNames.toString()));
}
}
+
+ /**
+ * Validate the kind of node and throws an exception if necessary.
+ *
+ * @param repos
+ * @param requestedPath
+ * @return the kind.
+ */
+ protected SVNNodeKind validateNodeKind( SVNRepository repos,
+ Path requestedPath ) {
+ SVNNodeKind kind;
+ String myPath;
+ if (getPathAsString(requestedPath).trim().equals("/")) {
+ myPath = getPathAsString(requestedPath);
+ } else if (requestedPath.endsWith(JcrLexicon.CONTENT)) {
+ myPath = getPathAsString(requestedPath.getParent());
+ } else {
+ // directory and file
+ myPath = getPathAsString(requestedPath);
+ }
+
+ try {
+
+ kind = repos.checkPath(myPath, -1);
+ if (kind == SVNNodeKind.NONE) {
+ // node does not exist or requested node is not correct.
+ throw new PathNotFoundException(Location.create(requestedPath), null,
+ SVNRepositoryConnectorI18n.nodeDoesNotExist.text(myPath));
+ } else if (kind == SVNNodeKind.UNKNOWN) {
+ // node is unknown
+ throw new PathNotFoundException(Location.create(requestedPath), null,
+ SVNRepositoryConnectorI18n.nodeIsActuallyUnknow.text(myPath));
+ }
+ } catch (SVNException e) {
+ throw new RepositorySourceException(
+ getSourceName(),
+ SVNRepositoryConnectorI18n.connectingFailureOrUserAuthenticationProblem.text(getSourceName()));
+ }
+
+ return kind;
+ }
+
+ /**
+ * Verify if change is allowed on a specific source.
+ *
+ * @throws RepositorySourceException if change on that repository source is not allowed.
+ */
+ protected void verifyUpdatesAllowed() {
+ if (!updatesAllowed) {
+ throw new InvalidRequestException(SVNRepositoryConnectorI18n.sourceIsReadOnly.text(getSourceName()));
+ }
+ }
+
+ protected boolean updatesAllowed( Request request ) {
+ if (!updatesAllowed) {
+ request.setError(new InvalidRequestException(SVNRepositoryConnectorI18n.sourceIsReadOnly.text(getSourceName())));
+ }
+ return !request.hasError();
+ }
+
+ /**
+ * Factory for name creation.
+ *
+ * @return the name factory
+ */
+ protected NameFactory nameFactory() {
+ return getExecutionContext().getValueFactories().getNameFactory();
+ }
+
+ /**
+ * Factory for path creation.
+ *
+ * @return a path factory.
+ */
+ protected PathFactory pathFactory() {
+ return getExecutionContext().getValueFactories().getPathFactory();
+ }
+
+ /**
+ * Factory for property creation.
+ *
+ * @return the property factory.
+ */
+ protected PropertyFactory propertyFactory() {
+ return getExecutionContext().getPropertyFactory();
+ }
+
+ /**
+ * Factory for date creation.
+ *
+ * @return the date factory.
+ */
+ protected DateTimeFactory dateFactory() {
+ return getExecutionContext().getValueFactories().getDateFactory();
+ }
+
+ /**
+ * Factory for binary creation.
+ *
+ * @return the binary factory.
+ */
+ protected ValueFactory<Binary> binaryFactory() {
+ return getExecutionContext().getValueFactories().getBinaryFactory();
+ }
+
+ /**
+ * Get the path for a location and check if the path is null or not.
+ *
+ * @param location - the location.
+ * @param request - the requested path.
+ * @return the path.
+ * @throws RepositorySourceException if the path of a location is null.
+ */
+ protected Path getPathFor( Location location,
+ Request request ) {
+ Path path = location.getPath();
+ if (path == null) {
+ I18n msg = SVNRepositoryConnectorI18n.locationInRequestMustHavePath;
+ throw new RepositorySourceException(getSourceName(), msg.text(getSourceName(), request));
+ }
+ return path;
+ }
+
+ /**
+ * Get the content of a file.
+ *
+ * @param path - the path to that file.
+ * @param properties - the properties of the file.
+ * @param os - the output stream where to store the content.
+ * @throws SVNException - throws if such path is not at that revision or in case of a connection problem.
+ */
+ protected void getData( String path,
+ SVNProperties properties,
+ OutputStream os ) throws SVNException {
+ getDefaultWorkspace().getFile(path, -1, properties, os);
+
+ }
+
+ protected String getPathAsString( Path path ) {
+ return path.getString(getExecutionContext().getNamespaceRegistry());
+ }
+
+ /**
+ * Get some important information about a path
+ *
+ * @param repos
+ * @param path - the path
+ * @return - the {@link SVNDirEntry}, or null if there is no such entry
+ */
+ protected SVNDirEntry getEntryInfo( SVNRepository repos,
+ String path ) {
+ assert path != null;
+ SVNDirEntry entry = null;
+ try {
+ entry = repos.info(path, -1);
+ } catch (SVNException e) {
+ throw new RepositorySourceException(
+ getSourceName(),
+ SVNRepositoryConnectorI18n.connectingFailureOrUserAuthenticationProblem.text(getSourceName()));
+ }
+ return entry;
+ }
}
Modified: trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryUtil.java
===================================================================
--- trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryUtil.java 2009-12-31 13:45:57 UTC (rev 1500)
+++ trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/SVNRepositoryUtil.java 2009-12-31 13:46:43 UTC (rev 1501)
@@ -28,6 +28,8 @@
import org.jboss.dna.graph.connector.RepositorySourceException;
import org.jboss.dna.graph.request.InvalidWorkspaceException;
import org.tmatesoft.svn.core.SVNDirEntry;
+import org.tmatesoft.svn.core.SVNErrorCode;
+import org.tmatesoft.svn.core.SVNErrorMessage;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNNodeKind;
import org.tmatesoft.svn.core.SVNURL;
@@ -215,7 +217,7 @@
}
public static boolean exists( SVNRepository repository,
- String path ) {
+ String path ) throws SVNException{
try {
if (repository.checkPath(path, -1) == SVNNodeKind.NONE) {
return false;
@@ -223,7 +225,10 @@
return false;
}
} catch (SVNException e) {
- return false;
+ SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.UNKNOWN,
+ "unknown error during delete action: {0}",
+ e.getMessage());
+ throw new SVNException(err);
}
return true;
}
Modified: trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/AddDirectory.java
===================================================================
--- trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/AddDirectory.java 2009-12-31 13:45:57 UTC (rev 1500)
+++ trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/AddDirectory.java 2009-12-31 13:46:43 UTC (rev 1501)
@@ -29,6 +29,7 @@
/**
* root should be the last, previously created, parent folder. Each directory in the path will be created.
+ * @author serge pagop
*/
public class AddDirectory implements ScmAction {
private String rootDirPath;
Added: trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/DeleteEntry.java
===================================================================
--- trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/DeleteEntry.java (rev 0)
+++ trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/DeleteEntry.java 2009-12-31 13:46:43 UTC (rev 1501)
@@ -0,0 +1,51 @@
+/*
+ * JBoss DNA (http://www.jboss.org/dna)
+ * See the COPYRIGHT.txt file distributed with this work for information
+ * regarding copyright ownership. Some portions may be licensed
+ * to Red Hat, Inc. under one or more contributor license agreements.
+ * See the AUTHORS.txt file in the distribution for a full listing of
+ * individual contributors.
+ *
+ * JBoss DNA is free software. Unless otherwise indicated, all code in JBoss DNA
+ * is licensed to you under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * JBoss DNA is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.svn.mgnt;
+
+import org.jboss.dna.connector.scm.ScmAction;
+import org.tmatesoft.svn.core.io.ISVNEditor;
+
+/**
+ * @author serge pagop
+ */
+public class DeleteEntry implements ScmAction {
+
+ private String path;
+
+ public DeleteEntry( String path ) {
+ this.path = path;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.jboss.dna.connector.scm.ScmAction#applyAction(java.lang.Object)
+ */
+ public void applyAction( Object context ) throws Exception {
+
+ ISVNEditor editor = (ISVNEditor)context;
+ editor.deleteEntry(this.path, -1);
+ }
+
+}
Property changes on: trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/DeleteEntry.java
___________________________________________________________________
Name: svn:keywords
+ Id Revision
Name: svn:eol-style
+ LF
Deleted: trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/MergeFile.java
===================================================================
--- trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/MergeFile.java 2009-12-31 13:45:57 UTC (rev 1500)
+++ trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/MergeFile.java 2009-12-31 13:46:43 UTC (rev 1501)
@@ -1,64 +0,0 @@
-/*
- * JBoss DNA (http://www.jboss.org/dna)
- * See the COPYRIGHT.txt file distributed with this work for information
- * regarding copyright ownership. Some portions may be licensed
- * to Red Hat, Inc. under one or more contributor license agreements.
- * See the AUTHORS.txt file in the distribution for a full listing of
- * individual contributors.
- *
- * JBoss DNA is free software. Unless otherwise indicated, all code in JBoss DNA
- * is licensed to you under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2.1 of
- * the License, or (at your option) any later version.
- *
- * JBoss DNA is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this software; if not, write to the Free
- * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
- * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
- */
-package org.jboss.dna.connector.svn.mgnt;
-
-import java.io.ByteArrayInputStream;
-import org.jboss.dna.connector.scm.ScmAction;
-import org.tmatesoft.svn.core.io.ISVNEditor;
-import org.tmatesoft.svn.core.io.diff.SVNDeltaGenerator;
-
-public class MergeFile implements ScmAction {
- private String path;
- private String file;
- private byte[] oldData;
- private byte[] newData;
-
- public MergeFile( String path,
- String file,
- byte[] oldData,
- byte[] newData ) {
- this.path = path;
- this.file = file;
- this.oldData = oldData;
- this.newData = newData;
- }
-
- public void applyAction( Object context ) throws Exception {
- ISVNEditor editor = (ISVNEditor)context;
- ISVNEditorUtil.openDirectories(editor, this.path);
-
- editor.openFile(this.path + "/" + this.file, -1);
- editor.applyTextDelta(this.path + "/" + this.file, null);
- SVNDeltaGenerator deltaGenerator = new SVNDeltaGenerator();
- String checksum = deltaGenerator.sendDelta(this.path + "/" + this.file,
- new ByteArrayInputStream(this.oldData),
- 0,
- new ByteArrayInputStream(this.newData),
- editor,
- true);
- editor.closeFile(this.path + "/" + this.file, checksum);
- ISVNEditorUtil.closeDirectories(editor, path);
- }
-
-}
\ No newline at end of file
Copied: trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/UpdateFile.java (from rev 1494, trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/MergeFile.java)
===================================================================
--- trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/UpdateFile.java (rev 0)
+++ trunk/extensions/dna-connector-svn/src/main/java/org/jboss/dna/connector/svn/mgnt/UpdateFile.java 2009-12-31 13:46:43 UTC (rev 1501)
@@ -0,0 +1,64 @@
+/*
+ * JBoss DNA (http://www.jboss.org/dna)
+ * See the COPYRIGHT.txt file distributed with this work for information
+ * regarding copyright ownership. Some portions may be licensed
+ * to Red Hat, Inc. under one or more contributor license agreements.
+ * See the AUTHORS.txt file in the distribution for a full listing of
+ * individual contributors.
+ *
+ * JBoss DNA is free software. Unless otherwise indicated, all code in JBoss DNA
+ * is licensed to you under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * JBoss DNA is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.jboss.dna.connector.svn.mgnt;
+
+import java.io.ByteArrayInputStream;
+import org.jboss.dna.connector.scm.ScmAction;
+import org.tmatesoft.svn.core.io.ISVNEditor;
+import org.tmatesoft.svn.core.io.diff.SVNDeltaGenerator;
+
+public class UpdateFile implements ScmAction {
+ private String path;
+ private String file;
+ private byte[] oldData;
+ private byte[] newData;
+
+ public UpdateFile( String path,
+ String file,
+ byte[] oldData,
+ byte[] newData ) {
+ this.path = path;
+ this.file = file;
+ this.oldData = oldData;
+ this.newData = newData;
+ }
+
+ public void applyAction( Object context ) throws Exception {
+ ISVNEditor editor = (ISVNEditor)context;
+ ISVNEditorUtil.openDirectories(editor, this.path);
+
+ editor.openFile(this.path + "/" + this.file, -1);
+ editor.applyTextDelta(this.path + "/" + this.file, null);
+ SVNDeltaGenerator deltaGenerator = new SVNDeltaGenerator();
+ String checksum = deltaGenerator.sendDelta(this.path + "/" + this.file,
+ new ByteArrayInputStream(this.oldData),
+ 0,
+ new ByteArrayInputStream(this.newData),
+ editor,
+ true);
+ editor.closeFile(this.path + "/" + this.file, checksum);
+ ISVNEditorUtil.closeDirectories(editor, path);
+ }
+
+}
\ No newline at end of file
14 years, 3 months
DNA SVN: r1500 - trunk/extensions/dna-connector-svn/src/main/resources/org/jboss/dna/connector/svn.
by dna-commits@lists.jboss.org
Author: spagop
Date: 2009-12-31 08:45:57 -0500 (Thu, 31 Dec 2009)
New Revision: 1500
Modified:
trunk/extensions/dna-connector-svn/src/main/resources/org/jboss/dna/connector/svn/SVNRepositoryConnectorI18n.properties
Log:
OPEN - issue DNA-607: Delete node/folder or rather item/file from the svn repository through dna svn connector
https://jira.jboss.org/jira/browse/DNA-607
Modified: trunk/extensions/dna-connector-svn/src/main/resources/org/jboss/dna/connector/svn/SVNRepositoryConnectorI18n.properties
===================================================================
--- trunk/extensions/dna-connector-svn/src/main/resources/org/jboss/dna/connector/svn/SVNRepositoryConnectorI18n.properties 2009-12-30 23:45:35 UTC (rev 1499)
+++ trunk/extensions/dna-connector-svn/src/main/resources/org/jboss/dna/connector/svn/SVNRepositoryConnectorI18n.properties 2009-12-31 13:45:57 UTC (rev 1500)
@@ -40,7 +40,7 @@
pathForPredefinedWorkspaceDoesNotExist = The path "{0}" for the predefined workspace for the file system source "{1}" does not represent an existing directory
pathForPredefinedWorkspaceIsNotDirectory = The path "{0}" for the predefined workspace for the file system source "{1}" is actually a path to an existing file
pathForPredefinedWorkspaceCannotBeRead = The path "{0}" for the predefined workspace for the file system source "{1}" cannot be read
-sameNameSiblingsAreNotAllowed = {0} does not allow same name siblings on nodes: {1}
+sameNameSiblingsAreNotAllowed = {0}
onlyTheDefaultNamespaceIsAllowed = {0} requires node names use the default namespace: {1}
locationInRequestMustHavePath = {0} requires a path in the request: {1}
unableToCreateWorkspaces = {0} does not allow creating new workspaces (request was to create "{1}")
@@ -56,3 +56,4 @@
# Writable tests
couldNotCreateFile =Error reading data in workspace "{1}" "{0}" "{2}" "{3}"
couldNotReadData= Error reading data in workspace "{1}" "{0}" "{2}" "{3}"
+deleteFailed=Error deleting path {0} in workspace with source name {1}
\ No newline at end of file
14 years, 3 months