teiid SVN: r1163 - trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules.
by teiid-commits@lists.jboss.org
Author: shawkins
Date: 2009-07-21 19:02:07 -0400 (Tue, 21 Jul 2009)
New Revision: 1163
Modified:
trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java
Log:
TEIID-339 adding the feature to push aggregates through unions.
Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java 2009-07-21 22:53:49 UTC (rev 1162)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java 2009-07-21 23:02:07 UTC (rev 1163)
@@ -141,12 +141,11 @@
* group [agg(agg(x)), {a, b}]
* source
* set op
- * source
- * project
- * [select]
- * group [agg(x), {a, b}]
- * source
- * child 1
+ * project
+ * [select]
+ * group [agg(x), {a, b}]
+ * source
+ * child 1
* ...
*
* Or if the child does not support pushdown we add dummy aggregate projection
14 years, 11 months
teiid SVN: r1162 - in trunk/engine/src: main/java/com/metamatrix/query/resolver/util and 3 other directories.
by teiid-commits@lists.jboss.org
Author: shawkins
Date: 2009-07-21 18:53:49 -0400 (Tue, 21 Jul 2009)
New Revision: 1162
Added:
trunk/engine/src/test/java/com/metamatrix/query/processor/TestAggregateProcessing.java
Modified:
trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleCollapseSource.java
trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePlanUnions.java
trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java
trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRaiseAccess.java
trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRemoveOptionalJoins.java
trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java
trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java
trunk/engine/src/test/java/com/metamatrix/query/optimizer/TestAggregatePushdown.java
trunk/engine/src/test/java/com/metamatrix/query/processor/TestProcessor.java
Log:
TEIID-339 adding the feature to push aggregates through unions.
Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleCollapseSource.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleCollapseSource.java 2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleCollapseSource.java 2009-07-21 22:53:49 UTC (rev 1162)
@@ -96,7 +96,7 @@
}
plan = removeUnnecessaryInlineView(plan, commandRoot);
QueryCommand queryCommand = createQuery(metadata, capFinder, accessNode, commandRoot);
- addSetOpDistinct(metadata, capFinder, accessNode, queryCommand);
+ addSetOpDistinct(metadata, capFinder, accessNode, queryCommand);
command = queryCommand;
if (intoGroup != null) {
Insert insertCommand = new Insert(intoGroup, ResolverUtil.resolveElementsInGroup(intoGroup, metadata), null);
@@ -127,7 +127,7 @@
}
parent = parent.getParent();
}
- if (!dupRemoval) {
+ if (!dupRemoval || NewCalculateCostUtil.usesKey(queryCommand.getProjectedSymbols(), metadata)) {
return;
}
//TODO: we should also order the results and update the set processing logic
Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePlanUnions.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePlanUnions.java 2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePlanUnions.java 2009-07-21 22:53:49 UTC (rev 1162)
@@ -159,7 +159,6 @@
/**
* TODO: union and intersect are associative
- * TODO: the not all flag should be raised to its highest point
*/
private void collectUnionSources(QueryMetadataInterface metadata,
CapabilitiesFinder capabilitiesFinder,
Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java 2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java 2009-07-21 22:53:49 UTC (rev 1162)
@@ -23,7 +23,9 @@
package com.metamatrix.query.optimizer.relational.rules;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@@ -41,28 +43,39 @@
import com.metamatrix.query.analysis.AnalysisRecord;
import com.metamatrix.query.function.FunctionLibrary;
import com.metamatrix.query.metadata.QueryMetadataInterface;
+import com.metamatrix.query.metadata.TempMetadataAdapter;
+import com.metamatrix.query.metadata.TempMetadataStore;
import com.metamatrix.query.optimizer.capabilities.CapabilitiesFinder;
+import com.metamatrix.query.optimizer.capabilities.SourceCapabilities.Capability;
import com.metamatrix.query.optimizer.relational.OptimizerRule;
import com.metamatrix.query.optimizer.relational.RuleStack;
import com.metamatrix.query.optimizer.relational.plantree.NodeConstants;
import com.metamatrix.query.optimizer.relational.plantree.NodeEditor;
import com.metamatrix.query.optimizer.relational.plantree.NodeFactory;
import com.metamatrix.query.optimizer.relational.plantree.PlanNode;
+import com.metamatrix.query.resolver.util.ResolverUtil;
import com.metamatrix.query.resolver.util.ResolverVisitor;
+import com.metamatrix.query.rewriter.QueryRewriter;
import com.metamatrix.query.sql.ReservedWords;
import com.metamatrix.query.sql.lang.CompareCriteria;
import com.metamatrix.query.sql.lang.Criteria;
+import com.metamatrix.query.sql.lang.IsNullCriteria;
import com.metamatrix.query.sql.lang.JoinType;
+import com.metamatrix.query.sql.lang.Select;
+import com.metamatrix.query.sql.lang.SetQuery.Operation;
import com.metamatrix.query.sql.symbol.AggregateSymbol;
import com.metamatrix.query.sql.symbol.Constant;
import com.metamatrix.query.sql.symbol.ElementSymbol;
import com.metamatrix.query.sql.symbol.Expression;
+import com.metamatrix.query.sql.symbol.ExpressionSymbol;
import com.metamatrix.query.sql.symbol.Function;
import com.metamatrix.query.sql.symbol.GroupSymbol;
+import com.metamatrix.query.sql.symbol.SearchedCaseExpression;
import com.metamatrix.query.sql.symbol.SingleElementSymbol;
import com.metamatrix.query.sql.util.SymbolMap;
import com.metamatrix.query.sql.visitor.AggregateSymbolCollectorVisitor;
import com.metamatrix.query.sql.visitor.ElementCollectorVisitor;
+import com.metamatrix.query.sql.visitor.ExpressionMappingVisitor;
import com.metamatrix.query.sql.visitor.GroupsUsedByElementsVisitor;
import com.metamatrix.query.util.CommandContext;
@@ -88,13 +101,25 @@
MetaMatrixComponentException {
for (PlanNode groupNode : NodeEditor.findAllNodes(plan, NodeConstants.Types.GROUP, NodeConstants.Types.ACCESS)) {
- PlanNode joinNode = groupNode.getFirstChild();
+ PlanNode child = groupNode.getFirstChild();
- if (joinNode.getType() != NodeConstants.Types.JOIN) {
+ List<SingleElementSymbol> groupingExpressions = (List<SingleElementSymbol>)groupNode.getProperty(NodeConstants.Info.GROUP_COLS);
+
+ if (child.getType() == NodeConstants.Types.SOURCE) {
+ PlanNode setOp = child.getFirstChild();
+
+ try {
+ pushGroupNodeOverUnion(plan, metadata, capFinder, groupNode, child, groupingExpressions, setOp);
+ } catch (QueryResolverException e) {
+ throw new MetaMatrixComponentException(e);
+ }
continue;
}
+
+ if (child.getType() != NodeConstants.Types.JOIN) {
+ continue;
+ }
- List<SingleElementSymbol> groupingExpressions = (List<SingleElementSymbol>)groupNode.getProperty(NodeConstants.Info.GROUP_COLS);
Set<AggregateSymbol> aggregates = collectAggregates(groupNode);
pushGroupNode(groupNode, groupingExpressions, aggregates, metadata, capFinder);
@@ -103,6 +128,231 @@
return plan;
}
+ /**
+ * The plan tree looks like:
+ * group [agg(x), {a, b}]
+ * source
+ * set op
+ * child 1
+ * ...
+ *
+ * we need to make it into
+ *
+ * group [agg(agg(x)), {a, b}]
+ * source
+ * set op
+ * source
+ * project
+ * [select]
+ * group [agg(x), {a, b}]
+ * source
+ * child 1
+ * ...
+ *
+ * Or if the child does not support pushdown we add dummy aggregate projection
+ * count(*) = 1, count(x) = case x is null then 0 else 1 end, avg(x) = x, etc.
+ */
+ private void pushGroupNodeOverUnion(PlanNode plan,
+ QueryMetadataInterface metadata, CapabilitiesFinder capFinder,
+ PlanNode groupNode, PlanNode child,
+ List<SingleElementSymbol> groupingExpressions, PlanNode setOp)
+ throws MetaMatrixComponentException, QueryMetadataException,
+ QueryPlannerException, QueryResolverException {
+ if (setOp.getType() != NodeConstants.Types.SET_OP || setOp.getProperty(NodeConstants.Info.SET_OPERATION) != Operation.UNION) {
+ return; //must not be a union
+ }
+ LinkedHashSet<AggregateSymbol> aggregates = collectAggregates(groupNode);
+
+ /*
+ * if there are no aggregates, this is just duplicate removal
+ * mark the union as not all, which should be removed later but
+ * serves as a hint to distribute a distinct to the union queries
+ */
+ if (aggregates.isEmpty()) {
+ if (groupingExpressions != null && !groupingExpressions.isEmpty()) {
+ setOp.setProperty(NodeConstants.Info.USE_ALL, Boolean.FALSE);
+ }
+ return;
+ }
+
+ //check to see if any aggregate is dependent upon cardinality
+ boolean cardinalityDependent = false;
+ for (AggregateSymbol aggregateSymbol : aggregates) {
+ if (aggregateSymbol.getAggregateFunction().equals(ReservedWords.COUNT)
+ || aggregateSymbol.getAggregateFunction().equals(ReservedWords.AVG)
+ || aggregateSymbol.getAggregateFunction().equals(ReservedWords.SUM)) {
+ cardinalityDependent = true;
+ break;
+ }
+ }
+
+ LinkedList<PlanNode> unionChildren = new LinkedList<PlanNode>();
+ findUnionChildren(unionChildren, cardinalityDependent, setOp);
+
+ if (unionChildren.size() < 2) {
+ return;
+ }
+
+ SymbolMap parentMap = (SymbolMap)child.getProperty(NodeConstants.Info.SYMBOL_MAP);
+ List<ElementSymbol> virtualElements = parentMap.getKeys();
+
+ Map<AggregateSymbol, Expression> aggMap = buildAggregateMap(new ArrayList<SingleElementSymbol>(aggregates), metadata, aggregates);
+
+ boolean shouldPushdown = false;
+ List<Boolean> pushdownList = new ArrayList<Boolean>(unionChildren.size());
+ for (PlanNode planNode : unionChildren) {
+ boolean pushdown = canPushGroupByToUnionChild(metadata, capFinder, groupingExpressions, aggregates, planNode);
+ pushdownList.add(pushdown);
+ shouldPushdown |= pushdown;
+ }
+
+ if (!shouldPushdown) {
+ return;
+ }
+
+ Iterator<Boolean> pushdownIterator = pushdownList.iterator();
+ for (PlanNode planNode : unionChildren) {
+ addView(plan, planNode, pushdownIterator.next(), groupingExpressions, aggregates, virtualElements, metadata, capFinder);
+ }
+
+ //update the parent plan with the staged aggregates and the new projected symbols
+ List<SingleElementSymbol> projectedViewSymbols = (List<SingleElementSymbol>)NodeEditor.findNodePreOrder(child, NodeConstants.Types.PROJECT).getProperty(NodeConstants.Info.PROJECT_COLS);
+ SymbolMap newParentMap = SymbolMap.createSymbolMap(child.getGroups().iterator().next(), projectedViewSymbols);
+ child.setProperty(NodeConstants.Info.SYMBOL_MAP, newParentMap);
+ Map<AggregateSymbol, ElementSymbol> projectedMap = new HashMap<AggregateSymbol, ElementSymbol>();
+ Iterator<AggregateSymbol> aggIter = aggregates.iterator();
+ for (ElementSymbol projectedViewSymbol : newParentMap.getKeys().subList(projectedViewSymbols.size() - aggregates.size(), projectedViewSymbols.size())) {
+ projectedMap.put(aggIter.next(), projectedViewSymbol);
+ }
+ for (Expression expr : aggMap.values()) {
+ ExpressionMappingVisitor.mapExpressions(expr, projectedMap);
+ }
+ mapExpressions(groupNode.getParent(), aggMap);
+ }
+
+ private boolean canPushGroupByToUnionChild(QueryMetadataInterface metadata,
+ CapabilitiesFinder capFinder,
+ List<SingleElementSymbol> groupingExpressions,
+ LinkedHashSet<AggregateSymbol> aggregates, PlanNode planNode)
+ throws QueryMetadataException, MetaMatrixComponentException {
+ if (planNode.getType() != NodeConstants.Types.ACCESS) {
+ return false;
+ }
+ Object modelId = RuleRaiseAccess.getModelIDFromAccess(planNode, metadata);
+ if (!CapabilitiesUtil.supports(Capability.QUERY_FROM_INLINE_VIEWS, modelId, metadata, capFinder)
+ || !CapabilitiesUtil.supports(Capability.QUERY_GROUP_BY, modelId, metadata, capFinder)) {
+ return false;
+ }
+ for (AggregateSymbol aggregate : aggregates) {
+ if (!CapabilitiesUtil.supportsAggregateFunction(modelId, aggregate, metadata, capFinder)) {
+ return false;
+ }
+ }
+ if ((groupingExpressions == null || groupingExpressions.isEmpty()) && !CapabilitiesUtil.supports(Capability.QUERY_AGGREGATES_COUNT_STAR, modelId, metadata, capFinder)) {
+ return false;
+ }
+ //TODO: check to see if we are distinct
+ return true;
+ }
+
+ /**
+ * Recursively searches the union tree for all applicable source nodes
+ */
+ private PlanNode findUnionChildren(List<PlanNode> unionChildren, boolean carinalityDependent, PlanNode setOp) {
+ if (setOp.getType() != NodeConstants.Types.SET_OP || setOp.getProperty(NodeConstants.Info.SET_OPERATION) != Operation.UNION) {
+ return setOp;
+ }
+
+ if (!setOp.hasBooleanProperty(NodeConstants.Info.USE_ALL)) {
+ if (carinalityDependent) {
+ return setOp;
+ }
+ setOp.setProperty(NodeConstants.Info.USE_ALL, Boolean.TRUE);
+ }
+
+ for (PlanNode planNode : setOp.getChildren()) {
+ PlanNode child = findUnionChildren(unionChildren, carinalityDependent, planNode);
+ if (child != null) {
+ unionChildren.add(child);
+ }
+ }
+
+ return null;
+ }
+
+ public void addView(PlanNode root, PlanNode unionSource, boolean pushdown, List<SingleElementSymbol> groupingExpressions,
+ Set<AggregateSymbol> aggregates, List<ElementSymbol> virtualElements,
+ QueryMetadataInterface metadata, CapabilitiesFinder capFinder)
+ throws MetaMatrixComponentException, QueryPlannerException, QueryResolverException {
+ PlanNode originalNode = unionSource;
+ PlanNode intermediateView = NodeFactory.getNewNode(NodeConstants.Types.SOURCE);
+ unionSource.addAsParent(intermediateView);
+ unionSource = intermediateView;
+ TempMetadataStore store = new TempMetadataStore();
+ TempMetadataAdapter tma = new TempMetadataAdapter(metadata, store);
+ GroupSymbol group = new GroupSymbol("X"); //$NON-NLS-1$
+ try {
+ group.setMetadataID(ResolverUtil.addTempGroup(tma, group, virtualElements, false));
+ } catch (QueryResolverException e) {
+ throw new MetaMatrixComponentException(e);
+ }
+ intermediateView.addGroup(group);
+ List<ElementSymbol> projectedSymbols = ResolverUtil.resolveElementsInGroup(group, metadata);
+ SymbolMap symbolMap = SymbolMap.createSymbolMap(projectedSymbols,
+ (List<Expression>)NodeEditor.findNodePreOrder(unionSource, NodeConstants.Types.PROJECT).getProperty(NodeConstants.Info.PROJECT_COLS));
+ intermediateView.setProperty(NodeConstants.Info.SYMBOL_MAP, symbolMap);
+
+ Set<SingleElementSymbol> newGroupingExpressions = Collections.emptySet();
+ if (groupingExpressions != null) {
+ newGroupingExpressions = new HashSet<SingleElementSymbol>();
+ for (SingleElementSymbol singleElementSymbol : groupingExpressions) {
+ newGroupingExpressions.add((SingleElementSymbol)symbolMap.getKeys().get(virtualElements.indexOf(singleElementSymbol)).clone());
+ }
+ }
+
+ List<SingleElementSymbol> projectedViewSymbols = QueryRewriter.deepClone(projectedSymbols, SingleElementSymbol.class);
+
+ SymbolMap viewMapping = SymbolMap.createSymbolMap(NodeEditor.findParent(unionSource, NodeConstants.Types.SOURCE).getGroups().iterator().next(), projectedSymbols);
+ for (AggregateSymbol agg : aggregates) {
+ agg = (AggregateSymbol)agg.clone();
+ ExpressionMappingVisitor.mapExpressions(agg, viewMapping.asMap());
+ if (pushdown) {
+ projectedViewSymbols.add(agg);
+ } else {
+ if (agg.getAggregateFunction().equals(ReservedWords.COUNT)) {
+ SearchedCaseExpression count = new SearchedCaseExpression(Arrays.asList(new IsNullCriteria(agg.getExpression())), Arrays.asList(new Constant(Integer.valueOf(0))));
+ count.setElseExpression(new Constant(Integer.valueOf(1)));
+ count.setType(DataTypeManager.DefaultDataClasses.INTEGER);
+ projectedViewSymbols.add(new ExpressionSymbol("stagedAgg", count)); //$NON-NLS-1$
+ } else { //min, max, sum
+ Expression ex = agg.getExpression();
+ ex = ResolverUtil.convertExpression(ex, DataTypeManager.getDataTypeName(agg.getType()));
+ projectedViewSymbols.add(new ExpressionSymbol("stagedAgg", ex)); //$NON-NLS-1$
+ }
+ }
+ }
+
+ if (pushdown) {
+ unionSource = addGroupBy(unionSource, newGroupingExpressions, new LinkedList<AggregateSymbol>());
+ }
+
+ PlanNode projectPlanNode = NodeFactory.getNewNode(NodeConstants.Types.PROJECT);
+ unionSource.addAsParent(projectPlanNode);
+ unionSource = projectPlanNode;
+
+ //create proper names for the aggregate symbols
+ Select select = new Select(projectedViewSymbols);
+ QueryRewriter.makeSelectUnique(select, false);
+ projectedViewSymbols = select.getProjectedSymbols();
+ projectPlanNode.setProperty(NodeConstants.Info.PROJECT_COLS, projectedViewSymbols);
+ projectPlanNode.addGroup(group);
+ if (pushdown) {
+ while (RuleRaiseAccess.raiseAccessNode(root, originalNode, metadata, capFinder, true) != null) {
+ //continue to raise
+ }
+ }
+ }
+
/**
* Walk up the plan from the GROUP node. Should encounter only (optionally) a SELECT and can stop at the PROJECT node. Need to
* collect any AggregateSymbols used in the select criteria or projected columns.
@@ -111,8 +361,8 @@
* @return the set of aggregate symbols found
* @since 4.2
*/
- static Set<AggregateSymbol> collectAggregates(PlanNode groupNode) {
- Set<AggregateSymbol> aggregates = new HashSet<AggregateSymbol>();
+ static LinkedHashSet<AggregateSymbol> collectAggregates(PlanNode groupNode) {
+ LinkedHashSet<AggregateSymbol> aggregates = new LinkedHashSet<AggregateSymbol>();
PlanNode currentNode = groupNode.getParent();
while (currentNode != null) {
if (currentNode.getType() == NodeConstants.Types.PROJECT) {
@@ -146,7 +396,7 @@
CapabilitiesFinder capFinder) throws MetaMatrixComponentException,
QueryMetadataException, QueryPlannerException {
- Map<PlanNode, List<SingleElementSymbol>> aggregateMap = createNodeMapping(groupNode, allAggregates);
+ Map<PlanNode, List<AggregateSymbol>> aggregateMap = createNodeMapping(groupNode, allAggregates);
Map<PlanNode, List<SingleElementSymbol>> groupingMap = createNodeMapping(groupNode, groupingExpressions);
Set<PlanNode> possibleTargetNodes = new HashSet<PlanNode>(aggregateMap.keySet());
@@ -154,7 +404,7 @@
for (PlanNode planNode : possibleTargetNodes) {
Set<SingleElementSymbol> stagedGroupingSymbols = new LinkedHashSet<SingleElementSymbol>();
- List<SingleElementSymbol> aggregates = aggregateMap.get(planNode);
+ List<AggregateSymbol> aggregates = aggregateMap.get(planNode);
List<SingleElementSymbol> groupBy = groupingMap.get(planNode);
if (!canPush(groupNode, stagedGroupingSymbols, planNode)) {
@@ -168,15 +418,19 @@
collectSymbolsFromOtherAggregates(allAggregates, aggregates, planNode, stagedGroupingSymbols);
//if the grouping expressions are unique then there's no point in staging the aggregate
- //TODO: the uses key check is not really accurate.
+ //TODO: the uses key check is not really accurate, it doesn't take into consideration where
+ //we are in the plan.
+ //if a key column is used after a non 1-1 join or a union all, then it may be non-unique.
if (NewCalculateCostUtil.usesKey(stagedGroupingSymbols, metadata)) {
- continue;
+ continue;
}
+ //TODO: we should be doing another cost check here - especially if the aggregate cannot be pushed.
+
if (aggregates != null) {
stageAggregates(groupNode, metadata, stagedGroupingSymbols, aggregates);
} else {
- aggregates = new ArrayList<SingleElementSymbol>();
+ aggregates = new ArrayList<AggregateSymbol>(1);
}
if (aggregates.isEmpty() && stagedGroupingSymbols.isEmpty()) {
@@ -184,23 +438,8 @@
}
//TODO: if aggregates is empty, then could insert a dup remove node instead
- PlanNode stageGroup = NodeFactory.getNewNode(NodeConstants.Types.GROUP);
- planNode.addAsParent(stageGroup);
-
- if (!stagedGroupingSymbols.isEmpty()) {
- stageGroup.setProperty(NodeConstants.Info.GROUP_COLS, new ArrayList<SingleElementSymbol>(stagedGroupingSymbols));
- stageGroup.addGroups(GroupsUsedByElementsVisitor.getGroups(stagedGroupingSymbols));
- } else {
- // if the source has no rows we need to insert a select node with criteria count(*)>0
- PlanNode selectNode = NodeFactory.getNewNode(NodeConstants.Types.SELECT);
- AggregateSymbol count = new AggregateSymbol("stagedAgg", ReservedWords.COUNT, false, null); //$NON-NLS-1$
- aggregates.add(count); //consider the count aggregate for the push down call below
- selectNode.setProperty(NodeConstants.Info.SELECT_CRITERIA, new CompareCriteria(count, CompareCriteria.GT,
- new Constant(new Integer(0))));
- selectNode.setProperty(NodeConstants.Info.IS_HAVING, Boolean.TRUE);
- stageGroup.addAsParent(selectNode);
- }
-
+ PlanNode stageGroup = addGroupBy(planNode, stagedGroupingSymbols, aggregates);
+
//check for push down
if (stageGroup.getFirstChild().getType() == NodeConstants.Types.ACCESS
&& RuleRaiseAccess.canRaiseOverGroupBy(stageGroup, stageGroup.getFirstChild(), aggregates, metadata, capFinder)) {
@@ -212,18 +451,40 @@
}
}
- private void stageAggregates(PlanNode groupNode,
+ private PlanNode addGroupBy(PlanNode planNode,
+ Collection<SingleElementSymbol> stagedGroupingSymbols,
+ Collection<AggregateSymbol> aggregates) {
+ PlanNode stageGroup = NodeFactory.getNewNode(NodeConstants.Types.GROUP);
+ planNode.addAsParent(stageGroup);
+
+ if (!stagedGroupingSymbols.isEmpty()) {
+ stageGroup.setProperty(NodeConstants.Info.GROUP_COLS, new ArrayList<SingleElementSymbol>(stagedGroupingSymbols));
+ stageGroup.addGroups(GroupsUsedByElementsVisitor.getGroups(stagedGroupingSymbols));
+ } else {
+ // if the source has no rows we need to insert a select node with criteria count(*)>0
+ PlanNode selectNode = NodeFactory.getNewNode(NodeConstants.Types.SELECT);
+ AggregateSymbol count = new AggregateSymbol("stagedAgg", ReservedWords.COUNT, false, null); //$NON-NLS-1$
+ aggregates.add(count); //consider the count aggregate for the push down call below
+ selectNode.setProperty(NodeConstants.Info.SELECT_CRITERIA, new CompareCriteria(count, CompareCriteria.GT,
+ new Constant(new Integer(0))));
+ selectNode.setProperty(NodeConstants.Info.IS_HAVING, Boolean.TRUE);
+ stageGroup.addAsParent(selectNode);
+ }
+ return stageGroup;
+ }
+
+ static void stageAggregates(PlanNode groupNode,
QueryMetadataInterface metadata,
- Set<SingleElementSymbol> stagedGroupingSymbols,
- List<SingleElementSymbol> aggregates) throws MetaMatrixComponentException, QueryPlannerException {
+ Collection<SingleElementSymbol> stagedGroupingSymbols,
+ Collection<AggregateSymbol> aggregates) throws MetaMatrixComponentException, QueryPlannerException {
//remove any aggregates that are computed over a group by column
Set<Expression> expressions = new HashSet<Expression>();
for (SingleElementSymbol expression : stagedGroupingSymbols) {
expressions.add(SymbolMap.getExpression(expression));
}
- for (final Iterator<SingleElementSymbol> iterator = aggregates.iterator(); iterator.hasNext();) {
- final AggregateSymbol symbol = (AggregateSymbol)iterator.next();
+ for (final Iterator<AggregateSymbol> iterator = aggregates.iterator(); iterator.hasNext();) {
+ final AggregateSymbol symbol = iterator.next();
Expression expr = symbol.getExpression();
if (expr == null) {
continue;
@@ -248,7 +509,7 @@
}
private void collectSymbolsFromOtherAggregates(Collection<AggregateSymbol> allAggregates,
- Collection<SingleElementSymbol> aggregates,
+ Collection<AggregateSymbol> aggregates,
PlanNode current,
Set<SingleElementSymbol> stagedGroupingSymbols) {
Set<AggregateSymbol> otherAggs = new HashSet<AggregateSymbol>(allAggregates);
@@ -314,13 +575,13 @@
return true;
}
- private Map<PlanNode, List<SingleElementSymbol>> createNodeMapping(PlanNode groupNode,
- Collection<? extends SingleElementSymbol> expressions) {
- Map<PlanNode, List<SingleElementSymbol>> result = new HashMap<PlanNode, List<SingleElementSymbol>>();
+ private <T extends SingleElementSymbol> Map<PlanNode, List<T>> createNodeMapping(PlanNode groupNode,
+ Collection<T> expressions) {
+ Map<PlanNode, List<T>> result = new HashMap<PlanNode, List<T>>();
if (expressions == null) {
return result;
}
- for (SingleElementSymbol aggregateSymbol : expressions) {
+ for (T aggregateSymbol : expressions) {
if (aggregateSymbol instanceof AggregateSymbol) {
AggregateSymbol partitionAgg = (AggregateSymbol)aggregateSymbol;
if (partitionAgg.isDistinct()) {
@@ -350,9 +611,9 @@
continue;
}
- List<SingleElementSymbol> symbols = result.get(originatingNode);
+ List<T> symbols = result.get(originatingNode);
if (symbols == null) {
- symbols = new LinkedList<SingleElementSymbol>();
+ symbols = new LinkedList<T>();
result.put(originatingNode, symbols);
}
symbols.add(aggregateSymbol);
@@ -360,7 +621,7 @@
return result;
}
- private Map<AggregateSymbol, Expression> buildAggregateMap(Collection<SingleElementSymbol> aggregateExpressions,
+ private static Map<AggregateSymbol, Expression> buildAggregateMap(Collection<? extends SingleElementSymbol> aggregateExpressions,
QueryMetadataInterface metadata, Set<AggregateSymbol> nestedAggregates) throws QueryResolverException,
MetaMatrixComponentException {
Map<AggregateSymbol, Expression> aggMap = new HashMap<AggregateSymbol, Expression>();
Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRaiseAccess.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRaiseAccess.java 2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRaiseAccess.java 2009-07-21 22:53:49 UTC (rev 1162)
@@ -85,7 +85,7 @@
/**
* @return null if nothing changed, and a new plan root if something changed
*/
- PlanNode raiseAccessNode(PlanNode rootNode, PlanNode accessNode, QueryMetadataInterface metadata, CapabilitiesFinder capFinder, boolean afterJoinPlanning)
+ static PlanNode raiseAccessNode(PlanNode rootNode, PlanNode accessNode, QueryMetadataInterface metadata, CapabilitiesFinder capFinder, boolean afterJoinPlanning)
throws QueryPlannerException, QueryMetadataException, MetaMatrixComponentException {
PlanNode parentNode = accessNode.getParent();
@@ -683,7 +683,7 @@
return accessModelID;
}
- private boolean canRaiseOverSetQuery(PlanNode setOpNode,
+ private static boolean canRaiseOverSetQuery(PlanNode setOpNode,
QueryMetadataInterface metadata,
CapabilitiesFinder capFinder) throws QueryMetadataException, MetaMatrixComponentException {
Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRemoveOptionalJoins.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRemoveOptionalJoins.java 2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRemoveOptionalJoins.java 2009-07-21 22:53:49 UTC (rev 1162)
@@ -23,7 +23,6 @@
package com.metamatrix.query.optimizer.relational.rules;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@@ -43,7 +42,6 @@
import com.metamatrix.query.optimizer.relational.plantree.NodeEditor;
import com.metamatrix.query.optimizer.relational.plantree.PlanNode;
import com.metamatrix.query.resolver.util.ResolverUtil;
-import com.metamatrix.query.sql.LanguageObject;
import com.metamatrix.query.sql.ReservedWords;
import com.metamatrix.query.sql.lang.Criteria;
import com.metamatrix.query.sql.lang.JoinType;
Modified: trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java 2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java 2009-07-21 22:53:49 UTC (rev 1162)
@@ -607,7 +607,7 @@
}
}
- public static void addTempGroup(TempMetadataAdapter metadata,
+ public static TempMetadataID addTempGroup(TempMetadataAdapter metadata,
GroupSymbol symbol,
List symbols, boolean tempTable) throws QueryResolverException {
HashSet names = new HashSet();
@@ -622,7 +622,7 @@
resolveNullLiterals(symbols);
}
TempMetadataStore store = metadata.getMetadataStore();
- store.addTempGroup(symbol.getName(), symbols, !tempTable, tempTable);
+ return store.addTempGroup(symbol.getName(), symbols, !tempTable, tempTable);
}
public static void addTempTable(TempMetadataAdapter metadata,
Modified: trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java 2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java 2009-07-21 22:53:49 UTC (rev 1162)
@@ -775,7 +775,7 @@
rewriteExpressions(query.getSelect(), procCommand, metadata, context);
if (query.getOrderBy() != null && !query.getIsXML()) {
- makeSelectUnique(query, true);
+ makeSelectUnique(query.getSelect(), true);
rewriteOrderBy(query, procCommand, metadata, context);
}
@@ -947,7 +947,7 @@
setQuery.setRightQuery((QueryCommand)rewriteCommand(setQuery.getRightQuery(), procCommand, metadata, context, true));
if (setQuery.getOrderBy() != null) {
- makeSelectUnique(setQuery.getProjectedQuery(), true);
+ makeSelectUnique(setQuery.getProjectedQuery().getSelect(), true);
rewriteOrderBy(setQuery, procCommand, metadata, context);
}
@@ -2355,7 +2355,7 @@
TempMetadataStore store = new TempMetadataStore();
TempMetadataAdapter tma = new TempMetadataAdapter(metadata, store);
Query firstProject = nested.getProjectedQuery();
- makeSelectUnique(firstProject, false);
+ makeSelectUnique(firstProject.getSelect(), false);
store.addTempGroup(inlineGroup.getName(), nested.getProjectedSymbols());
inlineGroup.setMetadataID(store.getTempGroupID(inlineGroup.getName()));
@@ -2367,9 +2367,7 @@
actualTypes.add(ses.getType());
}
List selectSymbols = SetQuery.getTypedProjectedSymbols(ResolverUtil.resolveElementsInGroup(inlineGroup, tma), actualTypes);
- for (final Iterator iterator = selectSymbols.iterator(); iterator.hasNext();) {
- select.addSymbol((SingleElementSymbol)((SingleElementSymbol)iterator.next()).clone());
- }
+ select.addSymbols(deepClone(selectSymbols, SingleElementSymbol.class));
query.setFrom(from);
QueryResolver.resolveCommand(query, tma);
query.setOption(nested.getOption());
@@ -2383,11 +2381,19 @@
return query;
}
- public static void makeSelectUnique(Query query, boolean expressionSymbolsOnly) {
+ public static <S extends Expression, T extends S> List<S> deepClone(List<T> collection, Class<S> clazz) {
+ ArrayList<S> result = new ArrayList<S>(collection.size());
+ for (Expression expression : collection) {
+ result.add((S)expression.clone());
+ }
+ return result;
+ }
+
+ public static void makeSelectUnique(Select select, boolean expressionSymbolsOnly) {
- query.getSelect().setSymbols(query.getSelect().getProjectedSymbols());
+ select.setSymbols(select.getProjectedSymbols());
- List symbols = query.getSelect().getSymbols();
+ List symbols = select.getSymbols();
HashSet<String> uniqueNames = new HashSet<String>();
Modified: trunk/engine/src/test/java/com/metamatrix/query/optimizer/TestAggregatePushdown.java
===================================================================
--- trunk/engine/src/test/java/com/metamatrix/query/optimizer/TestAggregatePushdown.java 2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/test/java/com/metamatrix/query/optimizer/TestAggregatePushdown.java 2009-07-21 22:53:49 UTC (rev 1162)
@@ -35,13 +35,24 @@
import com.metamatrix.query.unittest.FakeMetadataFactory;
public class TestAggregatePushdown {
-
+
+ public static BasicSourceCapabilities getAggregateCapabilities() {
+ BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
+ caps.setCapabilitySupport(Capability.QUERY_FROM_INLINE_VIEWS, true);
+ caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_MAX, true);
+ caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
+ caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_MIN, true);
+ caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_AVG, true);
+ caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT, true);
+ caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT_STAR, true);
+ caps.setCapabilitySupport(Capability.QUERY_GROUP_BY, true);
+ caps.setCapabilitySupport(Capability.QUERY_HAVING, true);
+ return caps;
+ }
+
private FakeCapabilitiesFinder getAggregatesFinder() {
FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
- BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_MAX, true);
+ BasicSourceCapabilities caps = getAggregateCapabilities();
capFinder.addCapabilities("m1", caps); //$NON-NLS-1$
capFinder.addCapabilities("m2", caps); //$NON-NLS-1$
@@ -50,13 +61,9 @@
@Test public void testCase6327() {
FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
- BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT_STAR, true);
+ BasicSourceCapabilities caps = getAggregateCapabilities();
caps.setCapabilitySupport(Capability.QUERY_FROM_JOIN_SELFJOIN, true);
caps.setCapabilitySupport(Capability.QUERY_FROM_GROUP_ALIAS, true);
- caps.setCapabilitySupport(Capability.QUERY_FUNCTIONS_IN_GROUP_BY, true);
caps.setFunctionSupport("convert", true); //$NON-NLS-1$
capFinder.addCapabilities("BQT1", caps); //$NON-NLS-1$
capFinder.addCapabilities("BQT2", caps); //$NON-NLS-1$
@@ -90,12 +97,8 @@
*/
@Test public void testAggregateOfJoinExpression() throws Exception {
FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
- BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT_STAR, true);
+ BasicSourceCapabilities caps = getAggregateCapabilities();
caps.setCapabilitySupport(Capability.QUERY_FROM_GROUP_ALIAS, true);
- caps.setCapabilitySupport(Capability.QUERY_FUNCTIONS_IN_GROUP_BY, true);
caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
caps.setFunctionSupport("convert", true); //$NON-NLS-1$
capFinder.addCapabilities("BQT1", caps); //$NON-NLS-1$
@@ -129,9 +132,7 @@
*/
@Test public void testInvariantAggregate() throws Exception {
FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
- BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_MAX, true);
+ BasicSourceCapabilities caps = getAggregateCapabilities();
caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
capFinder.addCapabilities("pm2", caps); //$NON-NLS-1$
@@ -163,10 +164,7 @@
*/
@Test public void testCase6211() throws Exception {
FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
- BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
- caps.setCapabilitySupport(Capability.QUERY_FUNCTIONS_IN_GROUP_BY, true);
+ BasicSourceCapabilities caps = getAggregateCapabilities();
caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
caps.setFunctionSupport("convert", true); //$NON-NLS-1$
capFinder.addCapabilities("BQT1", caps); //$NON-NLS-1$
@@ -263,10 +261,7 @@
*/
@Test public void testAvgAggregate() throws Exception {
FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
- BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
+ BasicSourceCapabilities caps = getAggregateCapabilities();
caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
capFinder.addCapabilities("pm2", caps); //$NON-NLS-1$
@@ -295,10 +290,7 @@
@Test public void testCountAggregate() throws Exception {
FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
- BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
+ BasicSourceCapabilities caps = getAggregateCapabilities();
caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
capFinder.addCapabilities("pm2", caps); //$NON-NLS-1$
@@ -327,10 +319,7 @@
@Test public void testOuterJoinPreventsPushdown() throws Exception {
FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
- BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
+ BasicSourceCapabilities caps = getAggregateCapabilities();
caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
capFinder.addCapabilities("pm2", caps); //$NON-NLS-1$
@@ -363,10 +352,7 @@
*/
@Test public void testCase5724() {
FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
- BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT_STAR, true);
+ BasicSourceCapabilities caps = getAggregateCapabilities();
caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
capFinder.addCapabilities("BQT1", caps); //$NON-NLS-1$
capFinder.addCapabilities("BQT2", caps); //$NON-NLS-1$
@@ -399,12 +385,9 @@
@Test public void testCase6210() throws Exception {
FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
- BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
- caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
+ BasicSourceCapabilities caps = getAggregateCapabilities();
caps.setCapabilitySupport(Capability.QUERY_FROM_JOIN_SELFJOIN, true);
caps.setCapabilitySupport(Capability.QUERY_FROM_GROUP_ALIAS, true);
- caps.setCapabilitySupport(Capability.QUERY_FUNCTIONS_IN_GROUP_BY, true);
caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
caps.setFunctionSupport("convert", true); //$NON-NLS-1$
caps.setFunctionSupport("/", true); //$NON-NLS-1$
@@ -745,6 +728,114 @@
0, // Sort
0 // UnionAll
});
- }
+ }
+
+ @Test public void testPushDownOverUnion() throws Exception {
+ FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
+ BasicSourceCapabilities caps = getAggregateCapabilities();
+ capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
+ ProcessorPlan plan = TestOptimizer.helpPlan("select e1, max(e2) from (select e1, e2 from pm1.g1 union all select e1, e2 from pm1.g2) y group by e1", FakeMetadataFactory.example1Cached(), null, capFinder, //$NON-NLS-1$
+ new String[]{"SELECT v_0.c_0, MAX(v_0.c_1) FROM (SELECT g_0.e1 AS c_0, g_0.e2 AS c_1 FROM pm1.g1 AS g_0) AS v_0 GROUP BY v_0.c_0", //$NON-NLS-1$
+ "SELECT v_0.c_0, MAX(v_0.c_1) FROM (SELECT g_0.e1 AS c_0, g_0.e2 AS c_1 FROM pm1.g2 AS g_0) AS v_0 GROUP BY v_0.c_0"}, ComparisonMode.EXACT_COMMAND_STRING); //$NON-NLS-1$
+ TestOptimizer.checkNodeTypes(plan, new int[] {
+ 2, // Access
+ 0, // DependentAccess
+ 0, // DependentSelect
+ 0, // DependentProject
+ 0, // DupRemove
+ 1, // Grouping
+ 0, // NestedLoopJoinStrategy
+ 0, // MergeJoinStrategy
+ 0, // Null
+ 0, // PlanExecution
+ 1, // Project
+ 0, // Select
+ 0, // Sort
+ 1 // UnionAll
+ });
+ }
+
+ @Test public void testPushDownOverUnion1() throws Exception {
+ FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
+ BasicSourceCapabilities caps = getAggregateCapabilities();
+ capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
+
+ ProcessorPlan plan = TestOptimizer.helpPlan("select max(e2) from (select e1, e2 from pm1.g1 union all select e1, e2 from pm1.g2) z", FakeMetadataFactory.example1Cached(), null, capFinder, //$NON-NLS-1$
+ new String[]{"SELECT MAX(v_0.c_0) FROM (SELECT g_0.e2 AS c_0 FROM pm1.g2 AS g_0) AS v_0 HAVING COUNT(*) > 0", //$NON-NLS-1$
+ "SELECT MAX(v_0.c_0) FROM (SELECT g_0.e2 AS c_0 FROM pm1.g1 AS g_0) AS v_0 HAVING COUNT(*) > 0"}, ComparisonMode.EXACT_COMMAND_STRING); //$NON-NLS-1$
+ TestOptimizer.checkNodeTypes(plan, new int[] {
+ 2, // Access
+ 0, // DependentAccess
+ 0, // DependentSelect
+ 0, // DependentProject
+ 0, // DupRemove
+ 1, // Grouping
+ 0, // NestedLoopJoinStrategy
+ 0, // MergeJoinStrategy
+ 0, // Null
+ 0, // PlanExecution
+ 1, // Project
+ 0, // Select
+ 0, // Sort
+ 1 // UnionAll
+ });
+ }
+
+ /**
+ * We won't do the pushdown here since the aggregate depends upon the cardinality
+ */
+ @Test public void testPushDownOverUnion2() throws Exception {
+ FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
+ BasicSourceCapabilities caps = getAggregateCapabilities();
+ capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
+
+ ProcessorPlan plan = TestOptimizer.helpPlan("select count(e2) from (select e1, e2 from pm1.g1 union select e1, e2 from pm1.g2) z", FakeMetadataFactory.example1Cached(), null, capFinder, //$NON-NLS-1$
+ new String[]{"SELECT DISTINCT g_0.e1, g_0.e2 FROM pm1.g2 AS g_0", //$NON-NLS-1$
+ "SELECT DISTINCT g_0.e1, g_0.e2 FROM pm1.g1 AS g_0"}, ComparisonMode.EXACT_COMMAND_STRING); //$NON-NLS-1$
+ TestOptimizer.checkNodeTypes(plan, new int[] {
+ 2, // Access
+ 0, // DependentAccess
+ 0, // DependentSelect
+ 0, // DependentProject
+ 1, // DupRemove
+ 1, // Grouping
+ 0, // NestedLoopJoinStrategy
+ 0, // MergeJoinStrategy
+ 0, // Null
+ 0, // PlanExecution
+ 1, // Project
+ 0, // Select
+ 0, // Sort
+ 1 // UnionAll
+ });
+ }
+
+ @Test public void testPushDownOverUnionMixed() throws Exception {
+ FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
+ BasicSourceCapabilities caps = getAggregateCapabilities();
+ capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
+ capFinder.addCapabilities("pm2", TestOptimizer.getTypicalCapabilities()); //$NON-NLS-1$
+
+ ProcessorPlan plan = TestOptimizer.helpPlan("select max(e2) from (select e1, e2 from pm1.g1 union all select e1, e2 from pm2.g2) z", FakeMetadataFactory.example1Cached(), null, capFinder, //$NON-NLS-1$
+ new String[]{"SELECT MAX(v_0.c_0) FROM (SELECT g_0.e2 AS c_0 FROM pm1.g1 AS g_0) AS v_0 HAVING COUNT(*) > 0", //$NON-NLS-1$
+ "SELECT g_0.e2 FROM pm2.g2 AS g_0"}, ComparisonMode.EXACT_COMMAND_STRING); //$NON-NLS-1$
+ TestOptimizer.checkNodeTypes(plan, new int[] {
+ 2, // Access
+ 0, // DependentAccess
+ 0, // DependentSelect
+ 0, // DependentProject
+ 0, // DupRemove
+ 1, // Grouping
+ 0, // NestedLoopJoinStrategy
+ 0, // MergeJoinStrategy
+ 0, // Null
+ 0, // PlanExecution
+ 2, // Project
+ 0, // Select
+ 0, // Sort
+ 1 // UnionAll
+ });
+ }
+
}
Added: trunk/engine/src/test/java/com/metamatrix/query/processor/TestAggregateProcessing.java
===================================================================
--- trunk/engine/src/test/java/com/metamatrix/query/processor/TestAggregateProcessing.java (rev 0)
+++ trunk/engine/src/test/java/com/metamatrix/query/processor/TestAggregateProcessing.java 2009-07-21 22:53:49 UTC (rev 1162)
@@ -0,0 +1,273 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * See the COPYRIGHT.txt file distributed with this work for information
+ * regarding copyright ownership. Some portions may be licensed
+ * to Red Hat, Inc. under one or more contributor license agreements.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA.
+ */
+
+package com.metamatrix.query.processor;
+
+import static com.metamatrix.query.processor.TestProcessor.*;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+
+import com.metamatrix.query.optimizer.TestAggregatePushdown;
+import com.metamatrix.query.optimizer.TestOptimizer;
+import com.metamatrix.query.optimizer.capabilities.FakeCapabilitiesFinder;
+import com.metamatrix.query.sql.lang.Command;
+import com.metamatrix.query.unittest.FakeMetadataFacade;
+import com.metamatrix.query.unittest.FakeMetadataFactory;
+import com.metamatrix.query.unittest.FakeMetadataObject;
+
+public class TestAggregateProcessing {
+
+ static void sampleDataBQT3(FakeDataManager dataMgr) throws Exception {
+ FakeMetadataFacade metadata = FakeMetadataFactory.exampleBQTCached();
+
+ // Group bqt1.smalla
+ FakeMetadataObject groupID = (FakeMetadataObject) metadata
+ .getGroupID("bqt1.smalla"); //$NON-NLS-1$
+ List elementIDs = metadata.getElementIDsInGroupID(groupID);
+ List elementSymbols = FakeDataStore.createElements(elementIDs);
+
+ List[] tuples = new List[20];
+ for (int i = 0; i < tuples.length; i++) {
+ tuples[i] = new ArrayList(17);
+ tuples[i].add(new Integer(i));
+ tuples[i].add("" + i); //$NON-NLS-1$
+ tuples[i].add(new Integer(i + 1));
+ for (int j = 0; j < 14; j++) {
+ tuples[i].add(null);
+ }
+ }
+
+ dataMgr.registerTuples(groupID, elementSymbols, tuples);
+
+ // Group bqt2.mediumb
+ groupID = (FakeMetadataObject) metadata.getGroupID("bqt2.mediumb"); //$NON-NLS-1$
+ elementIDs = metadata.getElementIDsInGroupID(groupID);
+ elementSymbols = FakeDataStore.createElements(elementIDs);
+
+ tuples = new List[20];
+ for (int i = 0; i < tuples.length; i++) {
+ tuples[i] = new ArrayList(17);
+ tuples[i].add(new Integer(i));
+ for (int j = 0; j < 16; j++) {
+ tuples[i].add(null);
+ }
+ }
+
+ dataMgr.registerTuples(groupID, elementSymbols, tuples);
+ }
+
+ private void sampleDataBQT_defect9842(FakeDataManager dataMgr) throws Exception {
+ FakeMetadataFacade metadata = FakeMetadataFactory.exampleBQTCached();
+
+ // Group bqt1.smalla
+ FakeMetadataObject groupID = (FakeMetadataObject) metadata
+ .getGroupID("bqt1.smalla"); //$NON-NLS-1$
+ List elementIDs = metadata.getElementIDsInGroupID(groupID);
+ List elementSymbols = FakeDataStore.createElements(elementIDs);
+
+ List[] tuples = new List[5];
+ for (int i = 0; i < tuples.length; i++) {
+ int k = i + 10;
+ tuples[i] = new ArrayList(17);
+ if (i < 2) {
+ tuples[i].add(new Integer(1)); // need duplicate values
+ } else {
+ tuples[i].add(new Integer(2)); // need duplicate values
+ }
+ tuples[i].add("" + k); //$NON-NLS-1$
+ tuples[i].add(new Integer(k + 1));
+ tuples[i].add("" + (k + 1)); //$NON-NLS-1$
+ tuples[i].add(new Float(0.5));
+ for (int j = 0; j < 8; j++) {
+ tuples[i].add(null);
+ }
+ tuples[i].add(new Short((short) k));
+ tuples[i].add(null);
+ tuples[i].add(new BigDecimal("" + k)); //$NON-NLS-1$
+ tuples[i].add(null);
+ }
+
+ dataMgr.registerTuples(groupID, elementSymbols, tuples);
+ }
+
+ @Test public void testAggregateOnBQT() throws Exception {
+ // Create query
+ String sql = "SELECT IntKey, SUM(IntNum) FROM BQT1.SmallA GROUP BY IntKey, IntNum HAVING IntNum > 10 ORDER BY IntKey"; //$NON-NLS-1$
+
+ // Create expected results
+ List[] expected = new List[] {
+ Arrays.asList(new Object[] { new Integer(10), new Long(11) }),
+ Arrays.asList(new Object[] { new Integer(11), new Long(12) }),
+ Arrays.asList(new Object[] { new Integer(12), new Long(13) }),
+ Arrays.asList(new Object[] { new Integer(13), new Long(14) }),
+ Arrays.asList(new Object[] { new Integer(14), new Long(15) }),
+ Arrays.asList(new Object[] { new Integer(15), new Long(16) }),
+ Arrays.asList(new Object[] { new Integer(16), new Long(17) }),
+ Arrays.asList(new Object[] { new Integer(17), new Long(18) }),
+ Arrays.asList(new Object[] { new Integer(18), new Long(19) }),
+ Arrays.asList(new Object[] { new Integer(19), new Long(20) }) };
+
+ // Construct data manager with data
+ FakeDataManager dataManager = new FakeDataManager();
+ sampleDataBQT3(dataManager);
+
+ // Plan query
+ ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory
+ .exampleBQTCached());
+
+ // Run query
+ helpProcess(plan, dataManager, expected);
+ }
+
+ @Test public void testAggregateOnBQT2() throws Exception {
+ // Create query
+ String sql = "SELECT IntNum, IsNotNull FROM (SELECT IntNum, LongNum, COUNT(IntNum) AS IsNotNull FROM BQT1.SmallA GROUP BY IntNum, LongNum HAVING LongNum IS NULL ) AS x ORDER BY IntNum, IsNotNull"; //$NON-NLS-1$
+
+ // Create expected results
+ List[] expected = new List[] {
+ Arrays.asList(new Object[] { new Integer(1), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(2), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(3), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(4), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(5), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(6), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(7), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(8), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(9), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(10), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(11), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(12), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(13), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(14), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(15), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(16), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(17), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(18), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(19), new Integer(1) }),
+ Arrays.asList(new Object[] { new Integer(20), new Integer(1) }) };
+
+ // Construct data manager with data
+ FakeDataManager dataManager = new FakeDataManager();
+ sampleDataBQT3(dataManager);
+
+ // Plan query
+ ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory
+ .exampleBQTCached());
+
+ // Run query
+ helpProcess(plan, dataManager, expected);
+ }
+
+ @Test public void testAggregateOnBQT_defect9842() throws Exception {
+ // Create query
+ String sql = "SELECT IntKey, SUM((BigDecimalValue)*(ShortValue)-(BigDecimalValue)*(ShortValue)*(FloatNum)) " + //$NON-NLS-1$
+ "AS MySum FROM BQT1.SmallA GROUP BY IntKey ORDER BY IntKey"; //$NON-NLS-1$
+
+ // Create expected results
+ List[] expected = new List[] {
+ Arrays.asList(new Object[] { new Integer(1),
+ new BigDecimal("110.5") }), //$NON-NLS-1$
+ Arrays.asList(new Object[] { new Integer(2),
+ new BigDecimal("254.5") }) //$NON-NLS-1$
+ };
+
+ // Construct data manager with data
+ FakeDataManager dataManager = new FakeDataManager();
+ sampleDataBQT_defect9842(dataManager);
+
+ // Plan query
+ ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory
+ .exampleBQTCached());
+
+ // Run query
+ helpProcess(plan, dataManager, expected);
+ }
+
+ @Test public void testCase186260() {
+ /*
+ * This case revealed that an expression like "COUNT( DISTINCT e1 )", where the type of e1 is
+ * anything but integer, was not handled properly. We tried to use "integer" (the type of the
+ * COUNT expression) to work with the e1 tuples.
+ */
+ // Create query
+ String sql = "SELECT COUNT(DISTINCT pm1.g2.e1), COUNT(DISTINCT pm1.g3.e1) FROM pm1.g2, pm1.g3"; //$NON-NLS-1$
+
+ // Create expected results
+ List[] expected = new List[] {
+ Arrays.asList(new Object[] { new Integer(3), new Integer(3) }),
+ };
+
+ // Construct data manager with data
+ FakeDataManager dataManager = new FakeDataManager();
+ sampleData1(dataManager);
+
+ // Plan query
+ ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory.example1Cached());
+
+ // Run query
+ helpProcess(plan, dataManager, expected);
+ }
+
+ @SuppressWarnings("unchecked")
+ @Test public void testAggregatePushdown() {
+ Command command = helpParse("select e1, count(e2), max(e2) from (select e1, e2, e3 from pm1.g1 union all select e1, e2, e3 from pm1.g2 union all select e1, e2, e3 from pm2.g1) z group by e1"); //$NON-NLS-1$
+
+ FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
+ capFinder.addCapabilities("pm1", TestAggregatePushdown.getAggregateCapabilities()); //$NON-NLS-1$
+ capFinder.addCapabilities("pm2", TestOptimizer.getTypicalCapabilities()); //$NON-NLS-1$
+ HardcodedDataManager dataManager = new HardcodedDataManager();
+
+ dataManager.addData("SELECT v_0.c_0, COUNT(v_0.c_1), MAX(v_0.c_1) FROM (SELECT g_0.e1 AS c_0, g_0.e2 AS c_1 FROM pm1.g1 AS g_0) AS v_0 GROUP BY v_0.c_0", //$NON-NLS-1$
+ new List[] {
+ Arrays.asList("a", Integer.valueOf(2), Boolean.TRUE), //$NON-NLS-1$
+ Arrays.asList("b", null, null) //$NON-NLS-1$
+ });
+ dataManager.addData("SELECT v_0.c_0, COUNT(v_0.c_1), MAX(v_0.c_1) FROM (SELECT g_0.e1 AS c_0, g_0.e2 AS c_1 FROM pm1.g2 AS g_0) AS v_0 GROUP BY v_0.c_0", //$NON-NLS-1$
+ new List[] {
+ Arrays.asList("a", Integer.valueOf(3), Boolean.FALSE), //$NON-NLS-1$
+ Arrays.asList("b", null, null) //$NON-NLS-1$
+ });
+ dataManager.addData("SELECT g_0.e1, g_0.e2 FROM pm2.g1 AS g_0", //$NON-NLS-1$
+ new List[] {
+ Arrays.asList("a", Boolean.FALSE), //$NON-NLS-1$
+ Arrays.asList("xyz", Boolean.FALSE), //$NON-NLS-1$
+ Arrays.asList(null, Boolean.TRUE),
+ });
+
+ ProcessorPlan plan = helpGetPlan(command, FakeMetadataFactory.example1Cached(), capFinder);
+
+ List[] expected = new List[] {
+ Arrays.asList(null, Integer.valueOf(1), Boolean.TRUE),
+ Arrays.asList("a", Integer.valueOf(6), Boolean.TRUE), //$NON-NLS-1$
+ Arrays.asList("b", null, null), //$NON-NLS-1$
+ Arrays.asList("xyz", Integer.valueOf(1), Boolean.FALSE) //$NON-NLS-1$
+ };
+
+ helpProcess(plan, dataManager, expected);
+ }
+
+}
Property changes on: trunk/engine/src/test/java/com/metamatrix/query/processor/TestAggregateProcessing.java
___________________________________________________________________
Name: svn:mime-type
+ text/plain
Modified: trunk/engine/src/test/java/com/metamatrix/query/processor/TestProcessor.java
===================================================================
--- trunk/engine/src/test/java/com/metamatrix/query/processor/TestProcessor.java 2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/test/java/com/metamatrix/query/processor/TestProcessor.java 2009-07-21 22:53:49 UTC (rev 1162)
@@ -622,90 +622,7 @@
fail("Exception building test data (" + e.getClass().getName() + "): " + e.getMessage()); //$NON-NLS-1$ //$NON-NLS-2$
}
}
-
- private void sampleDataBQT3(FakeDataManager dataMgr) {
- FakeMetadataFacade metadata = FakeMetadataFactory.exampleBQTCached();
- try {
- // Group bqt1.smalla
- FakeMetadataObject groupID = (FakeMetadataObject) metadata.getGroupID("bqt1.smalla"); //$NON-NLS-1$
- List elementIDs = metadata.getElementIDsInGroupID(groupID);
- List elementSymbols = FakeDataStore.createElements(elementIDs);
-
- List[] tuples = new List[20];
- for(int i=0; i<tuples.length; i++) {
- tuples[i] = new ArrayList(17);
- tuples[i].add(new Integer(i));
- tuples[i].add("" + i); //$NON-NLS-1$
- tuples[i].add(new Integer(i+1));
- for(int j=0; j<14; j++) {
- tuples[i].add(null);
- }
- }
-
- dataMgr.registerTuples(groupID, elementSymbols, tuples);
-
- // Group bqt2.mediumb
- groupID = (FakeMetadataObject) metadata.getGroupID("bqt2.mediumb"); //$NON-NLS-1$
- elementIDs = metadata.getElementIDsInGroupID(groupID);
- elementSymbols = FakeDataStore.createElements(elementIDs);
-
- tuples = new List[20];
- for(int i=0; i<tuples.length; i++) {
- tuples[i] = new ArrayList(17);
- tuples[i].add(new Integer(i));
- for(int j=0; j<16; j++) {
- tuples[i].add(null);
- }
- }
-
- dataMgr.registerTuples(groupID, elementSymbols, tuples);
-
- }catch(Throwable e) {
- e.printStackTrace();
- fail("Exception building test data (" + e.getClass().getName() + "): " + e.getMessage()); //$NON-NLS-1$ //$NON-NLS-2$
- }
- }
-
- private void sampleDataBQT_defect9842(FakeDataManager dataMgr) {
- FakeMetadataFacade metadata = FakeMetadataFactory.exampleBQTCached();
-
- try {
- // Group bqt1.smalla
- FakeMetadataObject groupID = (FakeMetadataObject) metadata.getGroupID("bqt1.smalla"); //$NON-NLS-1$
- List elementIDs = metadata.getElementIDsInGroupID(groupID);
- List elementSymbols = FakeDataStore.createElements(elementIDs);
-
- List[] tuples = new List[5];
- for(int i=0; i<tuples.length; i++) {
- int k = i + 10;
- tuples[i] = new ArrayList(17);
- if (i<2){
- tuples[i].add(new Integer(1)); //need duplicate values
- } else {
- tuples[i].add(new Integer(2)); //need duplicate values
- }
- tuples[i].add("" + k); //$NON-NLS-1$
- tuples[i].add(new Integer(k+1));
- tuples[i].add("" + (k+1)); //$NON-NLS-1$
- tuples[i].add(new Float(0.5));
- for(int j=0; j<8; j++) {
- tuples[i].add(null);
- }
- tuples[i].add(new Short((short)k));
- tuples[i].add(null);
- tuples[i].add(new BigDecimal("" + k)); //$NON-NLS-1$
- tuples[i].add(null);
- }
-
- dataMgr.registerTuples(groupID, elementSymbols, tuples);
-
- }catch(Throwable e) {
- e.printStackTrace();
- fail("Exception building test data (" + e.getClass().getName() + "): " + e.getMessage()); //$NON-NLS-1$ //$NON-NLS-2$
- }
- }
-
/**
* Just want to register two rows of all the integral types to test AVG
* @param dataMgr
@@ -4288,97 +4205,7 @@
// Run query
helpProcess(plan, dataManager, expected);
}
-
- // SELECT IntKey, SUM(IntNum) FROM BQT1.SmallA GROUP BY IntKey, IntNum HAVING IntNum > 10 ORDER BY IntKey
- @Test public void testAggregateOnBQT() {
- // Create query
- String sql = "SELECT IntKey, SUM(IntNum) FROM BQT1.SmallA GROUP BY IntKey, IntNum HAVING IntNum > 10 ORDER BY IntKey"; //$NON-NLS-1$
-
- // Create expected results
- List[] expected = new List[] {
- Arrays.asList(new Object[] { new Integer(10), new Long(11) }),
- Arrays.asList(new Object[] { new Integer(11), new Long(12) }),
- Arrays.asList(new Object[] { new Integer(12), new Long(13) }),
- Arrays.asList(new Object[] { new Integer(13), new Long(14) }),
- Arrays.asList(new Object[] { new Integer(14), new Long(15) }),
- Arrays.asList(new Object[] { new Integer(15), new Long(16) }),
- Arrays.asList(new Object[] { new Integer(16), new Long(17) }),
- Arrays.asList(new Object[] { new Integer(17), new Long(18) }),
- Arrays.asList(new Object[] { new Integer(18), new Long(19) }),
- Arrays.asList(new Object[] { new Integer(19), new Long(20) }) };
-
- // Construct data manager with data
- FakeDataManager dataManager = new FakeDataManager();
- sampleDataBQT3(dataManager);
-
- // Plan query
- ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory.exampleBQTCached());
-
- // Run query
- helpProcess(plan, dataManager, expected);
- }
- // SELECT IntNum, IsNotNull FROM (SELECT IntNum, LongNum, COUNT(IntNum) AS IsNotNull FROM BQT2.SmallB GROUP BY IntNum, LongNum HAVING LongNum >= 0 ) AS x ORDER BY IntNum, IsNotNull
- @Test public void testAggregateOnBQT2() {
- // Create query
- String sql = "SELECT IntNum, IsNotNull FROM (SELECT IntNum, LongNum, COUNT(IntNum) AS IsNotNull FROM BQT1.SmallA GROUP BY IntNum, LongNum HAVING LongNum IS NULL ) AS x ORDER BY IntNum, IsNotNull"; //$NON-NLS-1$
-
- // Create expected results
- List[] expected = new List[] {
- Arrays.asList(new Object[] { new Integer(1), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(2), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(3), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(4), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(5), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(6), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(7), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(8), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(9), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(10), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(11), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(12), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(13), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(14), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(15), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(16), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(17), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(18), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(19), new Integer(1) }),
- Arrays.asList(new Object[] { new Integer(20), new Integer(1) }) };
-
- // Construct data manager with data
- FakeDataManager dataManager = new FakeDataManager();
- sampleDataBQT3(dataManager);
-
- // Plan query
- ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory.exampleBQTCached());
-
- // Run query
- helpProcess(plan, dataManager, expected);
- }
-
- @Test public void testAggregateOnBQT_defect9842(){
- // Create query
- String sql = "SELECT IntKey, SUM((BigDecimalValue)*(ShortValue)-(BigDecimalValue)*(ShortValue)*(FloatNum)) " + //$NON-NLS-1$
- "AS MySum FROM BQT1.SmallA GROUP BY IntKey ORDER BY IntKey"; //$NON-NLS-1$
-
- // Create expected results
- List[] expected = new List[] {
- Arrays.asList(new Object[] { new Integer(1), new BigDecimal("110.5") }), //$NON-NLS-1$
- Arrays.asList(new Object[] { new Integer(2), new BigDecimal("254.5") }) //$NON-NLS-1$
- };
-
- // Construct data manager with data
- FakeDataManager dataManager = new FakeDataManager();
- sampleDataBQT_defect9842(dataManager);
-
- // Plan query
- ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory.exampleBQTCached());
-
- // Run query
- helpProcess(plan, dataManager, expected);
- }
-
@Test public void testSelectWithNoFrom() {
// Create query
String sql = "SELECT 5"; //$NON-NLS-1$
@@ -6075,7 +5902,7 @@
// Construct data manager with data
FakeDataManager dataManager = new FakeDataManager();
- sampleDataBQT3(dataManager);
+ sampleDataBQT1(dataManager);
// Run query
helpProcess(plan, dataManager, expectedResults);
@@ -7656,33 +7483,7 @@
//we expect 2 queries, 1 for the outer and 1 for the subquery
assertEquals(2, dataManager.getCommandHistory().size());
}
-
-
- @Test public void testCase186260() {
- /*
- * This case revealed that an expression like "COUNT( DISTINCT e1 )", where the type of e1 is
- * anything but integer, was not handled properly. We tried to use "integer" (the type of the
- * COUNT expression) to work with the e1 tuples.
- */
- // Create query
- String sql = "SELECT COUNT(DISTINCT pm1.g2.e1), COUNT(DISTINCT pm1.g3.e1) FROM pm1.g2, pm1.g3"; //$NON-NLS-1$
-
- // Create expected results
- List[] expected = new List[] {
- Arrays.asList(new Object[] { new Integer(3), new Integer(3) }),
- };
- // Construct data manager with data
- FakeDataManager dataManager = new FakeDataManager();
- sampleData1(dataManager);
-
- // Plan query
- ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory.example1Cached());
-
- // Run query
- helpProcess(plan, dataManager, expected);
- }
-
@Test public void testOrderByOutsideOfSelect() {
// Create query
String sql = "SELECT e1 FROM (select e1, e2 || e3 as e2 from pm1.g2) x order by e2"; //$NON-NLS-1$
14 years, 11 months
teiid SVN: r1161 - in trunk/engine/src: test/java/com/metamatrix/query/parser and 1 other directories.
by teiid-commits@lists.jboss.org
Author: shawkins
Date: 2009-07-21 18:51:14 -0400 (Tue, 21 Jul 2009)
New Revision: 1161
Modified:
trunk/engine/src/main/java/com/metamatrix/query/sql/lang/SubqueryFromClause.java
trunk/engine/src/test/java/com/metamatrix/query/parser/TestOptionsAndHints.java
trunk/engine/src/test/java/com/metamatrix/query/parser/TestParser.java
trunk/engine/src/test/java/com/metamatrix/query/sql/lang/TestSubqueryFromClause.java
Log:
TEIID-735 fix to the equals method of SubqueryFromClause
Modified: trunk/engine/src/main/java/com/metamatrix/query/sql/lang/SubqueryFromClause.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/sql/lang/SubqueryFromClause.java 2009-07-21 21:07:33 UTC (rev 1160)
+++ trunk/engine/src/main/java/com/metamatrix/query/sql/lang/SubqueryFromClause.java 2009-07-21 22:51:14 UTC (rev 1161)
@@ -134,7 +134,7 @@
SubqueryFromClause sfc = (SubqueryFromClause) obj;
return this.getName().equalsIgnoreCase(sfc.getName()) &&
- sfc.isOptional() == this.isOptional();
+ sfc.isOptional() == this.isOptional() && this.command.equals(sfc.command);
}
/**
Modified: trunk/engine/src/test/java/com/metamatrix/query/parser/TestOptionsAndHints.java
===================================================================
--- trunk/engine/src/test/java/com/metamatrix/query/parser/TestOptionsAndHints.java 2009-07-21 21:07:33 UTC (rev 1160)
+++ trunk/engine/src/test/java/com/metamatrix/query/parser/TestOptionsAndHints.java 2009-07-21 22:51:14 UTC (rev 1161)
@@ -689,7 +689,7 @@
Query query2 = new Query();
select = new Select();
- select.addSymbol(new ElementSymbol("a"));//$NON-NLS-1$
+ select.addSymbol(new AllSymbol());
From from2 = new From();
from2.addGroup(new GroupSymbol("t1")); //$NON-NLS-1$
from2.addGroup(new GroupSymbol("t2")); //$NON-NLS-1$
Modified: trunk/engine/src/test/java/com/metamatrix/query/parser/TestParser.java
===================================================================
--- trunk/engine/src/test/java/com/metamatrix/query/parser/TestParser.java 2009-07-21 21:07:33 UTC (rev 1160)
+++ trunk/engine/src/test/java/com/metamatrix/query/parser/TestParser.java 2009-07-21 22:51:14 UTC (rev 1161)
@@ -5271,16 +5271,10 @@
query.setSelect(select);
From from = new From();
from.addClause(new UnaryFromClause(new GroupSymbol("newModel2.Table1"))); //$NON-NLS-1$
- Query subquery = new Query();
+ StoredProcedure subquery = new StoredProcedure();
+ subquery.setProcedureName("NewVirtual.StoredQuery");
from.addClause(new SubqueryFromClause("a", subquery)); //$NON-NLS-1$
query.setFrom(from);
-
- Select select2 = new Select();
- select2.addSymbol(new AllSymbol());
- subquery.setSelect(select2);
- From from2 = new From();
- from2.addClause(new UnaryFromClause(new GroupSymbol("a", "NewVirtual.StoredQuery"))); //$NON-NLS-1$ //$NON-NLS-2$
- subquery.setFrom(from2);
helpTest("SELECT * FROM newModel2.Table1, (EXEC NewVirtual.StoredQuery()) AS a", //$NON-NLS-1$
"SELECT * FROM newModel2.Table1, (EXEC NewVirtual.StoredQuery()) AS a", //$NON-NLS-1$
Modified: trunk/engine/src/test/java/com/metamatrix/query/sql/lang/TestSubqueryFromClause.java
===================================================================
--- trunk/engine/src/test/java/com/metamatrix/query/sql/lang/TestSubqueryFromClause.java 2009-07-21 21:07:33 UTC (rev 1160)
+++ trunk/engine/src/test/java/com/metamatrix/query/sql/lang/TestSubqueryFromClause.java 2009-07-21 22:51:14 UTC (rev 1161)
@@ -113,10 +113,10 @@
UnitTestUtil.helpTestEquivalence(equals, s1, s2);
}
- public void testEquivalenceSameName(){
+ public void testCommandNonEquivalence(){
Object s1 = example1();
Object s2 = example2();
- int equals = 0;
+ int equals = -1;
UnitTestUtil.helpTestEquivalence(equals, s1, s2);
}
14 years, 11 months
teiid SVN: r1160 - in trunk/embedded/src/main: resources/com/metamatrix/dqp/embedded and 1 other directory.
by teiid-commits@lists.jboss.org
Author: rareddy
Date: 2009-07-21 17:07:33 -0400 (Tue, 21 Jul 2009)
New Revision: 1160
Modified:
trunk/embedded/src/main/java/org/teiid/transport/SocketListener.java
trunk/embedded/src/main/resources/com/metamatrix/dqp/embedded/i18n.properties
Log:
misc cleanup of log messages
Modified: trunk/embedded/src/main/java/org/teiid/transport/SocketListener.java
===================================================================
--- trunk/embedded/src/main/java/org/teiid/transport/SocketListener.java 2009-07-21 16:14:46 UTC (rev 1159)
+++ trunk/embedded/src/main/java/org/teiid/transport/SocketListener.java 2009-07-21 21:07:33 UTC (rev 1160)
@@ -80,7 +80,7 @@
this.workerPool = WorkerPoolFactory.newWorkerPool("SocketWorker", maxWorkers); //$NON-NLS-1$
this.nettyPool = Executors.newCachedThreadPool();
if (LogManager.isMessageToBeRecorded(LogConstants.CTX_SERVER, MessageLevel.DETAIL)) {
- LogManager.logDetail(LogConstants.CTX_SERVER, "server = " + this.server + "binding to port:" + port); //$NON-NLS-1$ //$NON-NLS-2$
+ LogManager.logDetail(LogConstants.CTX_SERVER, "server = " + bindAddress + "binding to port:" + port); //$NON-NLS-1$ //$NON-NLS-2$
}
ChannelFactory factory = new NioServerSocketChannelFactory(nettyPool, nettyPool, Math.min(Runtime.getRuntime().availableProcessors(), maxWorkers));
Modified: trunk/embedded/src/main/resources/com/metamatrix/dqp/embedded/i18n.properties
===================================================================
--- trunk/embedded/src/main/resources/com/metamatrix/dqp/embedded/i18n.properties 2009-07-21 16:14:46 UTC (rev 1159)
+++ trunk/embedded/src/main/resources/com/metamatrix/dqp/embedded/i18n.properties 2009-07-21 21:07:33 UTC (rev 1160)
@@ -56,14 +56,14 @@
VDBService.vdb_change_status_failed=VDB \"{0}\" with version \"{1}\" has failed to change its status from \"{2}\" to \"{3}\" in VDBService
VDBService.connector_binding_changed=VDB \"{0}\" with version \"{1}\" has changed its connector binding for model name \"{2}\" with \"{3}\"
VDBService.vdb_already_exists=VDB \"{0}\" with version \"{1}\" already exists in the configuration, will going to save as a new version of the VDB
-VDBService.vdb_deployed=VDB \"{0}\" with version \"{1}\" has been deployed to the DQP system.
+VDBService.vdb_deployed=VDB \"{0}\" with version \"{1}\" has been deployed to the Teiid system.
VDBService.vdb_loded=VDB \"{0}\" with version \"{1}\" has been loaded from the configuration.
VDBService.vdb_missing_bindings=VDB \"{0}\" with version \"{1}\" is incomplete, it does not have all the connector bindings mapped to their physical models.
VDBService.lookup=VDB \"{0}\" with version \"{1}\" being loaded from configuration
VDBService.vdb_deleted=VDB \"{0}\" with version \"{1}\" deleted from VDB Service and configuration
VDBService.connector_binding_changed=VDB \"{0}\" with version \"{1}\" has changed its connector binding for model name {2} with {3}
VDBService.vdb_already_exists=VDB \"{0}\" with version \"{1}\" already exists in the configuration, will going to save as a new version of the VDB
-VDBService.vdb_deployed=VDB \"{0}\" with version \"{1}\" has been deployed to the DQP system.
+VDBService.vdb_deployed=VDB \"{0}\" with version \"{1}\" has been deployed to the Teiid system.
VDBService.vdb_loded=VDB \"{0}\" with version \"{1}\" has been loaded from the configuration.
VDBService.vdb_missing_bindings=VDB \"{0}\" with version \"{1}\" is incomplete, it does not have all the connector bindings mapped to their physical models.
VDBService.vdb_active=VDB \"{0}\" with version \"{1}\" is set to ACTIVE.
@@ -120,7 +120,7 @@
EmbeddedConfigurationService.ConnectorBinding_in_use_failed_delete=Connector binding with Name \"{0}\" is currenlty in use by VDB \"{1}\" with version \"{2}\"; Can not delete; Remove the assosiation first
#EmbeddedConfigurationService.Alternate_binding_inuse=Connector binding {0} is using alternate properties defined in the system configuration file.
EmbeddedConfigurationService.ConnectorBinding_in_use_failed_delete=Connector binding with Name {0} is currenlty in use by VDB {1} with version {2}; Can not delete; Remove the assosiation first
-EmbeddedConfigurationService.savingConfiguration=DQP Configuration is being saved.
+EmbeddedConfigurationService.savingConfiguration=Teiid Configuration is being saved.
EmbeddedConfigurationService.vdb_delete=VDB with name \"{0}\" and version \"{1}\" is being deleted from the configuration.
EmbeddedConfigurationService.vdb_delete_failed=configuration service failed to delte VDB with name \"{0}\" and version \"{1}\".
EmbeddedConfigurationService.vdb_saved=VDB with name \"{0}\" and version \"{1}\" is being saved by the configuration to \"{2}\" file name.
@@ -132,16 +132,16 @@
EmbeddedConfigurationService.connector_type_save=Connector Type with name \"{0}\" is being deleted from configuration.
EmbeddedConfigurationService.connector_type_delete_failed=Connector Type with name \"{0}\" failed to delete from configuration as they type is not found in configuration.
EmbeddedConfigurationService.custom_connectors_restricted=License does not allow adding connector types.
-EmbeddedConfigurationService.Failed_to_load_vdb=Failed to load the VDB \"{0}\" during the initilizing the DQP
-EmbeddedConfigurationService.loaded_vdb=Loaded the VDB \"{0}\" sucessfully into DQP.
-EmbeddedConfigurationService.not_loaded_vdb=VDB \"{0}\" is not loaded into DQP due to licensing issues.
+EmbeddedConfigurationService.Failed_to_load_vdb=Failed to load the VDB \"{0}\" during the initialization of Teiid
+EmbeddedConfigurationService.loaded_vdb=Loaded the VDB \"{0}\" sucessfully into Teiid.
+EmbeddedConfigurationService.not_loaded_vdb=VDB \"{0}\" is not loaded into Teiid due to licensing issues.
EmbeddedConfigurationService.connector_binding_delete_failed=Delete of Connector Binding \"{0}\" failed. Binding not found in the configuration.
EmbeddedConfigurationService.connector_type_in_use=Delete of Connector type \"{0}\" failed. Currently in use by a Connector Binding.
EmbeddedConfigurationService.duplicate_vdb_found=Duplicate VDB \"{0}\" is being loaded. Can not load have multiple VDBs with same name and version.
EmbeddedConfigurationService.connector_type_delete=Connector type \"{0}\" is deleted from the configuration.
EmbeddedConfigurationService.connectionAdded=Connection added to VDB \"{0}\" with version \"{1}\" with connection id \"{2}\"
EmbeddedConfigurationService.connectionRemoved=Connection removed from VDB \"{0}\" with version \"{1}\" with connection id \"{2}\"
-EmbeddedConfigurationService.dqp_loading=DQP instance with instance id \"{0}\" is being loaded.
+EmbeddedConfigurationService.dqp_loading=Teiid instance with instance id \"{0}\" is being loaded.
EmbeddedConfigurationService.init_encryption=Cryptography libraries are in use and are initialized
EmbeddedConfigurationService.udf_classspath_failure=Failed to resolve the user defined function''s classpath
EmbeddedConfigurationService.udf_load=User Defined Functions file \"{0}\" is loaded, using classloader \"{1}\"
@@ -154,7 +154,7 @@
ExtensionModuleReader.ext_module_does_not_exist=Extension module \"{0}\" does not exist in the configuration
ExtensionModuleWriter.ext_module_save=Extension module with name \"{0}\" is saved to \"{1}\".
ExtensionModuleWriter.ext_module_already_exists=Extension module \"{0}\" already exists in the configuration failed to delete.
-ExtensionModuleWriter.ext_module_delete_failed=Failed to delete Extension Module \"{0}\"; It may be currently in use by the DQP, check if is it been un-assigned from all models.
+ExtensionModuleWriter.ext_module_delete_failed=Failed to delete Extension Module \"{0}\"; It may be currently in use by the Teiid, check if is it been un-assigned from all models.
ExtensionModuleWriter.ext_module_delete=Extension module with name \"{0}\" is being deleted with file \"{1}\".
UnSupported_object_conversion=This object conversion not supported..
@@ -185,7 +185,7 @@
Admin.Invalid_ext_module=Extension module specified is not the JAR type, currently only ".jar" extension modules supported.
Admin.Connector_type_exists=Connector Type \"{0}\" already exists; Can not add a duplicate, try delete and add again.
Admin.extension_module_exists=Extension Module \"{0}\" already exists; Can not add a duplicate, delete first and add again.
-Admin.addBindingEixists=Connector Binding \"{0}\" already exists in exists in the configuration, use a unique name for the connector binding in the given dqp instance.
+Admin.addBindingEixists=Connector Binding \"{0}\" already exists in exists in the configuration, use a unique name for the connector binding in the given Teiid instance.
Admin.addBinding_type_exists=Adding Connector Binding \"{0}\" failed, because the Connector Type \"{1}\" already exists, and admin option set to fail on conflict
Admin.connector_type_not_exists=Connector Type \"{0}\" does not exists in the configuration, please add Connector Type.
Admin.Invalid Request=Request submitted does not match the requirement \"{0}\"
@@ -194,8 +194,8 @@
Admin.Unsupported_Object_Class=AdminObjects class {0} not supported for this operation.
Admin.bindings_exceeded_limit=You have reached the MetaMatrix license limit of connector bindings, Failed to add connector bindings.
-DQPEmbeddedManager.start_dqp=DQP is started at time = {0}
-DQPEmbeddedManager.shutdown_dqp=DQP is shutdown at time = {0}
+DQPEmbeddedManager.start_dqp=Teiid is started at time = {0}
+DQPEmbeddedManager.shutdown_dqp=Teiid is shutdown at time = {0}
DQPEmbeddedManager.duplicate_process= There is another process running with same name {0}; Failed to start.
ServerSecurityAdminImpl.User_name_too_long=The user name may not be null and cannot be longer than {0} charachters
@@ -268,5 +268,5 @@
ServerWorkItem.processing_error=Processing exception ''{0}'' for session {1}. Exception type {2} throw from {3}. Enable detail logging to see the entire stacktrace.
ServerWorkItem.Component_Not_Found=Component not found: {0}
-SocketTransport.1=Bound to address {1} listening on port {2}
+SocketTransport.1=Bound to address {0} listening on port {1}
SocketTransport.2=Problem starting server binding to address {0} and listening on port {1}
14 years, 11 months
Warning: could not send message for past 4 hours
by Mail Delivery Subsystem
**********************************************
** THIS IS A WARNING MESSAGE ONLY **
** YOU DO NOT NEED TO RESEND YOUR MESSAGE **
**********************************************
The original message was received at Tue, 21 Jul 2009 12:14:06 -0400
from svn01.web.mwc.hst.phx2.redhat.com [10.5.105.7]
----- Transcript of session follows -----
<teiid-commits(a)lists.jboss.org>... Deferred: Connection refused by mx1.lists.jboss.org.
Warning: message still undelivered after 4 hours
Will keep trying until message is 5 days old
14 years, 11 months
Warning: could not send message for past 4 hours
by Mail Delivery Subsystem
**********************************************
** THIS IS A WARNING MESSAGE ONLY **
** YOU DO NOT NEED TO RESEND YOUR MESSAGE **
**********************************************
The original message was received at Tue, 21 Jul 2009 12:14:46 -0400
from svn01.web.mwc.hst.phx2.redhat.com [10.5.105.7]
----- Transcript of session follows -----
<teiid-commits(a)lists.jboss.org>... Deferred: Connection refused by mx1.lists.jboss.org.
Warning: message still undelivered after 4 hours
Will keep trying until message is 5 days old
14 years, 11 months
teiid SVN: r1159 - trunk/adminshell.
by teiid-commits@lists.jboss.org
Author: rareddy
Date: 2009-07-21 12:14:46 -0400 (Tue, 21 Jul 2009)
New Revision: 1159
Modified:
trunk/adminshell/
Log:
adding an svn:ignore property
Property changes on: trunk/adminshell
___________________________________________________________________
Name: svn:ignore
- target
.project
.settings
.classpath
log
+ target
.project
.settings
.classpath
log
connection.properties
14 years, 11 months
teiid SVN: r1158 - in trunk: embedded/src/main/java/com/metamatrix/dqp/embedded/services and 3 other directories.
by teiid-commits@lists.jboss.org
Author: rareddy
Date: 2009-07-21 12:14:06 -0400 (Tue, 21 Jul 2009)
New Revision: 1158
Modified:
trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/configuration/ServerConfigFileReader.java
trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/services/EmbeddedConfigurationService.java
trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/services/EmbeddedDataService.java
trunk/embedded/src/main/resources/com/metamatrix/dqp/embedded/i18n.properties
trunk/embedded/src/test/java/com/metamatrix/dqp/embedded/services/TestEmbeddedConfigurationService.java
trunk/engine/src/main/java/com/metamatrix/dqp/service/ConfigurationService.java
Log:
TEIID-652: Corrected an issue such that if the default properties for the connector type are not found in the configuration.xml, the default properties from the instantiated connector type object will be used instead. Also streamlined the loading of VDBs during the initialization of the embedded configuration service to match adding a VDB through the admin API, so that there is only one code path for loading a VDB.
Modified: trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/configuration/ServerConfigFileReader.java
===================================================================
--- trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/configuration/ServerConfigFileReader.java 2009-07-20 19:35:47 UTC (rev 1157)
+++ trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/configuration/ServerConfigFileReader.java 2009-07-21 16:14:06 UTC (rev 1158)
@@ -34,6 +34,7 @@
import java.util.Set;
import com.metamatrix.common.config.api.ComponentType;
+import com.metamatrix.common.config.api.ComponentTypeDefn;
import com.metamatrix.common.config.api.ComponentTypeID;
import com.metamatrix.common.config.api.Configuration;
import com.metamatrix.common.config.api.ConfigurationModelContainer;
@@ -41,7 +42,6 @@
import com.metamatrix.common.config.api.exceptions.ConfigurationException;
import com.metamatrix.common.config.model.BasicComponentType;
import com.metamatrix.common.config.model.ConfigurationModelContainerAdapter;
-import com.metamatrix.common.object.PropertyDefinition;
/**
* This class loades the server configuration file <code>ServerConfig.xml</code>
@@ -151,11 +151,32 @@
}
if (c != null && !c.isEmpty()) {
- Set<PropertyDefinition> defns = new HashSet<PropertyDefinition>();
+ Set<ComponentTypeDefn> defns = new HashSet<ComponentTypeDefn>();
defns.addAll(c);
- defns.addAll(type.getComponentTypeDefinitions());
+
+ // Hashset does not add, if the object is already present in the collection through addall
+ // so they need to added one by one.
+ Collection<ComponentTypeDefn> overwriteDefns = type.getComponentTypeDefinitions();
+ for (ComponentTypeDefn pd:overwriteDefns) {
+ if (defns.contains(pd)) {
+ defns.remove(pd);
+ }
+ defns.add(pd);
+ }
+
baseType.setComponentTypeDefinitions(defns);
}
return baseType;
+ }
+
+ public static boolean containsBinding(ConfigurationModelContainer configuration, String name) {
+ // Load connector bindings, do we ever need connector types?
+ Collection<ConnectorBinding> bindings = configuration.getConfiguration().getConnectorBindings();
+ for(ConnectorBinding binding:bindings) {
+ if (binding.getFullName().equalsIgnoreCase(name)) {
+ return true;
+ }
+ }
+ return false;
}
}
Modified: trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/services/EmbeddedConfigurationService.java
===================================================================
--- trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/services/EmbeddedConfigurationService.java 2009-07-20 19:35:47 UTC (rev 1157)
+++ trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/services/EmbeddedConfigurationService.java 2009-07-21 16:14:06 UTC (rev 1158)
@@ -345,17 +345,38 @@
vdbFile = getNewVDBLocation(srcVdb);
VDBConfigurationWriter.write(srcVdb, vdbFile);
srcVdb = VDBConfigurationReader.loadVDB(vdbFile, getDeployDir());
- try {
- loadVDB(vdbFile, srcVdb);
- } catch (ApplicationInitializationException e) {
- throw new MetaMatrixComponentException(e);
- }
+ deployVDB(vdbFile, srcVdb);
notifyVDBLoad(def.getName(), def.getVersion());
}
DQPEmbeddedPlugin.logInfo("EmbeddedConfigurationService.vdb_saved", new Object[] {def.getName(), def.getVersion(), vdbFile}); //$NON-NLS-1$
}
+ private VDBArchive loadVDB(VDBArchive vdb, boolean replaceBindings) throws MetaMatrixComponentException {
+ // check if this is a valid VDB
+ if (!isValidVDB(vdb)) {
+ throw new MetaMatrixComponentException(DQPEmbeddedPlugin.Util.getString("EmbeddedConfigurationService.invalid_vdb", vdb.getName())); //$NON-NLS-1$
+ }
+
+ // add connector types from the VDB to the configuration
+ addConnectorTypesInVDB(vdb, replaceBindings);
+
+ // now try to add the connector bindings in the VDB to the configuration service
+ addConnectorBindingsInVDB(vdb, replaceBindings);
+
+ // make sure we have all the bindings, otherwise this is incomplete VDB
+ if (!isFullyConfiguredVDB(vdb)) {
+ // mark as in-active
+ vdb.setStatus(VDBStatus.INCOMPLETE);
+ }
+ else {
+ vdb.setStatus(VDBStatus.ACTIVE);
+ DQPEmbeddedPlugin.logInfo("VDBService.vdb_active", new Object[] {vdb.getName(), vdb.getVersion()}); //$NON-NLS-1$
+ }
+
+ return vdb;
+ }
+
/**
* @see com.metamatrix.dqp.service.ConfigurationService#addVDB(com.metamatrix.common.vdb.api.VDBDefn, boolean)
*/
@@ -363,34 +384,16 @@
if (vdb != null) {
boolean exists = false;
- // check if this is a valid VDB
- if (!isValidVDB(vdb)) {
- throw new MetaMatrixComponentException(DQPEmbeddedPlugin.Util.getString("EmbeddedConfigurationService.invalid_vdb", vdb.getName())); //$NON-NLS-1$
- }
-
// check to see if we already have vdb with same name and version.
VDBArchive existingVdb = getVDB(vdb.getName(), vdb.getVersion());
if (existingVdb != null) {
exists = true;
DQPEmbeddedPlugin.logWarning("VDBService.vdb_already_exists", new Object[] {existingVdb.getName(), existingVdb.getVersion()}); //$NON-NLS-1$
}
-
- // add connector types from the VDB to the configuration
- addConnectorTypesInVDB(vdb, replaceBindings);
- // now try to add the connector bindings in the VDB to the configuration service
- addConnectorBindingsInVDB(vdb, replaceBindings);
+ // load the vdb an its connector bindings
+ vdb = loadVDB(vdb, replaceBindings);
- // make sure we have all the bindings, otherwise this is incomplete VDB
- if (!isFullyConfiguredVDB(vdb)) {
- // mark as in-active
- vdb.setStatus(VDBStatus.INCOMPLETE);
- }
- else {
- vdb.setStatus(VDBStatus.ACTIVE);
- DQPEmbeddedPlugin.logInfo("VDBService.vdb_active", new Object[] {vdb.getName(), vdb.getVersion()}); //$NON-NLS-1$
- }
-
// Now save the VDB for future use using the Configuration.
// configuration may alter the connector bindings on VDB based
// upon preferences set.
@@ -429,7 +432,7 @@
// as bindings are vdb scoped they will never find one, unless there is shared one,
// then we need to use that one
if (existing == null || replace) {
- saveConnectorBinding(deployedBindingName, binding);
+ saveConnectorBinding(deployedBindingName, binding, false);
}
else {
// if the not being replaced, need to use the current one, then
@@ -455,7 +458,7 @@
ConnectorBindingType localType = getConnectorType(typeName);
if (localType == null || replace) {
final ConnectorBindingType type = (ConnectorBindingType)types.get(typeName);
- saveConnectorType(type);
+ saveConnectorType(type, false);
}
} // for
}
@@ -611,9 +614,8 @@
* @param binding
* @return properties for the connector binding given
*/
- public Properties getDefaultProperties(ConnectorBinding binding) {
- ComponentTypeID id = binding.getComponentTypeID();
- return configurationModel.getDefaultPropertyValues(id);
+ public Properties getDefaultProperties(ConnectorBindingType type) {
+ return configurationModel.getDefaultPropertyValues((ComponentTypeID)type.getID());
}
/**
@@ -674,7 +676,7 @@
ComponentType type = getConnectorType(typeName);
if (type != null) {
// Ask the Configuration Manager to save the connector Binding
- binding = saveConnectorBinding(deployedBindingName, binding);
+ binding = saveConnectorBinding(deployedBindingName, binding, true);
DQPEmbeddedPlugin.logInfo("DataService.Connector_Added", new Object[] {binding.getDeployedName()}); //$NON-NLS-1$
return binding;
}
@@ -688,13 +690,13 @@
*/
public ConnectorBinding updateConnectorBinding(ConnectorBinding binding)
throws MetaMatrixComponentException {
- return saveConnectorBinding(binding.getDeployedName(), binding);
+ return saveConnectorBinding(binding.getDeployedName(), binding, false);
}
/**
* Save the Connector Binding to the internal list.
*/
- ConnectorBinding saveConnectorBinding(String deployedBindingName, ConnectorBinding binding)
+ ConnectorBinding saveConnectorBinding(String deployedBindingName, ConnectorBinding binding, boolean updateConfiguration)
throws MetaMatrixComponentException {
if (binding != null) {
@@ -707,9 +709,7 @@
boolean used = (usedVDBs != null && !usedVDBs.isEmpty());
if (used) {
for (VDBArchive vdb:usedVDBs) {
- // Defect 21396 - a call to addConnectorBinding is an implicit replace in case where the
- // binding already exists, so there should be to license check here because we're updating
- // all the deployed VDBs that are using this connector binding.
+
BasicVDBDefn def = vdb.getConfigurationDef();
def.addConnectorBinding(binding);
@@ -718,15 +718,20 @@
}
}
- this.configurationModel = ServerConfigFileWriter.addConnectorBinding(configurationModel, binding);
- saveSystemConfiguration(this.configurationModel);
-
- DQPEmbeddedPlugin.logInfo("EmbeddedConfigurationService.connector_save", new Object[] {deployedBindingName}); //$NON-NLS-1$
+ if (updateConfiguration || isGlobalConnectorBinding(binding)) {
+ this.configurationModel = ServerConfigFileWriter.addConnectorBinding(configurationModel, binding);
+ saveSystemConfiguration(this.configurationModel);
+ DQPEmbeddedPlugin.logInfo("EmbeddedConfigurationService.connector_save", new Object[] {deployedBindingName}); //$NON-NLS-1$
+ }
}
return binding;
}
- /**
+ private boolean isGlobalConnectorBinding(ConnectorBinding binding) {
+ return ServerConfigFileReader.containsBinding(this.configurationModel, binding.getFullName());
+ }
+
+ /**
* @see com.metamatrix.dqp.service.ConfigurationService#deleteConnectorBinding(java.lang.String)
* @since 4.3
*/
@@ -839,14 +844,21 @@
* @since 4.3
*/
public void saveConnectorType(ConnectorBindingType type) throws MetaMatrixComponentException {
- loadedConnectorTypes.put(type.getName(), ServerConfigFileReader.resolvePropertyDefns(type, this.configurationModel));
-
- // Also add binding type to the configuration and save.
- DQPEmbeddedPlugin.logInfo("EmbeddedConfigurationService.connector_type_save", new Object[] {type.getName()}); //$NON-NLS-1$
- this.configurationModel = ServerConfigFileWriter.addConnectorType(configurationModel, type);
- saveSystemConfiguration(this.configurationModel);
+ saveConnectorType(type, true);
}
+
+ private void saveConnectorType(ConnectorBindingType type, boolean updateConfiguration) throws MetaMatrixComponentException {
+ type = (ConnectorBindingType)ServerConfigFileReader.resolvePropertyDefns(type, this.configurationModel);
+ loadedConnectorTypes.put(type.getName(), type);
+ if (updateConfiguration) {
+ // Also add binding type to the configuration and save.
+ DQPEmbeddedPlugin.logInfo("EmbeddedConfigurationService.connector_type_save", new Object[] {type.getName()}); //$NON-NLS-1$
+ this.configurationModel = ServerConfigFileWriter.addConnectorType(configurationModel, type);
+ saveSystemConfiguration(this.configurationModel);
+ }
+ }
+
/**
* @see com.metamatrix.dqp.service.ConfigurationService#deleteConnectorType(java.lang.String)
* @since 4.3
@@ -959,20 +971,17 @@
// Add all the connector types.
Map connectorTypes = configReader.getConnectorTypes();
-
+
+ // load the connector bindings
+ loadConnectorBindings(connectorBindings, connectorTypes);
+
// Load the User defined functions
loadUDF();
// Find all the VDB File in the configuration
// Load them the available VDBs
loadVDBs();
-
- // load the connector bindings
- loadConnectorBindings(connectorBindings, connectorTypes);
-
- // validate the VDBS and make them active if they are not already active based
- // on their binding status
- validateVDBs(loadedVDBs.values());
+
} catch (MetaMatrixComponentException e) {
throw new ApplicationInitializationException(e);
}
@@ -1044,40 +1053,13 @@
binding.setDeployedName(binding.getFullName());
}
deployConnectorBinding(binding.getDeployedName(), binding);
- }
-
- // Loop through all the loaded VDBs and collect what connector bindings
- // connector types are needed for the dqp to start.
- for (VDBArchive vdb: loadedVDBs.values()) {
-
- VDBDefn def = vdb.getConfigurationDef();
-
- // load new connector types from vdb
- for (Iterator it = def.getConnectorTypes().values().iterator(); it.hasNext();) {
- ConnectorBindingType type= (ConnectorBindingType)it.next();
- if (!loadedConnectorTypes.containsKey(type.getName())) {
- loadedConnectorTypes.put(type.getName(), ServerConfigFileReader.resolvePropertyDefns(type, this.configurationModel));
- }
- }
-
- // Load new bindings from the vdb
- for (Iterator it = def.getConnectorBindings().values().iterator(); it.hasNext();) {
- BasicConnectorBinding binding = (BasicConnectorBinding)it.next();
- String deployName = binding.getDeployedName();
- if (deployName == null) {
- deployName = binding.getFullName();
- }
- if (!loadedConnectorBindings.containsKey(deployName)) {
- deployConnectorBinding(deployName, binding);
- }
- }
- }
+ }
}
/**
- * Add the connnector binding with new deployment name
+ * Add the connector binding with new deployment name
* @param binding
* @param deployedName
*/
@@ -1096,6 +1078,7 @@
deployedBinding.setDeployedName(deployedName);
loadedConnectorBindings.put(deployedName, deployedBinding);
notifyConnectorBindingLoad(deployedName);
+ DQPEmbeddedPlugin.logInfo("EmbeddedConfigurationService.connector_binding_deployed", new Object[] {deployedName}); //$NON-NLS-1$
return deployedBinding;
}
@@ -1105,7 +1088,7 @@
* @throws ApplicationInitializationException
* @since 4.3
*/
- void loadVDBs() throws ApplicationInitializationException{
+ void loadVDBs() throws ApplicationInitializationException, MetaMatrixComponentException {
// Get the files to load
HashMap<URL, VDBArchive> vdbFiles;
try {
@@ -1119,20 +1102,19 @@
VDBArchive vdb = vdbFiles.get(vdbURL);
if (vdb != null) {
-
- loadVDB(vdbURL, vdb);
+ // Check to make sure there are two identical VDBs with same version
+ // being loaded into DQP
+ if (getVDB(vdb.getName(), vdb.getVersion()) != null) {
+ throw new ApplicationInitializationException(DQPEmbeddedPlugin.Util.getString("EmbeddedConfigurationService.duplicate_vdb_found", new Object[] {vdbURL})); //$NON-NLS-1$
+ }
+
+ vdb = loadVDB(vdb, false);
+ deployVDB(vdbURL, vdb);
}
}
}
- private void loadVDB(URL vdbURL, VDBArchive vdb)
- throws ApplicationInitializationException {
- // Check to make sure there are two identical VDBs with same version
- // being loaded into DQP
- if (loadedVDBs.get(vdbId(vdb)) != null) {
- throw new ApplicationInitializationException(DQPEmbeddedPlugin.Util.getString("EmbeddedConfigurationService.duplicate_vdb_found", new Object[] {vdbURL})); //$NON-NLS-1$
- }
-
+ private void deployVDB(URL vdbURL, VDBArchive vdb) {
// add vdb to loaded VDBS
loadedVDBs.put(vdbId(vdb), vdb);
availableVDBFiles.put(vdbId(vdb), vdbURL);
@@ -1285,23 +1267,7 @@
DQPEmbeddedPlugin.logError(e, "EmbeddedConfigurationService.vdb_delete_failed", new Object[] {vdbName, vdbVersion}); //$NON-NLS-1$
}
}
-
-
- /**
- * Validate all the VDBs and make them active
- * @param vdbs
- * @since 4.3.2
- */
- void validateVDBs(Collection<VDBArchive> vdbList) throws MetaMatrixComponentException {
- for (VDBArchive vdb:vdbList) {
- if (vdb.getStatus() != VDBStatus.ACTIVE && vdb.getStatus() != VDBStatus.ACTIVE_DEFAULT &&
- isValidVDB(vdb) && isFullyConfiguredVDB(vdb)) {
- vdb.setStatus(VDBStatus.ACTIVE);
- DQPEmbeddedPlugin.logInfo("VDBService.vdb_loded", new Object[] {vdb.getName(), vdb.getVersion()}); //$NON-NLS-1$
- }
- }
- }
-
+
public boolean isFullyConfiguredVDB(VDBArchive vdb) throws MetaMatrixComponentException{
VDBDefn def = vdb.getConfigurationDef();
Collection models = def.getModels();
Modified: trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/services/EmbeddedDataService.java
===================================================================
--- trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/services/EmbeddedDataService.java 2009-07-20 19:35:47 UTC (rev 1157)
+++ trunk/embedded/src/main/java/com/metamatrix/dqp/embedded/services/EmbeddedDataService.java 2009-07-21 16:14:06 UTC (rev 1158)
@@ -47,6 +47,7 @@
import com.metamatrix.common.config.api.ComponentTypeDefn;
import com.metamatrix.common.config.api.ComponentTypeID;
import com.metamatrix.common.config.api.ConnectorBinding;
+import com.metamatrix.common.config.api.ConnectorBindingType;
import com.metamatrix.common.util.crypto.CryptoException;
import com.metamatrix.common.util.crypto.CryptoUtil;
import com.metamatrix.common.vdb.api.VDBArchive;
@@ -130,7 +131,7 @@
VDBService vdbService = (VDBService)this.lookupService(DQPServiceNames.VDB_SERVICE);
List<String> bindingNames = vdbService.getConnectorBindingNames(vdbName, vdbVersion, modelName);
if (bindingNames.isEmpty()) {
- throw new MetaMatrixComponentException("No connectors defined for binding");
+ throw new MetaMatrixComponentException(DQPEmbeddedPlugin.Util.getString("DataService.no_connectors_defined")); //$NON-NLS-1$
}
String deployedConnectorBindingName = bindingNames.get(0);
ConnectorID connector = selectConnector(deployedConnectorBindingName);
@@ -505,23 +506,23 @@
Properties decryptedProperties = new Properties();
// Get all the default properties for the connector type, so that
- // if the connector binding does not have all the proeprties then these
+ // if the connector binding does not have all the properties then these
// will take over, otherwise the connector binding ones overwrite
ComponentTypeID id = binding.getComponentTypeID();
- ComponentType type = getConfigurationService().getConnectorType(id.getName());
-
- Properties props = getConfigurationService().getDefaultProperties(binding);
- if (props == null || props.isEmpty()) {
- ComponentType defaultType = getConfigurationService().getConnectorType("Connector"); //$NON-NLS-1$
- if (defaultType != null) {
- props = defaultType.getDefaultPropertyValues();
- }
+ ConnectorBindingType type = getConfigurationService().getConnectorType(id.getName());
+
+ // Index connector has no formal definition in the configuration file.
+ if (type != null) {
+ Properties props = getConfigurationService().getDefaultProperties(type);
+ if (props == null || props.isEmpty()) {
+ props = type.getDefaultPropertyValues();
+ }
+
+ if (props != null && !props.isEmpty()) {
+ decryptedProperties.putAll(props);
+ }
}
- if (props != null && !props.isEmpty()) {
- decryptedProperties.putAll(props);
- }
-
// now overlay the custom properties from the default properties.
decryptedProperties.putAll(bindingProperties);
Modified: trunk/embedded/src/main/resources/com/metamatrix/dqp/embedded/i18n.properties
===================================================================
--- trunk/embedded/src/main/resources/com/metamatrix/dqp/embedded/i18n.properties 2009-07-20 19:35:47 UTC (rev 1157)
+++ trunk/embedded/src/main/resources/com/metamatrix/dqp/embedded/i18n.properties 2009-07-21 16:14:06 UTC (rev 1158)
@@ -105,6 +105,7 @@
DataService.ext_module_not_found=Required extension module {0} not found in the extensions directory. if not using extension modules ignore.
DataService.Connector_still_used=Connector Binding {0} can not be deleted because it is still in use by VDBs. Remove the assosiation with VDB before deletion.
DataService.Connector_failed_start=Connector Binding {0} failed to start, check the properties and start again.
+DataService.no_connectors_defined=No connectors defined for binding
Failed_To_Service=Failed to get service requested
VDBReader.Invalid_location=Invalid file location \"{0}\". No VDB files (*.DEF) found
@@ -147,6 +148,7 @@
EmbeddedConfigurationService.udf_unload=User Defined Functions file unloaded
EmbeddedConfigurationService.bindings_exceeded_limit=VDB {1} has {0} connector bindings, that exceeds MetaMatrix license allowed number of connector bindings.
EmbeddedConfigurationService.Invalid_connector_binding=Can not assign binding "{2}" to VDB "{0}" with "{1}", because it is a private binding from another vdb.
+EmbeddedConfigurationService.connector_binding_deployed=Connector binding {0} deployed
ExtensionModuleReader.ext_module_failed_to_read=failed to read the extension module \"{0}\" from file system.
ExtensionModuleReader.ext_module_does_not_exist=Extension module \"{0}\" does not exist in the configuration
Modified: trunk/embedded/src/test/java/com/metamatrix/dqp/embedded/services/TestEmbeddedConfigurationService.java
===================================================================
--- trunk/embedded/src/test/java/com/metamatrix/dqp/embedded/services/TestEmbeddedConfigurationService.java 2009-07-20 19:35:47 UTC (rev 1157)
+++ trunk/embedded/src/test/java/com/metamatrix/dqp/embedded/services/TestEmbeddedConfigurationService.java 2009-07-21 16:14:06 UTC (rev 1158)
@@ -356,7 +356,7 @@
BasicConnectorBinding binding = (BasicConnectorBinding)service.getConnectorBinding("QT_ORA9DS_1.BQT2 Oracle 9i Simple Cap"); //$NON-NLS-1$
binding.setDescription(msg);
- service.saveConnectorBinding(binding.getFullName(), binding);
+ service.saveConnectorBinding(binding.getFullName(), binding, true);
// Test and make sure the VDB updated
VDBArchive vdb = service.getVDB("QT_Ora9DS", "1"); //$NON-NLS-1$ //$NON-NLS-2$
Modified: trunk/engine/src/main/java/com/metamatrix/dqp/service/ConfigurationService.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/dqp/service/ConfigurationService.java 2009-07-20 19:35:47 UTC (rev 1157)
+++ trunk/engine/src/main/java/com/metamatrix/dqp/service/ConfigurationService.java 2009-07-21 16:14:06 UTC (rev 1158)
@@ -179,11 +179,11 @@
throws MetaMatrixComponentException;
/**
- * Get the default properties for the Connector binding
- * @param binding
+ * Get the default properties for the Connector type
+ * @param type
* @return properties
*/
- public Properties getDefaultProperties(ConnectorBinding binding);
+ public Properties getDefaultProperties(ConnectorBindingType type);
/**
* Update the Connector Binding, the assumption here that we kept the name same
14 years, 11 months