[teiid-commits] teiid SVN: r1162 - in trunk/engine/src: main/java/com/metamatrix/query/resolver/util and 3 other directories.

teiid-commits at lists.jboss.org
Tue Jul 21 18:53:50 EDT 2009


Author: shawkins
Date: 2009-07-21 18:53:49 -0400 (Tue, 21 Jul 2009)
New Revision: 1162

Added:
   trunk/engine/src/test/java/com/metamatrix/query/processor/TestAggregateProcessing.java
Modified:
   trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleCollapseSource.java
   trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePlanUnions.java
   trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java
   trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRaiseAccess.java
   trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRemoveOptionalJoins.java
   trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java
   trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java
   trunk/engine/src/test/java/com/metamatrix/query/optimizer/TestAggregatePushdown.java
   trunk/engine/src/test/java/com/metamatrix/query/processor/TestProcessor.java
Log:
TEIID-339 adding the feature to push aggregates through unions.
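
A sketch of the rewrite this enables, using the expected source queries from the new
TestAggregatePushdown.testPushDownOverUnion test in this revision (the v_0/g_0/c_0
aliases are names generated by the planner):

    -- user query
    SELECT e1, MAX(e2)
    FROM (SELECT e1, e2 FROM pm1.g1 UNION ALL SELECT e1, e2 FROM pm1.g2) y
    GROUP BY e1

    -- pushed to each union branch; per the RulePushAggregates javadoc below, the
    -- top-level GROUP node then combines the staged aggregates (here a MAX over
    -- the per-branch maximums)
    SELECT v_0.c_0, MAX(v_0.c_1) FROM (SELECT g_0.e1 AS c_0, g_0.e2 AS c_1 FROM pm1.g1 AS g_0) AS v_0 GROUP BY v_0.c_0
    SELECT v_0.c_0, MAX(v_0.c_1) FROM (SELECT g_0.e1 AS c_0, g_0.e2 AS c_1 FROM pm1.g2 AS g_0) AS v_0 GROUP BY v_0.c_0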

Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleCollapseSource.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleCollapseSource.java	2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleCollapseSource.java	2009-07-21 22:53:49 UTC (rev 1162)
@@ -96,7 +96,7 @@
                 	}
                     plan = removeUnnecessaryInlineView(plan, commandRoot);
                     QueryCommand queryCommand = createQuery(metadata, capFinder, accessNode, commandRoot);
-                    addSetOpDistinct(metadata, capFinder, accessNode, queryCommand);
+                	addSetOpDistinct(metadata, capFinder, accessNode, queryCommand);
                     command = queryCommand;
                     if (intoGroup != null) {
                     	Insert insertCommand = new Insert(intoGroup, ResolverUtil.resolveElementsInGroup(intoGroup, metadata), null);
@@ -127,7 +127,7 @@
 			}
 			parent = parent.getParent();
 		}
-		if (!dupRemoval) {
+		if (!dupRemoval || NewCalculateCostUtil.usesKey(queryCommand.getProjectedSymbols(), metadata)) {
 			return;
 		}
 		//TODO: we should also order the results and update the set processing logic

Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePlanUnions.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePlanUnions.java	2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePlanUnions.java	2009-07-21 22:53:49 UTC (rev 1162)
@@ -159,7 +159,6 @@
 
     /**
      * TODO: union and intersect are associative
-     * TODO: the not all flag should be raised to its highest point
      */
     private void collectUnionSources(QueryMetadataInterface metadata,
                                      CapabilitiesFinder capabilitiesFinder,

Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java	2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RulePushAggregates.java	2009-07-21 22:53:49 UTC (rev 1162)
@@ -23,7 +23,9 @@
 package com.metamatrix.query.optimizer.relational.rules;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -41,28 +43,39 @@
 import com.metamatrix.query.analysis.AnalysisRecord;
 import com.metamatrix.query.function.FunctionLibrary;
 import com.metamatrix.query.metadata.QueryMetadataInterface;
+import com.metamatrix.query.metadata.TempMetadataAdapter;
+import com.metamatrix.query.metadata.TempMetadataStore;
 import com.metamatrix.query.optimizer.capabilities.CapabilitiesFinder;
+import com.metamatrix.query.optimizer.capabilities.SourceCapabilities.Capability;
 import com.metamatrix.query.optimizer.relational.OptimizerRule;
 import com.metamatrix.query.optimizer.relational.RuleStack;
 import com.metamatrix.query.optimizer.relational.plantree.NodeConstants;
 import com.metamatrix.query.optimizer.relational.plantree.NodeEditor;
 import com.metamatrix.query.optimizer.relational.plantree.NodeFactory;
 import com.metamatrix.query.optimizer.relational.plantree.PlanNode;
+import com.metamatrix.query.resolver.util.ResolverUtil;
 import com.metamatrix.query.resolver.util.ResolverVisitor;
+import com.metamatrix.query.rewriter.QueryRewriter;
 import com.metamatrix.query.sql.ReservedWords;
 import com.metamatrix.query.sql.lang.CompareCriteria;
 import com.metamatrix.query.sql.lang.Criteria;
+import com.metamatrix.query.sql.lang.IsNullCriteria;
 import com.metamatrix.query.sql.lang.JoinType;
+import com.metamatrix.query.sql.lang.Select;
+import com.metamatrix.query.sql.lang.SetQuery.Operation;
 import com.metamatrix.query.sql.symbol.AggregateSymbol;
 import com.metamatrix.query.sql.symbol.Constant;
 import com.metamatrix.query.sql.symbol.ElementSymbol;
 import com.metamatrix.query.sql.symbol.Expression;
+import com.metamatrix.query.sql.symbol.ExpressionSymbol;
 import com.metamatrix.query.sql.symbol.Function;
 import com.metamatrix.query.sql.symbol.GroupSymbol;
+import com.metamatrix.query.sql.symbol.SearchedCaseExpression;
 import com.metamatrix.query.sql.symbol.SingleElementSymbol;
 import com.metamatrix.query.sql.util.SymbolMap;
 import com.metamatrix.query.sql.visitor.AggregateSymbolCollectorVisitor;
 import com.metamatrix.query.sql.visitor.ElementCollectorVisitor;
+import com.metamatrix.query.sql.visitor.ExpressionMappingVisitor;
 import com.metamatrix.query.sql.visitor.GroupsUsedByElementsVisitor;
 import com.metamatrix.query.util.CommandContext;
 
@@ -88,13 +101,25 @@
                                                    MetaMatrixComponentException {
 
         for (PlanNode groupNode : NodeEditor.findAllNodes(plan, NodeConstants.Types.GROUP, NodeConstants.Types.ACCESS)) {
-            PlanNode joinNode = groupNode.getFirstChild();
+            PlanNode child = groupNode.getFirstChild();
 
-            if (joinNode.getType() != NodeConstants.Types.JOIN) {
+        	List<SingleElementSymbol> groupingExpressions = (List<SingleElementSymbol>)groupNode.getProperty(NodeConstants.Info.GROUP_COLS);
+            
+            if (child.getType() == NodeConstants.Types.SOURCE) {
+                PlanNode setOp = child.getFirstChild();
+                
+                try {
+					pushGroupNodeOverUnion(plan, metadata, capFinder, groupNode, child, groupingExpressions, setOp);
+				} catch (QueryResolverException e) {
+					throw new MetaMatrixComponentException(e);
+				}
                 continue;
             }
+        	
+            if (child.getType() != NodeConstants.Types.JOIN) {
+                continue;
+            }
 
-            List<SingleElementSymbol> groupingExpressions = (List<SingleElementSymbol>)groupNode.getProperty(NodeConstants.Info.GROUP_COLS);
             Set<AggregateSymbol> aggregates = collectAggregates(groupNode);
 
             pushGroupNode(groupNode, groupingExpressions, aggregates, metadata, capFinder);
@@ -103,6 +128,231 @@
         return plan;
     }
 
+	/**
+	 * The plan tree looks like:
+	 * group [agg(x), {a, b}]
+	 *   source
+	 *     set op
+	 *       child 1
+	 *       ...
+	 * 
+	 * we need to make it into
+	 * 
+	 * group [agg(agg(x)), {a, b}]
+	 *   source
+	 *     set op
+	 *       source
+	 *         project
+	 *           [select]
+	 *             group [agg(x), {a, b}]
+	 *               source
+	 *                 child 1
+	 *       ...
+	 * 
+	 * Or, if the child does not support pushdown, we add a dummy aggregate projection:
+     * count(*) = 1, count(x) = case when x is null then 0 else 1 end, avg(x) = x, etc.
+	 */
+	private void pushGroupNodeOverUnion(PlanNode plan,
+			QueryMetadataInterface metadata, CapabilitiesFinder capFinder,
+			PlanNode groupNode, PlanNode child,
+			List<SingleElementSymbol> groupingExpressions, PlanNode setOp)
+			throws MetaMatrixComponentException, QueryMetadataException,
+			QueryPlannerException, QueryResolverException {
+		if (setOp.getType() != NodeConstants.Types.SET_OP || setOp.getProperty(NodeConstants.Info.SET_OPERATION) != Operation.UNION) {
+			return; //must not be a union
+		}
+		LinkedHashSet<AggregateSymbol> aggregates = collectAggregates(groupNode);
+
+		/*
+		 * if there are no aggregates, this is just duplicate removal;
+		 * mark the union as not all, which should be removed later but
+		 * serves as a hint to distribute a distinct to the union queries
+		 */
+		if (aggregates.isEmpty()) {
+			if (groupingExpressions != null && !groupingExpressions.isEmpty()) {
+				setOp.setProperty(NodeConstants.Info.USE_ALL, Boolean.FALSE);
+			}
+			return;
+		}
+		
+		//check to see if any aggregate is dependent upon cardinality
+		boolean cardinalityDependent = false;
+		for (AggregateSymbol aggregateSymbol : aggregates) {
+			if (aggregateSymbol.getAggregateFunction().equals(ReservedWords.COUNT)
+					|| aggregateSymbol.getAggregateFunction().equals(ReservedWords.AVG)
+					|| aggregateSymbol.getAggregateFunction().equals(ReservedWords.SUM)) {
+				cardinalityDependent = true;
+				break;
+			}
+		}
+		
+		LinkedList<PlanNode> unionChildren = new LinkedList<PlanNode>();
+		findUnionChildren(unionChildren, cardinalityDependent, setOp);
+
+		if (unionChildren.size() < 2) {
+			return;
+		}
+		
+		SymbolMap parentMap = (SymbolMap)child.getProperty(NodeConstants.Info.SYMBOL_MAP);
+		List<ElementSymbol> virtualElements = parentMap.getKeys();
+
+		Map<AggregateSymbol, Expression> aggMap = buildAggregateMap(new ArrayList<SingleElementSymbol>(aggregates), metadata, aggregates);
+
+		boolean shouldPushdown = false;
+		List<Boolean> pushdownList = new ArrayList<Boolean>(unionChildren.size());
+		for (PlanNode planNode : unionChildren) {
+			boolean pushdown = canPushGroupByToUnionChild(metadata, capFinder, groupingExpressions, aggregates, planNode); 
+			pushdownList.add(pushdown);
+			shouldPushdown |= pushdown;
+		}
+		
+		if (!shouldPushdown) {
+			return;
+		}
+
+		Iterator<Boolean> pushdownIterator = pushdownList.iterator();
+		for (PlanNode planNode : unionChildren) {
+		    addView(plan, planNode, pushdownIterator.next(), groupingExpressions, aggregates, virtualElements, metadata, capFinder);
+		}
+		
+		//update the parent plan with the staged aggregates and the new projected symbols
+		List<SingleElementSymbol> projectedViewSymbols = (List<SingleElementSymbol>)NodeEditor.findNodePreOrder(child, NodeConstants.Types.PROJECT).getProperty(NodeConstants.Info.PROJECT_COLS);
+		SymbolMap newParentMap = SymbolMap.createSymbolMap(child.getGroups().iterator().next(), projectedViewSymbols);
+		child.setProperty(NodeConstants.Info.SYMBOL_MAP, newParentMap);
+		Map<AggregateSymbol, ElementSymbol> projectedMap = new HashMap<AggregateSymbol, ElementSymbol>();
+		Iterator<AggregateSymbol> aggIter = aggregates.iterator();
+		for (ElementSymbol projectedViewSymbol : newParentMap.getKeys().subList(projectedViewSymbols.size() - aggregates.size(), projectedViewSymbols.size())) {
+			projectedMap.put(aggIter.next(), projectedViewSymbol);
+		}
+		for (Expression expr : aggMap.values()) {
+			ExpressionMappingVisitor.mapExpressions(expr, projectedMap);
+		}
+		mapExpressions(groupNode.getParent(), aggMap);
+	}
+
+	private boolean canPushGroupByToUnionChild(QueryMetadataInterface metadata,
+			CapabilitiesFinder capFinder,
+			List<SingleElementSymbol> groupingExpressions,
+			LinkedHashSet<AggregateSymbol> aggregates, PlanNode planNode)
+			throws QueryMetadataException, MetaMatrixComponentException {
+		if (planNode.getType() != NodeConstants.Types.ACCESS) {
+			return false;
+		}
+		Object modelId = RuleRaiseAccess.getModelIDFromAccess(planNode, metadata);
+		if (!CapabilitiesUtil.supports(Capability.QUERY_FROM_INLINE_VIEWS, modelId, metadata, capFinder) 
+				|| !CapabilitiesUtil.supports(Capability.QUERY_GROUP_BY, modelId, metadata, capFinder)) {
+			return false;
+		}
+		for (AggregateSymbol aggregate : aggregates) {
+			if (!CapabilitiesUtil.supportsAggregateFunction(modelId, aggregate, metadata, capFinder)) {
+				return false;
+			}
+		}
+		if ((groupingExpressions == null || groupingExpressions.isEmpty()) && !CapabilitiesUtil.supports(Capability.QUERY_AGGREGATES_COUNT_STAR, modelId, metadata, capFinder)) {
+			return false;
+		}
+		//TODO: check to see if we are distinct
+		return true;
+	}
+    
+	/**
+	 * Recursively searches the union tree for all applicable source nodes
+	 */
+	private PlanNode findUnionChildren(List<PlanNode> unionChildren, boolean cardinalityDependent, PlanNode setOp) {
+		if (setOp.getType() != NodeConstants.Types.SET_OP || setOp.getProperty(NodeConstants.Info.SET_OPERATION) != Operation.UNION) {
+			return setOp;
+		}
+				
+		if (!setOp.hasBooleanProperty(NodeConstants.Info.USE_ALL)) {
+			if (cardinalityDependent) {
+				return setOp;
+			}
+			setOp.setProperty(NodeConstants.Info.USE_ALL, Boolean.TRUE);
+		}
+		
+		for (PlanNode planNode : setOp.getChildren()) {
+			PlanNode child = findUnionChildren(unionChildren, cardinalityDependent, planNode);
+			if (child != null) {
+				unionChildren.add(child);
+			}
+		}
+		
+		return null;
+	}
+    
+	public void addView(PlanNode root, PlanNode unionSource, boolean pushdown, List<SingleElementSymbol> groupingExpressions,
+			Set<AggregateSymbol> aggregates, List<ElementSymbol> virtualElements,
+			QueryMetadataInterface metadata, CapabilitiesFinder capFinder)
+			throws MetaMatrixComponentException, QueryPlannerException, QueryResolverException {
+		PlanNode originalNode = unionSource;
+    	PlanNode intermediateView = NodeFactory.getNewNode(NodeConstants.Types.SOURCE);
+    	unionSource.addAsParent(intermediateView);
+    	unionSource = intermediateView;
+    	TempMetadataStore store = new TempMetadataStore();
+        TempMetadataAdapter tma = new TempMetadataAdapter(metadata, store);
+        GroupSymbol group = new GroupSymbol("X"); //$NON-NLS-1$
+        try {
+			group.setMetadataID(ResolverUtil.addTempGroup(tma, group, virtualElements, false));
+		} catch (QueryResolverException e) {
+			throw new MetaMatrixComponentException(e);
+		}
+    	intermediateView.addGroup(group);
+    	List<ElementSymbol> projectedSymbols = ResolverUtil.resolveElementsInGroup(group, metadata);
+    	SymbolMap symbolMap = SymbolMap.createSymbolMap(projectedSymbols, 
+				(List<Expression>)NodeEditor.findNodePreOrder(unionSource, NodeConstants.Types.PROJECT).getProperty(NodeConstants.Info.PROJECT_COLS));
+    	intermediateView.setProperty(NodeConstants.Info.SYMBOL_MAP, symbolMap);
+    	
+        Set<SingleElementSymbol> newGroupingExpressions = Collections.emptySet();
+        if (groupingExpressions != null) {
+        	newGroupingExpressions = new HashSet<SingleElementSymbol>();
+        	for (SingleElementSymbol singleElementSymbol : groupingExpressions) {
+				newGroupingExpressions.add((SingleElementSymbol)symbolMap.getKeys().get(virtualElements.indexOf(singleElementSymbol)).clone());
+			}
+        }
+
+        List<SingleElementSymbol> projectedViewSymbols = QueryRewriter.deepClone(projectedSymbols, SingleElementSymbol.class);
+
+        SymbolMap viewMapping = SymbolMap.createSymbolMap(NodeEditor.findParent(unionSource, NodeConstants.Types.SOURCE).getGroups().iterator().next(), projectedSymbols);
+        for (AggregateSymbol agg : aggregates) {
+        	agg = (AggregateSymbol)agg.clone();
+        	ExpressionMappingVisitor.mapExpressions(agg, viewMapping.asMap());
+        	if (pushdown) {
+        		projectedViewSymbols.add(agg);
+        	} else {
+        		if (agg.getAggregateFunction().equals(ReservedWords.COUNT)) {
+        			SearchedCaseExpression count = new SearchedCaseExpression(Arrays.asList(new IsNullCriteria(agg.getExpression())), Arrays.asList(new Constant(Integer.valueOf(0))));
+        			count.setElseExpression(new Constant(Integer.valueOf(1)));
+        			count.setType(DataTypeManager.DefaultDataClasses.INTEGER);
+    				projectedViewSymbols.add(new ExpressionSymbol("stagedAgg", count)); //$NON-NLS-1$
+        		} else { //min, max, sum
+        			Expression ex = agg.getExpression();
+        			ex = ResolverUtil.convertExpression(ex, DataTypeManager.getDataTypeName(agg.getType()));
+        			projectedViewSymbols.add(new ExpressionSymbol("stagedAgg", ex)); //$NON-NLS-1$
+        		}
+        	}
+		}
+
+        if (pushdown) {
+        	unionSource = addGroupBy(unionSource, newGroupingExpressions, new LinkedList<AggregateSymbol>());
+        }
+        
+        PlanNode projectPlanNode = NodeFactory.getNewNode(NodeConstants.Types.PROJECT);
+        unionSource.addAsParent(projectPlanNode);
+        unionSource = projectPlanNode;
+
+        //create proper names for the aggregate symbols
+        Select select = new Select(projectedViewSymbols);
+        QueryRewriter.makeSelectUnique(select, false);
+        projectedViewSymbols = select.getProjectedSymbols();
+        projectPlanNode.setProperty(NodeConstants.Info.PROJECT_COLS, projectedViewSymbols);
+        projectPlanNode.addGroup(group);
+        if (pushdown) {
+        	while (RuleRaiseAccess.raiseAccessNode(root, originalNode, metadata, capFinder, true) != null) {
+        		//continue to raise
+        	}
+        }
+    }
+
     /**
      * Walk up the plan from the GROUP node. Should encounter only (optionally) a SELECT and can stop at the PROJECT node. Need to
      * collect any AggregateSymbols used in the select criteria or projected columns.
@@ -111,8 +361,8 @@
      * @return the set of aggregate symbols found
      * @since 4.2
      */
-    static Set<AggregateSymbol> collectAggregates(PlanNode groupNode) {
-        Set<AggregateSymbol> aggregates = new HashSet<AggregateSymbol>();
+    static LinkedHashSet<AggregateSymbol> collectAggregates(PlanNode groupNode) {
+    	LinkedHashSet<AggregateSymbol> aggregates = new LinkedHashSet<AggregateSymbol>();
         PlanNode currentNode = groupNode.getParent();
         while (currentNode != null) {
             if (currentNode.getType() == NodeConstants.Types.PROJECT) {
@@ -146,7 +396,7 @@
                                CapabilitiesFinder capFinder) throws MetaMatrixComponentException,
                                                             QueryMetadataException, QueryPlannerException {
 
-        Map<PlanNode, List<SingleElementSymbol>> aggregateMap = createNodeMapping(groupNode, allAggregates);
+        Map<PlanNode, List<AggregateSymbol>> aggregateMap = createNodeMapping(groupNode, allAggregates);
         Map<PlanNode, List<SingleElementSymbol>> groupingMap = createNodeMapping(groupNode, groupingExpressions);
 
         Set<PlanNode> possibleTargetNodes = new HashSet<PlanNode>(aggregateMap.keySet());
@@ -154,7 +404,7 @@
 
         for (PlanNode planNode : possibleTargetNodes) {
             Set<SingleElementSymbol> stagedGroupingSymbols = new LinkedHashSet<SingleElementSymbol>();
-            List<SingleElementSymbol> aggregates = aggregateMap.get(planNode);
+            List<AggregateSymbol> aggregates = aggregateMap.get(planNode);
             List<SingleElementSymbol> groupBy = groupingMap.get(planNode);
 
             if (!canPush(groupNode, stagedGroupingSymbols, planNode)) {
@@ -168,15 +418,19 @@
             collectSymbolsFromOtherAggregates(allAggregates, aggregates, planNode, stagedGroupingSymbols);
             
             //if the grouping expressions are unique then there's no point in staging the aggregate
-            //TODO: the uses key check is not really accurate.
+            //TODO: the uses key check is not really accurate; it doesn't take into consideration where
+            //we are in the plan.
+            //if a key column is used after a non 1-1 join or a union all, then it may be non-unique.
             if (NewCalculateCostUtil.usesKey(stagedGroupingSymbols, metadata)) {
-                continue;
+            	continue;
             }
 
+        	//TODO: we should be doing another cost check here - especially if the aggregate cannot be pushed.
+            
             if (aggregates != null) {
                 stageAggregates(groupNode, metadata, stagedGroupingSymbols, aggregates);
             } else {
-                aggregates = new ArrayList<SingleElementSymbol>();
+                aggregates = new ArrayList<AggregateSymbol>(1);
             }
 
             if (aggregates.isEmpty() && stagedGroupingSymbols.isEmpty()) {
@@ -184,23 +438,8 @@
             }
             //TODO: if aggregates is empty, then could insert a dup remove node instead
             
-            PlanNode stageGroup = NodeFactory.getNewNode(NodeConstants.Types.GROUP);
-            planNode.addAsParent(stageGroup);
-
-            if (!stagedGroupingSymbols.isEmpty()) {
-                stageGroup.setProperty(NodeConstants.Info.GROUP_COLS, new ArrayList<SingleElementSymbol>(stagedGroupingSymbols));
-                stageGroup.addGroups(GroupsUsedByElementsVisitor.getGroups(stagedGroupingSymbols));
-            } else {
-                // if the source has no rows we need to insert a select node with criteria count(*)>0
-                PlanNode selectNode = NodeFactory.getNewNode(NodeConstants.Types.SELECT);
-                AggregateSymbol count = new AggregateSymbol("stagedAgg", ReservedWords.COUNT, false, null); //$NON-NLS-1$
-                aggregates.add(count); //consider the count aggregate for the push down call below
-                selectNode.setProperty(NodeConstants.Info.SELECT_CRITERIA, new CompareCriteria(count, CompareCriteria.GT,
-                                                                                               new Constant(new Integer(0))));
-                selectNode.setProperty(NodeConstants.Info.IS_HAVING, Boolean.TRUE);
-                stageGroup.addAsParent(selectNode);
-            }
-
+            PlanNode stageGroup = addGroupBy(planNode, stagedGroupingSymbols, aggregates);
+    		
             //check for push down
             if (stageGroup.getFirstChild().getType() == NodeConstants.Types.ACCESS 
                             && RuleRaiseAccess.canRaiseOverGroupBy(stageGroup, stageGroup.getFirstChild(), aggregates, metadata, capFinder)) {
@@ -212,18 +451,40 @@
         }
     }
 
-    private void stageAggregates(PlanNode groupNode,
+	private PlanNode addGroupBy(PlanNode planNode,
+			Collection<SingleElementSymbol> stagedGroupingSymbols,
+			Collection<AggregateSymbol> aggregates) {
+		PlanNode stageGroup = NodeFactory.getNewNode(NodeConstants.Types.GROUP);
+		planNode.addAsParent(stageGroup);
+
+		if (!stagedGroupingSymbols.isEmpty()) {
+		    stageGroup.setProperty(NodeConstants.Info.GROUP_COLS, new ArrayList<SingleElementSymbol>(stagedGroupingSymbols));
+		    stageGroup.addGroups(GroupsUsedByElementsVisitor.getGroups(stagedGroupingSymbols));
+		} else {
+		    // if the source has no rows we need to insert a select node with criteria count(*)>0
+		    PlanNode selectNode = NodeFactory.getNewNode(NodeConstants.Types.SELECT);
+		    AggregateSymbol count = new AggregateSymbol("stagedAgg", ReservedWords.COUNT, false, null); //$NON-NLS-1$
+		    aggregates.add(count); //consider the count aggregate for the push down call below
+		    selectNode.setProperty(NodeConstants.Info.SELECT_CRITERIA, new CompareCriteria(count, CompareCriteria.GT,
+		                                                                                   new Constant(new Integer(0))));
+		    selectNode.setProperty(NodeConstants.Info.IS_HAVING, Boolean.TRUE);
+		    stageGroup.addAsParent(selectNode);
+		}
+		return stageGroup;
+	}
+
+    static void stageAggregates(PlanNode groupNode,
                                  QueryMetadataInterface metadata,
-                                 Set<SingleElementSymbol> stagedGroupingSymbols,
-                                 List<SingleElementSymbol> aggregates) throws MetaMatrixComponentException, QueryPlannerException {
+                                 Collection<SingleElementSymbol> stagedGroupingSymbols,
+                                 Collection<AggregateSymbol> aggregates) throws MetaMatrixComponentException, QueryPlannerException {
         //remove any aggregates that are computed over a group by column
         Set<Expression> expressions = new HashSet<Expression>();
         for (SingleElementSymbol expression : stagedGroupingSymbols) {
             expressions.add(SymbolMap.getExpression(expression));
         }
         
-        for (final Iterator<SingleElementSymbol> iterator = aggregates.iterator(); iterator.hasNext();) {
-            final AggregateSymbol symbol = (AggregateSymbol)iterator.next();
+        for (final Iterator<AggregateSymbol> iterator = aggregates.iterator(); iterator.hasNext();) {
+            final AggregateSymbol symbol = iterator.next();
             Expression expr = symbol.getExpression();
             if (expr == null) {
                 continue;
@@ -248,7 +509,7 @@
     }
     
     private void collectSymbolsFromOtherAggregates(Collection<AggregateSymbol> allAggregates,
-                                                      Collection<SingleElementSymbol> aggregates,
+                                                      Collection<AggregateSymbol> aggregates,
                                                       PlanNode current,
                                                       Set<SingleElementSymbol> stagedGroupingSymbols) {
         Set<AggregateSymbol> otherAggs = new HashSet<AggregateSymbol>(allAggregates);
@@ -314,13 +575,13 @@
         return true;
     }
 
-    private Map<PlanNode, List<SingleElementSymbol>> createNodeMapping(PlanNode groupNode,
-                                                                       Collection<? extends SingleElementSymbol> expressions) {
-        Map<PlanNode, List<SingleElementSymbol>> result = new HashMap<PlanNode, List<SingleElementSymbol>>();
+    private <T extends SingleElementSymbol> Map<PlanNode, List<T>> createNodeMapping(PlanNode groupNode,
+                                                                       Collection<T> expressions) {
+        Map<PlanNode, List<T>> result = new HashMap<PlanNode, List<T>>();
         if (expressions == null) {
             return result;
         }
-        for (SingleElementSymbol aggregateSymbol : expressions) {
+        for (T aggregateSymbol : expressions) {
             if (aggregateSymbol instanceof AggregateSymbol) {
                 AggregateSymbol partitionAgg = (AggregateSymbol)aggregateSymbol;
                 if (partitionAgg.isDistinct()) {
@@ -350,9 +611,9 @@
                 continue;
             }
 
-            List<SingleElementSymbol> symbols = result.get(originatingNode);
+            List<T> symbols = result.get(originatingNode);
             if (symbols == null) {
-                symbols = new LinkedList<SingleElementSymbol>();
+                symbols = new LinkedList<T>();
                 result.put(originatingNode, symbols);
             }
             symbols.add(aggregateSymbol);
@@ -360,7 +621,7 @@
         return result;
     }
 
-    private Map<AggregateSymbol, Expression> buildAggregateMap(Collection<SingleElementSymbol> aggregateExpressions,
+    private static Map<AggregateSymbol, Expression> buildAggregateMap(Collection<? extends SingleElementSymbol> aggregateExpressions,
                                                                         QueryMetadataInterface metadata, Set<AggregateSymbol> nestedAggregates) throws QueryResolverException,
                                                                                                         MetaMatrixComponentException {
         Map<AggregateSymbol, Expression> aggMap = new HashMap<AggregateSymbol, Expression>();

Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRaiseAccess.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRaiseAccess.java	2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRaiseAccess.java	2009-07-21 22:53:49 UTC (rev 1162)
@@ -85,7 +85,7 @@
     /**
      * @return null if nothing changed, and a new plan root if something changed
      */
-    PlanNode raiseAccessNode(PlanNode rootNode, PlanNode accessNode, QueryMetadataInterface metadata, CapabilitiesFinder capFinder, boolean afterJoinPlanning) 
+    static PlanNode raiseAccessNode(PlanNode rootNode, PlanNode accessNode, QueryMetadataInterface metadata, CapabilitiesFinder capFinder, boolean afterJoinPlanning) 
     throws QueryPlannerException, QueryMetadataException, MetaMatrixComponentException {
         
         PlanNode parentNode = accessNode.getParent();
@@ -683,7 +683,7 @@
         return accessModelID;    
     }
     
-    private boolean canRaiseOverSetQuery(PlanNode setOpNode,
+    private static boolean canRaiseOverSetQuery(PlanNode setOpNode,
                                      QueryMetadataInterface metadata,
                                      CapabilitiesFinder capFinder) throws QueryMetadataException, MetaMatrixComponentException {
         

Modified: trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRemoveOptionalJoins.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRemoveOptionalJoins.java	2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/optimizer/relational/rules/RuleRemoveOptionalJoins.java	2009-07-21 22:53:49 UTC (rev 1162)
@@ -23,7 +23,6 @@
 package com.metamatrix.query.optimizer.relational.rules;
 
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -43,7 +42,6 @@
 import com.metamatrix.query.optimizer.relational.plantree.NodeEditor;
 import com.metamatrix.query.optimizer.relational.plantree.PlanNode;
 import com.metamatrix.query.resolver.util.ResolverUtil;
-import com.metamatrix.query.sql.LanguageObject;
 import com.metamatrix.query.sql.ReservedWords;
 import com.metamatrix.query.sql.lang.Criteria;
 import com.metamatrix.query.sql.lang.JoinType;

Modified: trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java	2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java	2009-07-21 22:53:49 UTC (rev 1162)
@@ -607,7 +607,7 @@
         }
     }
 
-    public static void addTempGroup(TempMetadataAdapter metadata,
+    public static TempMetadataID addTempGroup(TempMetadataAdapter metadata,
                                     GroupSymbol symbol,
                                     List symbols, boolean tempTable) throws QueryResolverException {
         HashSet names = new HashSet();
@@ -622,7 +622,7 @@
             resolveNullLiterals(symbols);
         }
         TempMetadataStore store = metadata.getMetadataStore();
-        store.addTempGroup(symbol.getName(), symbols, !tempTable, tempTable);
+        return store.addTempGroup(symbol.getName(), symbols, !tempTable, tempTable);
     }
     
     public static void addTempTable(TempMetadataAdapter metadata,

Modified: trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java	2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java	2009-07-21 22:53:49 UTC (rev 1162)
@@ -775,7 +775,7 @@
         rewriteExpressions(query.getSelect(), procCommand, metadata, context);
 
         if (query.getOrderBy() != null && !query.getIsXML()) {
-            makeSelectUnique(query, true);
+            makeSelectUnique(query.getSelect(), true);
             rewriteOrderBy(query, procCommand, metadata, context);
         }
         
@@ -947,7 +947,7 @@
         setQuery.setRightQuery((QueryCommand)rewriteCommand(setQuery.getRightQuery(), procCommand, metadata, context, true));
 
         if (setQuery.getOrderBy() != null) {
-            makeSelectUnique(setQuery.getProjectedQuery(), true);
+            makeSelectUnique(setQuery.getProjectedQuery().getSelect(), true);
             rewriteOrderBy(setQuery, procCommand, metadata, context);
         }
         
@@ -2355,7 +2355,7 @@
         TempMetadataStore store = new TempMetadataStore();
         TempMetadataAdapter tma = new TempMetadataAdapter(metadata, store);
         Query firstProject = nested.getProjectedQuery(); 
-        makeSelectUnique(firstProject, false);
+        makeSelectUnique(firstProject.getSelect(), false);
         
         store.addTempGroup(inlineGroup.getName(), nested.getProjectedSymbols());
         inlineGroup.setMetadataID(store.getTempGroupID(inlineGroup.getName()));
@@ -2367,9 +2367,7 @@
             actualTypes.add(ses.getType());
         }
         List selectSymbols = SetQuery.getTypedProjectedSymbols(ResolverUtil.resolveElementsInGroup(inlineGroup, tma), actualTypes);
-        for (final Iterator iterator = selectSymbols.iterator(); iterator.hasNext();) {
-            select.addSymbol((SingleElementSymbol)((SingleElementSymbol)iterator.next()).clone());
-        } 
+        select.addSymbols(deepClone(selectSymbols, SingleElementSymbol.class));
         query.setFrom(from); 
         QueryResolver.resolveCommand(query, tma);
         query.setOption(nested.getOption());
@@ -2383,11 +2381,19 @@
         return query;
     }    
     
-    public static void makeSelectUnique(Query query, boolean expressionSymbolsOnly) {
+    public static <S extends Expression, T extends S> List<S> deepClone(List<T> collection, Class<S> clazz) {
+    	ArrayList<S> result = new ArrayList<S>(collection.size());
+    	for (Expression expression : collection) {
+			result.add((S)expression.clone());
+		}
+    	return result;
+    }
+    
+    public static void makeSelectUnique(Select select, boolean expressionSymbolsOnly) {
         
-        query.getSelect().setSymbols(query.getSelect().getProjectedSymbols());
+        select.setSymbols(select.getProjectedSymbols());
         
-        List symbols = query.getSelect().getSymbols();
+        List symbols = select.getSymbols();
         
         HashSet<String> uniqueNames = new HashSet<String>();
         

Modified: trunk/engine/src/test/java/com/metamatrix/query/optimizer/TestAggregatePushdown.java
===================================================================
--- trunk/engine/src/test/java/com/metamatrix/query/optimizer/TestAggregatePushdown.java	2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/test/java/com/metamatrix/query/optimizer/TestAggregatePushdown.java	2009-07-21 22:53:49 UTC (rev 1162)
@@ -35,13 +35,24 @@
 import com.metamatrix.query.unittest.FakeMetadataFactory;
 
 public class TestAggregatePushdown {
-    
+
+	public static BasicSourceCapabilities getAggregateCapabilities() {
+		BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
+        caps.setCapabilitySupport(Capability.QUERY_FROM_INLINE_VIEWS, true);
+        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_MAX, true);
+        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
+        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_MIN, true);
+        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_AVG, true);
+        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT, true);
+        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT_STAR, true);
+        caps.setCapabilitySupport(Capability.QUERY_GROUP_BY, true);
+        caps.setCapabilitySupport(Capability.QUERY_HAVING, true);
+		return caps;
+	}
+
     private FakeCapabilitiesFinder getAggregatesFinder() {
         FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
-        BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_MAX, true);
+        BasicSourceCapabilities caps = getAggregateCapabilities();
         capFinder.addCapabilities("m1", caps); //$NON-NLS-1$
         capFinder.addCapabilities("m2", caps); //$NON-NLS-1$
 
@@ -50,13 +61,9 @@
 
     @Test public void testCase6327() {
         FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
-        BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT_STAR, true);
+        BasicSourceCapabilities caps = getAggregateCapabilities();
         caps.setCapabilitySupport(Capability.QUERY_FROM_JOIN_SELFJOIN, true);
         caps.setCapabilitySupport(Capability.QUERY_FROM_GROUP_ALIAS, true);
-        caps.setCapabilitySupport(Capability.QUERY_FUNCTIONS_IN_GROUP_BY, true);
         caps.setFunctionSupport("convert", true); //$NON-NLS-1$
         capFinder.addCapabilities("BQT1", caps); //$NON-NLS-1$
         capFinder.addCapabilities("BQT2", caps); //$NON-NLS-1$
@@ -90,12 +97,8 @@
      */
     @Test public void testAggregateOfJoinExpression() throws Exception {
         FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
-        BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT_STAR, true);
+        BasicSourceCapabilities caps = getAggregateCapabilities();
         caps.setCapabilitySupport(Capability.QUERY_FROM_GROUP_ALIAS, true);
-        caps.setCapabilitySupport(Capability.QUERY_FUNCTIONS_IN_GROUP_BY, true);
         caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
         caps.setFunctionSupport("convert", true); //$NON-NLS-1$
         capFinder.addCapabilities("BQT1", caps); //$NON-NLS-1$
@@ -129,9 +132,7 @@
      */
     @Test public void testInvariantAggregate() throws Exception {
         FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
-        BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_MAX, true);
+        BasicSourceCapabilities caps = getAggregateCapabilities();
         caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
         capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
         capFinder.addCapabilities("pm2", caps); //$NON-NLS-1$
@@ -163,10 +164,7 @@
      */
     @Test public void testCase6211() throws Exception {
         FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
-        BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
-        caps.setCapabilitySupport(Capability.QUERY_FUNCTIONS_IN_GROUP_BY, true);
+        BasicSourceCapabilities caps = getAggregateCapabilities();
         caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
         caps.setFunctionSupport("convert", true); //$NON-NLS-1$
         capFinder.addCapabilities("BQT1", caps); //$NON-NLS-1$
@@ -263,10 +261,7 @@
      */
     @Test public void testAvgAggregate() throws Exception {
         FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
-        BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
+        BasicSourceCapabilities caps = getAggregateCapabilities();
         caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
         capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
         capFinder.addCapabilities("pm2", caps); //$NON-NLS-1$
@@ -295,10 +290,7 @@
     
     @Test public void testCountAggregate() throws Exception {
         FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
-        BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
+        BasicSourceCapabilities caps = getAggregateCapabilities();
         caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
         capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
         capFinder.addCapabilities("pm2", caps); //$NON-NLS-1$
@@ -327,10 +319,7 @@
     
     @Test public void testOuterJoinPreventsPushdown() throws Exception {
         FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
-        BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
+        BasicSourceCapabilities caps = getAggregateCapabilities();
         caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
         capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
         capFinder.addCapabilities("pm2", caps); //$NON-NLS-1$
@@ -363,10 +352,7 @@
      */
     @Test public void testCase5724() {
         FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
-        BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_COUNT_STAR, true);
+        BasicSourceCapabilities caps = getAggregateCapabilities();
         caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
         capFinder.addCapabilities("BQT1", caps); //$NON-NLS-1$
         capFinder.addCapabilities("BQT2", caps); //$NON-NLS-1$
@@ -399,12 +385,9 @@
 
     @Test public void testCase6210() throws Exception {
         FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
-        BasicSourceCapabilities caps = TestOptimizer.getTypicalCapabilities();
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES, true);
-        caps.setCapabilitySupport(Capability.QUERY_AGGREGATES_SUM, true);
+        BasicSourceCapabilities caps = getAggregateCapabilities();
         caps.setCapabilitySupport(Capability.QUERY_FROM_JOIN_SELFJOIN, true);
         caps.setCapabilitySupport(Capability.QUERY_FROM_GROUP_ALIAS, true);
-        caps.setCapabilitySupport(Capability.QUERY_FUNCTIONS_IN_GROUP_BY, true);
         caps.setCapabilitySupport(Capability.QUERY_ORDERBY, false);
         caps.setFunctionSupport("convert", true); //$NON-NLS-1$
         caps.setFunctionSupport("/", true); //$NON-NLS-1$
@@ -745,6 +728,114 @@
                                         0,      // Sort
                                         0       // UnionAll
                                     });
-    } 
+    }
+    
+    @Test public void testPushDownOverUnion() throws Exception {
+        FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
+        BasicSourceCapabilities caps = getAggregateCapabilities();
+        capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
         
+        ProcessorPlan plan = TestOptimizer.helpPlan("select e1, max(e2) from (select e1, e2 from pm1.g1 union all select e1, e2 from pm1.g2) y group by e1", FakeMetadataFactory.example1Cached(), null, capFinder,  //$NON-NLS-1$
+            new String[]{"SELECT v_0.c_0, MAX(v_0.c_1) FROM (SELECT g_0.e1 AS c_0, g_0.e2 AS c_1 FROM pm1.g1 AS g_0) AS v_0 GROUP BY v_0.c_0", //$NON-NLS-1$
+        	"SELECT v_0.c_0, MAX(v_0.c_1) FROM (SELECT g_0.e1 AS c_0, g_0.e2 AS c_1 FROM pm1.g2 AS g_0) AS v_0 GROUP BY v_0.c_0"}, ComparisonMode.EXACT_COMMAND_STRING); //$NON-NLS-1$
+        TestOptimizer.checkNodeTypes(plan, new int[] {
+            2,      // Access
+            0,      // DependentAccess
+            0,      // DependentSelect
+            0,      // DependentProject
+            0,      // DupRemove
+            1,      // Grouping
+            0,      // NestedLoopJoinStrategy
+            0,      // MergeJoinStrategy
+            0,      // Null
+            0,      // PlanExecution
+            1,      // Project
+            0,      // Select
+            0,      // Sort
+            1       // UnionAll
+        }); 
+    }
+    
+    @Test public void testPushDownOverUnion1() throws Exception {
+        FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
+        BasicSourceCapabilities caps = getAggregateCapabilities();
+        capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
+        
+        ProcessorPlan plan = TestOptimizer.helpPlan("select max(e2) from (select e1, e2 from pm1.g1 union all select e1, e2 from pm1.g2) z", FakeMetadataFactory.example1Cached(), null, capFinder,  //$NON-NLS-1$
+            new String[]{"SELECT MAX(v_0.c_0) FROM (SELECT g_0.e2 AS c_0 FROM pm1.g2 AS g_0) AS v_0 HAVING COUNT(*) > 0", //$NON-NLS-1$
+        	"SELECT MAX(v_0.c_0) FROM (SELECT g_0.e2 AS c_0 FROM pm1.g1 AS g_0) AS v_0 HAVING COUNT(*) > 0"}, ComparisonMode.EXACT_COMMAND_STRING); //$NON-NLS-1$
+        TestOptimizer.checkNodeTypes(plan, new int[] {
+            2,      // Access
+            0,      // DependentAccess
+            0,      // DependentSelect
+            0,      // DependentProject
+            0,      // DupRemove
+            1,      // Grouping
+            0,      // NestedLoopJoinStrategy
+            0,      // MergeJoinStrategy
+            0,      // Null
+            0,      // PlanExecution
+            1,      // Project
+            0,      // Select
+            0,      // Sort
+            1       // UnionAll
+        }); 
+    }
+    
+    /**
+     * We won't do the pushdown here since the aggregate depends upon the cardinality
+     */
+    @Test public void testPushDownOverUnion2() throws Exception {
+        FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
+        BasicSourceCapabilities caps = getAggregateCapabilities();
+        capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
+        
+        ProcessorPlan plan = TestOptimizer.helpPlan("select count(e2) from (select e1, e2 from pm1.g1 union select e1, e2 from pm1.g2) z", FakeMetadataFactory.example1Cached(), null, capFinder,  //$NON-NLS-1$
+            new String[]{"SELECT DISTINCT g_0.e1, g_0.e2 FROM pm1.g2 AS g_0", //$NON-NLS-1$
+        	"SELECT DISTINCT g_0.e1, g_0.e2 FROM pm1.g1 AS g_0"}, ComparisonMode.EXACT_COMMAND_STRING); //$NON-NLS-1$
+        TestOptimizer.checkNodeTypes(plan, new int[] {
+            2,      // Access
+            0,      // DependentAccess
+            0,      // DependentSelect
+            0,      // DependentProject
+            1,      // DupRemove
+            1,      // Grouping
+            0,      // NestedLoopJoinStrategy
+            0,      // MergeJoinStrategy
+            0,      // Null
+            0,      // PlanExecution
+            1,      // Project
+            0,      // Select
+            0,      // Sort
+            1       // UnionAll
+        }); 
+    }
+    
+    @Test public void testPushDownOverUnionMixed() throws Exception {
+        FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
+        BasicSourceCapabilities caps = getAggregateCapabilities();
+        capFinder.addCapabilities("pm1", caps); //$NON-NLS-1$
+        capFinder.addCapabilities("pm2", TestOptimizer.getTypicalCapabilities()); //$NON-NLS-1$
+        
+        ProcessorPlan plan = TestOptimizer.helpPlan("select max(e2) from (select e1, e2 from pm1.g1 union all select e1, e2 from pm2.g2) z", FakeMetadataFactory.example1Cached(), null, capFinder,  //$NON-NLS-1$
+            new String[]{"SELECT MAX(v_0.c_0) FROM (SELECT g_0.e2 AS c_0 FROM pm1.g1 AS g_0) AS v_0 HAVING COUNT(*) > 0", //$NON-NLS-1$
+        	"SELECT g_0.e2 FROM pm2.g2 AS g_0"}, ComparisonMode.EXACT_COMMAND_STRING); //$NON-NLS-1$
+        TestOptimizer.checkNodeTypes(plan, new int[] {
+            2,      // Access
+            0,      // DependentAccess
+            0,      // DependentSelect
+            0,      // DependentProject
+            0,      // DupRemove
+            1,      // Grouping
+            0,      // NestedLoopJoinStrategy
+            0,      // MergeJoinStrategy
+            0,      // Null
+            0,      // PlanExecution
+            2,      // Project
+            0,      // Select
+            0,      // Sort
+            1       // UnionAll
+        }); 
+    }
+        
 }

Added: trunk/engine/src/test/java/com/metamatrix/query/processor/TestAggregateProcessing.java
===================================================================
--- trunk/engine/src/test/java/com/metamatrix/query/processor/TestAggregateProcessing.java	                        (rev 0)
+++ trunk/engine/src/test/java/com/metamatrix/query/processor/TestAggregateProcessing.java	2009-07-21 22:53:49 UTC (rev 1162)
@@ -0,0 +1,273 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * See the COPYRIGHT.txt file distributed with this work for information
+ * regarding copyright ownership.  Some portions may be licensed
+ * to Red Hat, Inc. under one or more contributor license agreements.
+ * 
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ * 
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ * 
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA.
+ */
+
+package com.metamatrix.query.processor;
+
+import static com.metamatrix.query.processor.TestProcessor.*;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+
+import com.metamatrix.query.optimizer.TestAggregatePushdown;
+import com.metamatrix.query.optimizer.TestOptimizer;
+import com.metamatrix.query.optimizer.capabilities.FakeCapabilitiesFinder;
+import com.metamatrix.query.sql.lang.Command;
+import com.metamatrix.query.unittest.FakeMetadataFacade;
+import com.metamatrix.query.unittest.FakeMetadataFactory;
+import com.metamatrix.query.unittest.FakeMetadataObject;
+
+public class TestAggregateProcessing {
+
+	static void sampleDataBQT3(FakeDataManager dataMgr) throws Exception {
+		FakeMetadataFacade metadata = FakeMetadataFactory.exampleBQTCached();
+
+		// Group bqt1.smalla
+		FakeMetadataObject groupID = (FakeMetadataObject) metadata
+				.getGroupID("bqt1.smalla"); //$NON-NLS-1$
+		List elementIDs = metadata.getElementIDsInGroupID(groupID);
+		List elementSymbols = FakeDataStore.createElements(elementIDs);
+
+		List[] tuples = new List[20];
+		for (int i = 0; i < tuples.length; i++) {
+			tuples[i] = new ArrayList(17);
+			tuples[i].add(new Integer(i));
+			tuples[i].add("" + i); //$NON-NLS-1$
+			tuples[i].add(new Integer(i + 1));
+			for (int j = 0; j < 14; j++) {
+				tuples[i].add(null);
+			}
+		}
+
+		dataMgr.registerTuples(groupID, elementSymbols, tuples);
+
+		// Group bqt2.mediumb
+		groupID = (FakeMetadataObject) metadata.getGroupID("bqt2.mediumb"); //$NON-NLS-1$
+		elementIDs = metadata.getElementIDsInGroupID(groupID);
+		elementSymbols = FakeDataStore.createElements(elementIDs);
+
+		tuples = new List[20];
+		for (int i = 0; i < tuples.length; i++) {
+			tuples[i] = new ArrayList(17);
+			tuples[i].add(new Integer(i));
+			for (int j = 0; j < 16; j++) {
+				tuples[i].add(null);
+			}
+		}
+
+		dataMgr.registerTuples(groupID, elementSymbols, tuples);
+	}
+
+	private void sampleDataBQT_defect9842(FakeDataManager dataMgr) throws Exception {
+		FakeMetadataFacade metadata = FakeMetadataFactory.exampleBQTCached();
+
+		// Group bqt1.smalla
+		FakeMetadataObject groupID = (FakeMetadataObject) metadata
+				.getGroupID("bqt1.smalla"); //$NON-NLS-1$
+		List elementIDs = metadata.getElementIDsInGroupID(groupID);
+		List elementSymbols = FakeDataStore.createElements(elementIDs);
+
+		List[] tuples = new List[5];
+		for (int i = 0; i < tuples.length; i++) {
+			int k = i + 10;
+			tuples[i] = new ArrayList(17);
+			if (i < 2) {
+				tuples[i].add(new Integer(1)); // need duplicate values
+			} else {
+				tuples[i].add(new Integer(2)); // need duplicate values
+			}
+			tuples[i].add("" + k); //$NON-NLS-1$
+			tuples[i].add(new Integer(k + 1));
+			tuples[i].add("" + (k + 1)); //$NON-NLS-1$
+			tuples[i].add(new Float(0.5));
+			for (int j = 0; j < 8; j++) {
+				tuples[i].add(null);
+			}
+			tuples[i].add(new Short((short) k));
+			tuples[i].add(null);
+			tuples[i].add(new BigDecimal("" + k)); //$NON-NLS-1$
+			tuples[i].add(null);
+		}
+
+		dataMgr.registerTuples(groupID, elementSymbols, tuples);
+	}
+
+	@Test public void testAggregateOnBQT() throws Exception {
+		// Create query
+		String sql = "SELECT IntKey, SUM(IntNum) FROM BQT1.SmallA GROUP BY IntKey, IntNum HAVING IntNum > 10 ORDER BY IntKey"; //$NON-NLS-1$
+
+		// Create expected results
+		List[] expected = new List[] {
+				Arrays.asList(new Object[] { new Integer(10), new Long(11) }),
+				Arrays.asList(new Object[] { new Integer(11), new Long(12) }),
+				Arrays.asList(new Object[] { new Integer(12), new Long(13) }),
+				Arrays.asList(new Object[] { new Integer(13), new Long(14) }),
+				Arrays.asList(new Object[] { new Integer(14), new Long(15) }),
+				Arrays.asList(new Object[] { new Integer(15), new Long(16) }),
+				Arrays.asList(new Object[] { new Integer(16), new Long(17) }),
+				Arrays.asList(new Object[] { new Integer(17), new Long(18) }),
+				Arrays.asList(new Object[] { new Integer(18), new Long(19) }),
+				Arrays.asList(new Object[] { new Integer(19), new Long(20) }) };
+
+		// Construct data manager with data
+		FakeDataManager dataManager = new FakeDataManager();
+		sampleDataBQT3(dataManager);
+
+		// Plan query
+		ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory
+				.exampleBQTCached());
+
+		// Run query
+		helpProcess(plan, dataManager, expected);
+	}
+
+	@Test public void testAggregateOnBQT2() throws Exception {
+		// Create query
+		String sql = "SELECT IntNum, IsNotNull FROM (SELECT IntNum, LongNum, COUNT(IntNum) AS IsNotNull FROM BQT1.SmallA GROUP BY IntNum, LongNum HAVING LongNum IS NULL ) AS x ORDER BY IntNum, IsNotNull"; //$NON-NLS-1$
+
+		// Create expected results
+		List[] expected = new List[] {
+				Arrays.asList(new Object[] { new Integer(1), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(2), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(3), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(4), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(5), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(6), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(7), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(8), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(9), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(10), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(11), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(12), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(13), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(14), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(15), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(16), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(17), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(18), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(19), new Integer(1) }),
+				Arrays.asList(new Object[] { new Integer(20), new Integer(1) }) };
+
+		// Construct data manager with data
+		FakeDataManager dataManager = new FakeDataManager();
+		sampleDataBQT3(dataManager);
+
+		// Plan query
+		ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory
+				.exampleBQTCached());
+
+		// Run query
+		helpProcess(plan, dataManager, expected);
+	}
+
+	@Test public void testAggregateOnBQT_defect9842() throws Exception {
+		// Create query
+		String sql = "SELECT IntKey, SUM((BigDecimalValue)*(ShortValue)-(BigDecimalValue)*(ShortValue)*(FloatNum)) " + //$NON-NLS-1$
+				"AS MySum FROM BQT1.SmallA GROUP BY IntKey ORDER BY IntKey"; //$NON-NLS-1$
+
+		// Create expected results
+		List[] expected = new List[] {
+				Arrays.asList(new Object[] { new Integer(1),
+						new BigDecimal("110.5") }), //$NON-NLS-1$
+				Arrays.asList(new Object[] { new Integer(2),
+						new BigDecimal("254.5") }) //$NON-NLS-1$
+		};
+
+		// Construct data manager with data
+		FakeDataManager dataManager = new FakeDataManager();
+		sampleDataBQT_defect9842(dataManager);
+
+		// Plan query
+		ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory
+				.exampleBQTCached());
+
+		// Run query
+		helpProcess(plan, dataManager, expected);
+	}
+	
+    @Test public void testCase186260() {
+        /*
+         * This case revealed that an expression like "COUNT( DISTINCT e1 )", where the type of e1 is 
+         * anything but integer, was not handled properly.  We tried to use "integer" (the type of the
+         * COUNT expression) to work with the e1 tuples.
+         */
+        // Create query 
+        String sql = "SELECT COUNT(DISTINCT pm1.g2.e1), COUNT(DISTINCT pm1.g3.e1) FROM pm1.g2, pm1.g3"; //$NON-NLS-1$
+        
+        // Create expected results
+        List[] expected = new List[] { 
+            Arrays.asList(new Object[] { new Integer(3), new Integer(3) }),
+        };    
+    
+        // Construct data manager with data
+        FakeDataManager dataManager = new FakeDataManager();
+        sampleData1(dataManager);
+        
+        // Plan query
+        ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory.example1Cached());
+        
+        // Run query
+        helpProcess(plan, dataManager, expected);
+    }
+    
+    @SuppressWarnings("unchecked")
+	@Test public void testAggregatePushdown() {
+    	Command command = helpParse("select e1, count(e2), max(e2) from (select e1, e2, e3 from pm1.g1 union all select e1, e2, e3 from pm1.g2 union all select e1, e2, e3 from pm2.g1) z group by e1"); //$NON-NLS-1$
+    	
+    	FakeCapabilitiesFinder capFinder = new FakeCapabilitiesFinder();
+    	capFinder.addCapabilities("pm1", TestAggregatePushdown.getAggregateCapabilities()); //$NON-NLS-1$
+    	capFinder.addCapabilities("pm2", TestOptimizer.getTypicalCapabilities()); //$NON-NLS-1$
+    	HardcodedDataManager dataManager = new HardcodedDataManager();
+    	
+    	dataManager.addData("SELECT v_0.c_0, COUNT(v_0.c_1), MAX(v_0.c_1) FROM (SELECT g_0.e1 AS c_0, g_0.e2 AS c_1 FROM pm1.g1 AS g_0) AS v_0 GROUP BY v_0.c_0", //$NON-NLS-1$ 
+    			new List[] {
+    				Arrays.asList("a", Integer.valueOf(2), Boolean.TRUE), //$NON-NLS-1$
+    				Arrays.asList("b", null, null) //$NON-NLS-1$
+    			});
+    	dataManager.addData("SELECT v_0.c_0, COUNT(v_0.c_1), MAX(v_0.c_1) FROM (SELECT g_0.e1 AS c_0, g_0.e2 AS c_1 FROM pm1.g2 AS g_0) AS v_0 GROUP BY v_0.c_0", //$NON-NLS-1$ 
+    			new List[] {
+    				Arrays.asList("a", Integer.valueOf(3), Boolean.FALSE), //$NON-NLS-1$
+    				Arrays.asList("b", null, null) //$NON-NLS-1$
+    			});
+    	dataManager.addData("SELECT g_0.e1, g_0.e2 FROM pm2.g1 AS g_0", //$NON-NLS-1$ 
+    			new List[] {
+    				Arrays.asList("a", Boolean.FALSE), //$NON-NLS-1$
+    				Arrays.asList("xyz", Boolean.FALSE), //$NON-NLS-1$
+    				Arrays.asList(null, Boolean.TRUE),
+    			});
+    	
+    	ProcessorPlan plan = helpGetPlan(command, FakeMetadataFactory.example1Cached(), capFinder);
+    	
+    	List[] expected = new List[] { 
+                Arrays.asList(null, Integer.valueOf(1), Boolean.TRUE),
+                Arrays.asList("a", Integer.valueOf(6), Boolean.TRUE), //$NON-NLS-1$
+                Arrays.asList("b", null, null), //$NON-NLS-1$
+                Arrays.asList("xyz", Integer.valueOf(1), Boolean.FALSE) //$NON-NLS-1$
+            };    
+    	
+    	helpProcess(plan, dataManager, expected);
+    }
+
+}
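
For context on the new testAggregatePushdown case: the hardcoded source queries show COUNT and MAX being evaluated per union branch, with the engine combining the partial results afterwards. Below is a minimal sketch of that merge step, assuming each branch delivers rows of (group key, partial COUNT, partial MAX); the PartialAggregateMerger class is hypothetical and not part of this commit or of Teiid's grouping node.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Hypothetical helper: folds per-branch partial aggregate rows into final
    // rows.  Partial COUNTs are summed, partial MAXes are re-maxed, and groups
    // that only ever saw null partials stay null.
    class PartialAggregateMerger {

        private static class Accum {
            Long count;
            Comparable max;
        }

        private final Map<Object, Accum> groups = new LinkedHashMap<Object, Accum>();

        @SuppressWarnings("unchecked")
        void addPartial(Object groupKey, Number partialCount, Comparable partialMax) {
            Accum a = groups.get(groupKey);
            if (a == null) {
                a = new Accum();
                groups.put(groupKey, a);
            }
            if (partialCount != null) {
                long current = (a.count == null) ? 0L : a.count.longValue();
                a.count = Long.valueOf(current + partialCount.longValue());
            }
            if (partialMax != null && (a.max == null || a.max.compareTo(partialMax) < 0)) {
                a.max = partialMax;
            }
        }

        List<List<?>> getResult() {
            List<List<?>> rows = new ArrayList<List<?>>();
            for (Map.Entry<Object, Accum> e : groups.entrySet()) {
                rows.add(Arrays.asList(e.getKey(), e.getValue().count, e.getValue().max));
            }
            return rows;
        }
    }

With the data in the test, group "a" receives partial counts 2, 3, and 1 (the raw pm2.g1 rows are grouped by the engine first), so the merged COUNT is 6 and the merged MAX is TRUE, matching the expected rows.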


Property changes on: trunk/engine/src/test/java/com/metamatrix/query/processor/TestAggregateProcessing.java
___________________________________________________________________
Name: svn:mime-type
   + text/plain
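
The comment in testCase186260 explains that COUNT(DISTINCT e1) must collect the distinct e1 values using e1's own type, even though the result type of the COUNT expression is integer. A minimal, hypothetical accumulator (not Teiid's actual aggregate implementation) keeps that distinction explicit:

    import java.util.HashSet;
    import java.util.Set;

    // Hypothetical illustration: distinct inputs are stored with their own
    // runtime type (String, BigDecimal, ...); only the final result of
    // COUNT(DISTINCT ...) is an Integer.
    class CountDistinctAccumulator {

        private final Set<Object> seen = new HashSet<Object>();

        void addInput(Object value) {
            if (value != null) {      // COUNT(DISTINCT ...) ignores NULL inputs
                seen.add(value);      // stored as-is, never coerced to integer
            }
        }

        Integer getResult() {
            return Integer.valueOf(seen.size());
        }
    }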

Modified: trunk/engine/src/test/java/com/metamatrix/query/processor/TestProcessor.java
===================================================================
--- trunk/engine/src/test/java/com/metamatrix/query/processor/TestProcessor.java	2009-07-21 22:51:14 UTC (rev 1161)
+++ trunk/engine/src/test/java/com/metamatrix/query/processor/TestProcessor.java	2009-07-21 22:53:49 UTC (rev 1162)
@@ -622,90 +622,7 @@
             fail("Exception building test data (" + e.getClass().getName() + "): " + e.getMessage());    //$NON-NLS-1$ //$NON-NLS-2$
         }
     }
-
-    private void sampleDataBQT3(FakeDataManager dataMgr) {
-        FakeMetadataFacade metadata = FakeMetadataFactory.exampleBQTCached();
     
-        try { 
-            // Group bqt1.smalla
-            FakeMetadataObject groupID = (FakeMetadataObject) metadata.getGroupID("bqt1.smalla"); //$NON-NLS-1$
-            List elementIDs = metadata.getElementIDsInGroupID(groupID);
-            List elementSymbols = FakeDataStore.createElements(elementIDs);
-        
-            List[] tuples = new List[20];
-            for(int i=0; i<tuples.length; i++) {
-                tuples[i] = new ArrayList(17);
-                tuples[i].add(new Integer(i));
-                tuples[i].add("" + i); //$NON-NLS-1$
-                tuples[i].add(new Integer(i+1));
-                for(int j=0; j<14; j++) {
-                    tuples[i].add(null);    
-                }    
-            }
-        
-            dataMgr.registerTuples(groupID, elementSymbols, tuples);
-
-            // Group bqt2.mediumb
-            groupID = (FakeMetadataObject) metadata.getGroupID("bqt2.mediumb"); //$NON-NLS-1$
-            elementIDs = metadata.getElementIDsInGroupID(groupID);
-            elementSymbols = FakeDataStore.createElements(elementIDs);
-        
-            tuples = new List[20];
-            for(int i=0; i<tuples.length; i++) {
-                tuples[i] = new ArrayList(17);
-                tuples[i].add(new Integer(i));
-                for(int j=0; j<16; j++) {
-                    tuples[i].add(null);    
-                }    
-            }
-        
-            dataMgr.registerTuples(groupID, elementSymbols, tuples);
-
-        }catch(Throwable e) { 
-            e.printStackTrace();
-            fail("Exception building test data (" + e.getClass().getName() + "): " + e.getMessage());    //$NON-NLS-1$ //$NON-NLS-2$
-        }
-    }   
-    
-    private void sampleDataBQT_defect9842(FakeDataManager dataMgr) {
-        FakeMetadataFacade metadata = FakeMetadataFactory.exampleBQTCached();
-    
-        try { 
-            // Group bqt1.smalla
-            FakeMetadataObject groupID = (FakeMetadataObject) metadata.getGroupID("bqt1.smalla"); //$NON-NLS-1$
-            List elementIDs = metadata.getElementIDsInGroupID(groupID);
-            List elementSymbols = FakeDataStore.createElements(elementIDs);
-        
-            List[] tuples = new List[5];
-            for(int i=0; i<tuples.length; i++) {
-                int k = i + 10;
-                tuples[i] = new ArrayList(17);
-                if (i<2){
-                    tuples[i].add(new Integer(1)); //need duplicate values
-                } else {
-                    tuples[i].add(new Integer(2)); //need duplicate values
-                }
-                tuples[i].add("" + k); //$NON-NLS-1$
-                tuples[i].add(new Integer(k+1));
-                tuples[i].add("" + (k+1)); //$NON-NLS-1$
-                tuples[i].add(new Float(0.5));
-                for(int j=0; j<8; j++) {
-                    tuples[i].add(null);    
-                }    
-                tuples[i].add(new Short((short)k));
-                tuples[i].add(null);
-                tuples[i].add(new BigDecimal("" + k)); //$NON-NLS-1$
-                tuples[i].add(null);
-            }
-        
-            dataMgr.registerTuples(groupID, elementSymbols, tuples);
-
-        }catch(Throwable e) { 
-            e.printStackTrace();
-            fail("Exception building test data (" + e.getClass().getName() + "): " + e.getMessage());    //$NON-NLS-1$ //$NON-NLS-2$
-        }
-    }
-    
     /**
      * Just want to register two rows of all the integral types to test AVG 
      * @param dataMgr
@@ -4288,97 +4205,7 @@
        // Run query
        helpProcess(plan, dataManager, expected);
    }
-
-   // SELECT IntKey, SUM(IntNum) FROM BQT1.SmallA GROUP BY IntKey, IntNum HAVING IntNum > 10 ORDER BY IntKey
-   @Test public void testAggregateOnBQT() {
-       // Create query
-       String sql = "SELECT IntKey, SUM(IntNum) FROM BQT1.SmallA GROUP BY IntKey, IntNum HAVING IntNum > 10 ORDER BY IntKey"; //$NON-NLS-1$
-
-       // Create expected results
-       List[] expected = new List[] {
-           Arrays.asList(new Object[] { new Integer(10), new Long(11) }),
-           Arrays.asList(new Object[] { new Integer(11), new Long(12) }),
-           Arrays.asList(new Object[] { new Integer(12), new Long(13) }),
-           Arrays.asList(new Object[] { new Integer(13), new Long(14) }),
-           Arrays.asList(new Object[] { new Integer(14), new Long(15) }),
-           Arrays.asList(new Object[] { new Integer(15), new Long(16) }),
-           Arrays.asList(new Object[] { new Integer(16), new Long(17) }),
-           Arrays.asList(new Object[] { new Integer(17), new Long(18) }),
-           Arrays.asList(new Object[] { new Integer(18), new Long(19) }),
-           Arrays.asList(new Object[] { new Integer(19), new Long(20) })       };
-
-       // Construct data manager with data
-       FakeDataManager dataManager = new FakeDataManager();
-       sampleDataBQT3(dataManager);
-
-       // Plan query
-       ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory.exampleBQTCached());
-
-       // Run query
-       helpProcess(plan, dataManager, expected);
-   }   
    
-   // SELECT IntNum, IsNotNull FROM (SELECT IntNum, LongNum, COUNT(IntNum) AS IsNotNull FROM BQT2.SmallB GROUP BY IntNum, LongNum HAVING LongNum >= 0 ) AS x ORDER BY IntNum, IsNotNull
-   @Test public void testAggregateOnBQT2() {
-       // Create query
-       String sql = "SELECT IntNum, IsNotNull FROM (SELECT IntNum, LongNum, COUNT(IntNum) AS IsNotNull FROM BQT1.SmallA GROUP BY IntNum, LongNum HAVING LongNum IS NULL ) AS x ORDER BY IntNum, IsNotNull"; //$NON-NLS-1$
-
-       // Create expected results
-       List[] expected = new List[] {
-           Arrays.asList(new Object[] { new Integer(1), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(2), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(3), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(4), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(5), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(6), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(7), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(8), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(9), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(10), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(11), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(12), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(13), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(14), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(15), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(16), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(17), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(18), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(19), new Integer(1) }),
-           Arrays.asList(new Object[] { new Integer(20), new Integer(1) })      };
-
-       // Construct data manager with data
-       FakeDataManager dataManager = new FakeDataManager();
-       sampleDataBQT3(dataManager);
-
-       // Plan query
-       ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory.exampleBQTCached());
-
-       // Run query
-       helpProcess(plan, dataManager, expected);
-   }  
-
-   @Test public void testAggregateOnBQT_defect9842(){
-       // Create query
-       String sql = "SELECT IntKey, SUM((BigDecimalValue)*(ShortValue)-(BigDecimalValue)*(ShortValue)*(FloatNum)) " + //$NON-NLS-1$
-                    "AS MySum FROM BQT1.SmallA GROUP BY IntKey ORDER BY IntKey"; //$NON-NLS-1$
-
-       // Create expected results
-       List[] expected = new List[] {
-           Arrays.asList(new Object[] { new Integer(1), new BigDecimal("110.5") }), //$NON-NLS-1$
-           Arrays.asList(new Object[] { new Integer(2), new BigDecimal("254.5") }) //$NON-NLS-1$
-                         };
-
-       // Construct data manager with data
-       FakeDataManager dataManager = new FakeDataManager();
-       sampleDataBQT_defect9842(dataManager);
-
-       // Plan query
-       ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory.exampleBQTCached());
-
-       // Run query
-       helpProcess(plan, dataManager, expected);        
-   }
-   
    @Test public void testSelectWithNoFrom() { 
        // Create query 
        String sql = "SELECT 5"; //$NON-NLS-1$
@@ -6075,7 +5902,7 @@
         
         // Construct data manager with data
         FakeDataManager dataManager = new FakeDataManager();
-        sampleDataBQT3(dataManager);
+        sampleDataBQT1(dataManager);
 
         // Run query
         helpProcess(plan, dataManager, expectedResults);        
@@ -7656,33 +7483,7 @@
         //we expect 2 queries, 1 for the outer and 1 for the subquery
         assertEquals(2, dataManager.getCommandHistory().size());
     }
-
-
-    @Test public void testCase186260() {
-        /*
-         * This case revealed that an expression like "COUNT( DISTINCT e1 )", where the type of e1 is 
-         * anything but integer, was not handled properly.  We tried to use "integer" (the type of the
-         * COUNT expression) to work with the e1 tuples.
-         */
-        // Create query 
-        String sql = "SELECT COUNT(DISTINCT pm1.g2.e1), COUNT(DISTINCT pm1.g3.e1) FROM pm1.g2, pm1.g3"; //$NON-NLS-1$
-        
-        // Create expected results
-        List[] expected = new List[] { 
-            Arrays.asList(new Object[] { new Integer(3), new Integer(3) }),
-        };    
     
-        // Construct data manager with data
-        FakeDataManager dataManager = new FakeDataManager();
-        sampleData1(dataManager);
-        
-        // Plan query
-        ProcessorPlan plan = helpGetPlan(sql, FakeMetadataFactory.example1Cached());
-        
-        // Run query
-        helpProcess(plan, dataManager, expected);
-    }
-    
     @Test public void testOrderByOutsideOfSelect() {
         // Create query 
         String sql = "SELECT e1 FROM (select e1, e2 || e3 as e2 from pm1.g2) x order by e2"; //$NON-NLS-1$


