teiid SVN: r1355 - in trunk/test-integration/db/src/main/resources/datasources: postgres and 1 other directory.
by teiid-commits@lists.jboss.org
Author: vhalbert(a)redhat.com
Date: 2009-09-14 23:49:18 -0400 (Mon, 14 Sep 2009)
New Revision: 1355
Added:
trunk/test-integration/db/src/main/resources/datasources/postgres/
trunk/test-integration/db/src/main/resources/datasources/postgres/example_connection.properties
Log:
Teiid 773 - adding postgres example datasource
Added: trunk/test-integration/db/src/main/resources/datasources/postgres/example_connection.properties
===================================================================
--- trunk/test-integration/db/src/main/resources/datasources/postgres/example_connection.properties (rev 0)
+++ trunk/test-integration/db/src/main/resources/datasources/postgres/example_connection.properties 2009-09-15 03:49:18 UTC (rev 1355)
@@ -0,0 +1,17 @@
+db.type=postgres
+driver=org.postgresql.Driver
+URL=jdbc:postgresql://(servername):5432/(databasename)
+User=
+Password=
+
+servername=(servername)
+databasename=(databasename)
+portnumber=1527
+ds-jndiname=(servername)_1527
+
+Immutable=true
+
+
+
+
+
Property changes on: trunk/test-integration/db/src/main/resources/datasources/postgres/example_connection.properties
___________________________________________________________________
Name: svn:mime-type
+ text/plain
16 years, 7 months
teiid SVN: r1354 - trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource.
by teiid-commits@lists.jboss.org
Author: vhalbert(a)redhat.com
Date: 2009-09-14 23:47:00 -0400 (Mon, 14 Sep 2009)
New Revision: 1354
Added:
trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DataSourceMgr.java
Log:
Teiid 773 - organize integration test - changes to support multiple datasources
Added: trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DataSourceMgr.java
===================================================================
--- trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DataSourceMgr.java (rev 0)
+++ trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DataSourceMgr.java 2009-09-15 03:47:00 UTC (rev 1354)
@@ -0,0 +1,290 @@
+/*
+ * Copyright (c) 2000-2007 MetaMatrix, Inc.
+ * All rights reserved.
+ */
+package org.teiid.test.framework.datasource;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.teiid.test.framework.exception.QueryTestFailedException;
+import org.teiid.test.framework.exception.TransactionRuntimeException;
+
+import com.metamatrix.common.xml.XMLReaderWriter;
+import com.metamatrix.common.xml.XMLReaderWriterImpl;
+
+/**
+ * The DataSourceMgr is responsible for loading and managing the datasource
+ * mapping properties file {@see #DATASOURCE_MAPPING_FILE} and the mapped
+ * datasource properties files. The {@link #getDatasourceProperties(String)}
+ * returns the properties defined for that datasourceid, which is mapped in the
+ * mappings file. This mapping allows the test
+ *
+ * @author vanhalbert
+ *
+ */
+public class DataSourceMgr {
+
+ static final String DIRECTORY = "datasources/";
+ static final String DATASOURCE_MAPPING_FILE = "datasource_mapping.xml";
+
+ private static DataSourceMgr _instance = null;
+
+ private Map<String, Map<String, DataSource>>dstypeMap = new HashMap<String, Map<String, DataSource>>(); //key=datasourcetype
+
+ private Map<String, DataSource> allDatasourcesMap = new HashMap<String, DataSource>(); // key=datasource name
+
+ private Map<String, DataSource> modelToDatasourceMap = new HashMap<String, DataSource>(); // key=modelname
+
+ private Set<String> usedDataSources = new HashSet<String>();
+
+
+ private DataSourceMgr() {
+ }
+
+ public static synchronized DataSourceMgr getInstance() {
+ if (_instance == null) {
+ _instance = new DataSourceMgr();
+ try {
+ _instance.loadDataSourceMappings();
+ } catch (QueryTestFailedException e) {
+ // TODO Auto-generated catch block
+ throw new TransactionRuntimeException(e);
+ } catch (TransactionRuntimeException e) {
+ // TODO Auto-generated catch block
+ throw e;
+ }
+
+ }
+ return _instance;
+ }
+
+ public int numberOfAvailDataSources() {
+ return allDatasourcesMap.size();
+ }
+
+ public DataSource getDatasource(String datasourceid, String modelName)
+ throws QueryTestFailedException {
+ DataSource ds = null;
+
+ // map the datasource to the model and datasourceid
+ // this is so the next time this combination is requested,
+ // the same datasource is returned to ensure when consecutive calls during the process
+ // corresponds to the same datasource
+ String key = modelName + "_"+datasourceid;
+
+ if (modelToDatasourceMap.containsKey(key)) {
+ return modelToDatasourceMap.get(key);
+ }
+ if (dstypeMap.containsKey(datasourceid)) {
+
+ Map datasources = dstypeMap.get(datasourceid);
+ Iterator<DataSource> it= datasources.values().iterator();
+ while(it.hasNext()) {
+ DataSource checkit = it.next();
+ if (!usedDataSources.contains(checkit.getName())) {
+ usedDataSources.add(checkit.getName());
+ ds = checkit;
+ break;
+ }
+ }
+
+ } else {
+ ds = allDatasourcesMap.get(datasourceid);
+ }
+ if (ds == null) {
+ throw new QueryTestFailedException("DatasourceID " + datasourceid
+ + " is not a defined datasource in the mappings file ");
+
+ }
+
+ modelToDatasourceMap.put(key, ds);
+ return ds;
+
+ }
+
+ public Properties getDatasourceProperties(String datasourceid, String modelname)
+ throws QueryTestFailedException {
+ DataSource ds = getDatasource(datasourceid, modelname);
+
+ return ds.getProperties();
+
+ }
+
+ private void loadDataSourceMappings()
+ throws QueryTestFailedException {
+
+ Document doc = null;
+ XMLReaderWriter readerWriter = new XMLReaderWriterImpl();
+
+ try {
+ doc = readerWriter.readDocument(getInputStream());
+ } catch (JDOMException e) {
+ e.printStackTrace();
+ throw new TransactionRuntimeException(e);
+ } catch (IOException e) {
+ e.printStackTrace();
+ throw new TransactionRuntimeException(e);
+ }
+
+ Element root = doc.getRootElement();
+ List<Element> rootElements = root.getChildren();
+ if (rootElements == null || rootElements.size() == 0) {
+ throw new TransactionRuntimeException("No children defined under root element " + DSCONFIG);
+ }
+
+ for (Iterator<Element> it = rootElements.iterator(); it.hasNext();) {
+ Element type = it.next();
+// System.out.println("Loading ds transactional type " + type.getName());
+ String typename = type.getAttributeValue(Property.Attributes.NAME);
+
+ List<Element> typeElements = type.getChildren();
+ if (typeElements != null) {
+ Map<String, DataSource> datasources = new HashMap<String, DataSource>(typeElements.size());
+
+ for (Iterator<Element> typeit = typeElements.iterator(); typeit.hasNext();) {
+ Element e = typeit.next();
+// System.out.println("Loading ds type " + e.getName());
+ addDataSource(e, typename, datasources);
+ }
+ dstypeMap.put(typename, datasources);
+ allDatasourcesMap.putAll(datasources);
+
+ }
+
+
+ }
+
+ if (dstypeMap == null || dstypeMap.isEmpty()) {
+ throw new TransactionRuntimeException(
+ "No Datasources were found in the mappings file");
+ }
+
+ System.out.println("Number of datasource types loaded " + dstypeMap.size());
+ System.out.println("Number of total datasource mappings loaded " + allDatasourcesMap.size());
+
+
+
+ }
+
+ private static void addDataSource(Element element, String type, Map<String, DataSource> datasources) {
+ String name = element.getAttributeValue(Property.Attributes.NAME);
+ Properties props = getProperties(element);
+
+ String dir = props.getProperty(DataSource.DIRECTORY);
+ String dsfile = DIRECTORY + dir + "/connection.properties";
+ Properties dsprops = loadProperties(dsfile);
+ if (dsprops != null) {
+ props.putAll(dsprops);
+ DataSource ds = new DataSource(name,
+ type,
+ props);
+ datasources.put(ds.getName(), ds);
+ System.out.println("Loaded datasource " + ds.getName());
+
+ }
+
+ }
+
+
+ private static Properties loadProperties(String filename) {
+ Properties props = null;
+
+ try {
+ InputStream in = DataSourceMgr.class.getResourceAsStream("/"
+ + filename);
+ if (in != null) {
+ props = new Properties();
+ props.load(in);
+ return props;
+ }
+ return null;
+ } catch (IOException e) {
+ throw new TransactionRuntimeException("Error loading properties from file '"
+ + filename + "'" + e.getMessage());
+ }
+ }
+
+ private static Properties getProperties(Element propertiesElement) {
+ Properties props = new Properties();
+
+ List<Element> properties = propertiesElement
+ .getChildren(Property.ELEMENT);
+ Iterator<Element> iterator = properties.iterator();
+ while (iterator.hasNext()) {
+ Element propertyElement = (Element) iterator.next();
+ String propertyName = propertyElement
+ .getAttributeValue(Property.Attributes.NAME);
+ String propertyValue = propertyElement.getText();
+
+ props.setProperty(propertyName, propertyValue);
+
+ }
+ return props;
+ }
+
+ private static InputStream getInputStream() {
+
+ InputStream in = DataSourceMgr.class.getResourceAsStream("/"
+ + DIRECTORY + DATASOURCE_MAPPING_FILE);
+ if (in != null) {
+
+ return in;
+ } else {
+ throw new RuntimeException(
+ "Failed to load datasource mapping file '" + DIRECTORY
+ + DATASOURCE_MAPPING_FILE + "'");
+ }
+
+ }
+
+ static final String DSCONFIG = "datasourceconfig";
+ static final String DATASOURCETYPE = "datasourcetype";
+ static final String DATASOURCE = "datasource";
+
+ static class Property {
+
+ /**
+ * This is the name of the Property Element.
+ */
+ public static final String ELEMENT = "property"; //$NON-NLS-1$
+
+ /**
+ * This class defines the Attributes of the Element class that contains
+ * it.
+ */
+ public static class Attributes {
+ public static final String NAME = "name"; //$NON-NLS-1$
+ }
+
+ }
+
+ public static void main(String[] args) {
+ DataSourceMgr mgr = DataSourceMgr.getInstance();
+
+ try {
+ DataSource ds1 = mgr.getDatasource("ds_mysql5", "model1");
+
+ DataSource ds2 = mgr.getDatasource("ds_mysql5", "model1");
+ if (ds1 != ds2) {
+ throw new RuntimeException("Datasources are not the same");
+ }
+ System.out.println("Value for ds_mysql5: "
+ + mgr.getDatasourceProperties("ds_mysql5", "model1"));
+ } catch (QueryTestFailedException e) {
+ e.printStackTrace();
+ }
+
+ }
+
+}
16 years, 7 months
teiid SVN: r1353 - in trunk/test-integration/db/src: main/java/org/teiid/test/framework/connection and 7 other directories.
by teiid-commits@lists.jboss.org
Author: vhalbert(a)redhat.com
Date: 2009-09-14 23:45:38 -0400 (Mon, 14 Sep 2009)
New Revision: 1353
Modified:
trunk/test-integration/db/src/main/java/org/teiid/test/framework/TransactionContainer.java
trunk/test-integration/db/src/main/java/org/teiid/test/framework/connection/ConnectionStrategy.java
trunk/test-integration/db/src/main/java/org/teiid/test/framework/connection/ConnectionUtil.java
trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DataSource.java
trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DatasourceMgr.java
trunk/test-integration/db/src/main/java/org/teiid/test/framework/transaction/LocalTransaction.java
trunk/test-integration/db/src/main/java/org/teiid/test/framework/transaction/TransactionFactory.java
trunk/test-integration/db/src/main/resources/datasources/readme.txt
trunk/test-integration/db/src/main/resources/default-config.properties
trunk/test-integration/db/src/test/java/org/teiid/test/framework/AbstractQueryTransactionTest.java
trunk/test-integration/db/src/test/java/org/teiid/test/framework/datasource/SingleDataSourceSetup.java
trunk/test-integration/db/src/test/java/org/teiid/test/framework/datasource/TwoDataSourceSetup.java
trunk/test-integration/db/src/test/java/org/teiid/test/testcases/TwoSourceTransactionTest.java
Log:
Teiid 773 - organize integration test - changes to support multiple datasources
Modified: trunk/test-integration/db/src/main/java/org/teiid/test/framework/TransactionContainer.java
===================================================================
--- trunk/test-integration/db/src/main/java/org/teiid/test/framework/TransactionContainer.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/main/java/org/teiid/test/framework/TransactionContainer.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -9,7 +9,7 @@
import javax.sql.XAConnection;
-import org.teiid.test.framework.datasource.DatasourceMgr;
+import org.teiid.test.framework.datasource.DataSourceMgr;
import org.teiid.test.framework.exception.QueryTestFailedException;
import org.teiid.test.framework.exception.TransactionRuntimeException;
import org.teiid.test.framework.connection.ConnectionStrategy;
@@ -19,7 +19,7 @@
public abstract class TransactionContainer {
- private boolean debug = false;
+ private boolean debug = true;
protected Properties props;
protected ConnectionStrategy connStrategy;
@@ -29,6 +29,7 @@
this.props = new Properties();
this.props.putAll(this.connStrategy.getEnvironment());
+
}
@@ -38,9 +39,9 @@
*
*/
protected boolean turnOffTest (int numberofDataSources) {
- boolean rtn = (numberofDataSources > DatasourceMgr.getInstance().numberOfAvailDataSources());
+ boolean rtn = (numberofDataSources > DataSourceMgr.getInstance().numberOfAvailDataSources());
if (rtn) {
- System.out.println("Required Number of DataSources is " + numberofDataSources + " but availables sources is " + DatasourceMgr.getInstance().numberOfAvailDataSources());
+ System.out.println("Required Number of DataSources is " + numberofDataSources + " but availables sources is " + DataSourceMgr.getInstance().numberOfAvailDataSources());
}
return rtn;
}
Modified: trunk/test-integration/db/src/main/java/org/teiid/test/framework/connection/ConnectionStrategy.java
===================================================================
--- trunk/test-integration/db/src/main/java/org/teiid/test/framework/connection/ConnectionStrategy.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/main/java/org/teiid/test/framework/connection/ConnectionStrategy.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -14,14 +14,12 @@
import javax.sql.XAConnection;
-import org.teiid.test.framework.datasource.DataSource;
-import org.teiid.test.framework.datasource.DatasourceMgr;
import org.teiid.adminapi.Admin;
import org.teiid.adminapi.AdminOptions;
-import org.teiid.adminapi.ConnectorBinding;
import org.teiid.adminapi.Model;
import org.teiid.adminapi.VDB;
import org.teiid.connector.jdbc.JDBCPropertyNames;
+import org.teiid.test.framework.datasource.DataSourceMgr;
import org.teiid.test.framework.exception.QueryTestFailedException;
import org.teiid.test.framework.exception.TransactionRuntimeException;
@@ -215,8 +213,7 @@
useName = mappedName;
}
- org.teiid.test.framework.datasource.DataSource ds = DatasourceMgr.getInstance().getDatasource(useName);
-// Properties sourceProps = DatasourceMgr.getInstance().getDatasourceProperties(useName);
+ org.teiid.test.framework.datasource.DataSource ds = DataSourceMgr.getInstance().getDatasource(useName, m.getName());
if (ds != null) {
@@ -233,34 +230,6 @@
}
-// Collection<ConnectorBinding> bindings = api.getConnectorBindingsInVDB("*");
-//
-// for (Iterator<ConnectorBinding> it=bindings.iterator(); it.hasNext();) {
-// ConnectorBinding cb = it.next();
-//
-// cb.
-//
-// String mappedName = this.env.getProperty(cb.getName());
-//
-// String useName = cb.getName();
-// if(mappedName != null) {
-// useName = mappedName;
-// }
-//
-// Properties sourceProps = DatasourceMgr.getInstance().getDatasourceProperties(useName);
-//
-// if (sourceProps != null) {
-// Properties newprops = new Properties(cb.getProperties());
-// newprops.putAll(sourceProps);
-// api.updateProperties(cb.getName(), "org.teiid.adminapi.ConnectorBinding", newprops);
-//
-// } else {
-// System.err.println("WARNING: ConnectorBinding : " + cb.getName() + " was not updated, the mapped name " + useName + " had no datasource properties defined");
-// }
-//
-//
-//
-// }
} catch (QueryTestFailedException qt) {
throw qt;
} catch (Exception t) {
Modified: trunk/test-integration/db/src/main/java/org/teiid/test/framework/connection/ConnectionUtil.java
===================================================================
--- trunk/test-integration/db/src/main/java/org/teiid/test/framework/connection/ConnectionUtil.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/main/java/org/teiid/test/framework/connection/ConnectionUtil.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -6,10 +6,11 @@
import javax.sql.XAConnection;
import org.teiid.test.framework.ConfigPropertyLoader;
-import org.teiid.test.framework.datasource.DatasourceMgr;
+import org.teiid.test.framework.datasource.DataSourceMgr;
import org.teiid.test.framework.exception.QueryTestFailedException;
import org.teiid.test.framework.exception.TransactionRuntimeException;
+// identifier should be the model name that is identified in the config properties
public class ConnectionUtil {
public static final Connection getSource(String identifier)
throws QueryTestFailedException {
@@ -26,8 +27,8 @@
Properties sourceProps;
try {
- sourceProps = DatasourceMgr.getInstance()
- .getDatasourceProperties(mappedName);
+ sourceProps = DataSourceMgr.getInstance()
+ .getDatasourceProperties(mappedName, identifier);
} catch (QueryTestFailedException e) {
throw new TransactionRuntimeException(e);
}
@@ -60,8 +61,8 @@
Properties sourceProps;
try {
- sourceProps = DatasourceMgr.getInstance()
- .getDatasourceProperties(mappedName);
+ sourceProps = DataSourceMgr.getInstance()
+ .getDatasourceProperties(mappedName, identifier);
} catch (QueryTestFailedException e) {
throw new TransactionRuntimeException(e);
}
Modified: trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DataSource.java
===================================================================
--- trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DataSource.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DataSource.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -22,6 +22,10 @@
return name;
}
+ public String getGroup() {
+ return group;
+ }
+
public String getType() {
return props.getProperty(CONNECTOR_TYPE);
}
Modified: trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DatasourceMgr.java
===================================================================
--- trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DatasourceMgr.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/main/java/org/teiid/test/framework/datasource/DatasourceMgr.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -6,20 +6,17 @@
import java.io.IOException;
import java.io.InputStream;
-import java.sql.Connection;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
+import java.util.Set;
-import javax.sql.XAConnection;
-
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
-import org.teiid.test.framework.ConfigPropertyLoader;
-import org.teiid.test.framework.connection.ConnectionStrategyFactory;
import org.teiid.test.framework.exception.QueryTestFailedException;
import org.teiid.test.framework.exception.TransactionRuntimeException;
@@ -27,7 +24,7 @@
import com.metamatrix.common.xml.XMLReaderWriterImpl;
/**
- * The DatasourceMgr is responsible for loading and managing the datasource
+ * The DataSourceMgr is responsible for loading and managing the datasource
* mapping properties file {@see #DATASOURCE_MAPPING_FILE} and the mapped
* datasource properties files. The {@link #getDatasourceProperties(String)}
* returns the properties defined for that datasourceid, which is mapped in the
@@ -36,23 +33,28 @@
* @author vanhalbert
*
*/
-public class DatasourceMgr {
+public class DataSourceMgr {
static final String DIRECTORY = "datasources/";
static final String DATASOURCE_MAPPING_FILE = "datasource_mapping.xml";
- private static DatasourceMgr _instance = null;
+ private static DataSourceMgr _instance = null;
- private Map<String, Map<String, DataSource>>dstypeMap = new HashMap<String, Map<String, DataSource>>();
+ private Map<String, Map<String, DataSource>>dstypeMap = new HashMap<String, Map<String, DataSource>>(); //key=datasourcetype
- private Map<String, DataSource> allDatasourcesMap = new HashMap<String, DataSource>();
+ private Map<String, DataSource> allDatasourcesMap = new HashMap<String, DataSource>(); // key=datasource name
+
+ private Map<String, DataSource> modelToDatasourceMap = new HashMap<String, DataSource>(); // key=modelname
+
+ private Set<String> usedDataSources = new HashSet<String>();
- private DatasourceMgr() {
+
+ private DataSourceMgr() {
}
- public static synchronized DatasourceMgr getInstance() {
+ public static synchronized DataSourceMgr getInstance() {
if (_instance == null) {
- _instance = new DatasourceMgr();
+ _instance = new DataSourceMgr();
try {
_instance.loadDataSourceMappings();
} catch (QueryTestFailedException e) {
@@ -71,13 +73,31 @@
return allDatasourcesMap.size();
}
- public org.teiid.test.framework.datasource.DataSource getDatasource(String datasourceid)
+ public DataSource getDatasource(String datasourceid, String modelName)
throws QueryTestFailedException {
DataSource ds = null;
+
+ // map the datasource to the model and datasourceid
+ // this is so the next time this combination is requested,
+ // the same datasource is returned to ensure when consecutive calls during the process
+ // corresponds to the same datasource
+ String key = modelName + "_"+datasourceid;
+
+ if (modelToDatasourceMap.containsKey(key)) {
+ return modelToDatasourceMap.get(key);
+ }
if (dstypeMap.containsKey(datasourceid)) {
Map datasources = dstypeMap.get(datasourceid);
- ds = (DataSource) datasources.values().iterator().next();
+ Iterator<DataSource> it= datasources.values().iterator();
+ while(it.hasNext()) {
+ DataSource checkit = it.next();
+ if (!usedDataSources.contains(checkit.getName())) {
+ usedDataSources.add(checkit.getName());
+ ds = checkit;
+ break;
+ }
+ }
} else {
ds = allDatasourcesMap.get(datasourceid);
@@ -87,26 +107,16 @@
+ " is not a defined datasource in the mappings file ");
}
+
+ modelToDatasourceMap.put(key, ds);
return ds;
}
- public Properties getDatasourceProperties(String datasourceid)
+ public Properties getDatasourceProperties(String datasourceid, String modelname)
throws QueryTestFailedException {
- DataSource ds = null;
- if (dstypeMap.containsKey(datasourceid)) {
-
- Map datasources = dstypeMap.get(datasourceid);
- ds = (DataSource)datasources.values().iterator().next();
-
- } else {
- ds = allDatasourcesMap.get(datasourceid);
- }
- if (ds == null) {
- throw new QueryTestFailedException("DatasourceID " + datasourceid
- + " is not a defined datasource in the mappings file ");
+ DataSource ds = getDatasource(datasourceid, modelname);
- }
return ds.getProperties();
}
@@ -183,9 +193,6 @@
System.out.println("Loaded datasource " + ds.getName());
}
-// else {
-// System.out.println("Did not load datasource " + name);
-// }
}
@@ -194,7 +201,7 @@
Properties props = null;
try {
- InputStream in = DatasourceMgr.class.getResourceAsStream("/"
+ InputStream in = DataSourceMgr.class.getResourceAsStream("/"
+ filename);
if (in != null) {
props = new Properties();
@@ -228,7 +235,7 @@
private static InputStream getInputStream() {
- InputStream in = DatasourceMgr.class.getResourceAsStream("/"
+ InputStream in = DataSourceMgr.class.getResourceAsStream("/"
+ DIRECTORY + DATASOURCE_MAPPING_FILE);
if (in != null) {
@@ -244,16 +251,6 @@
static final String DSCONFIG = "datasourceconfig";
static final String DATASOURCETYPE = "datasourcetype";
static final String DATASOURCE = "datasource";
-
-// static class DS_TYPE {
-//
-// /**
-// * This is the name of the Property Element.
-// */
-// public static final String XA_ELEMENT = "xa"; //$NON-NLS-1$
-// public static final String NONXA_ELEMENT = "nonxa"; //$NON-NLS-1$
-//
-// }
static class Property {
@@ -273,11 +270,17 @@
}
public static void main(String[] args) {
- DatasourceMgr mgr = DatasourceMgr.getInstance();
+ DataSourceMgr mgr = DataSourceMgr.getInstance();
try {
+ DataSource ds1 = mgr.getDatasource("ds_mysql5", "model1");
+
+ DataSource ds2 = mgr.getDatasource("ds_mysql5", "model1");
+ if (ds1 != ds2) {
+ throw new RuntimeException("Datasources are not the same");
+ }
System.out.println("Value for ds_mysql5: "
- + mgr.getDatasourceProperties("ds_mysql5"));
+ + mgr.getDatasourceProperties("ds_mysql5", "model1"));
} catch (QueryTestFailedException e) {
e.printStackTrace();
}
Modified: trunk/test-integration/db/src/main/java/org/teiid/test/framework/transaction/LocalTransaction.java
===================================================================
--- trunk/test-integration/db/src/main/java/org/teiid/test/framework/transaction/LocalTransaction.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/main/java/org/teiid/test/framework/transaction/LocalTransaction.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -21,11 +21,11 @@
public LocalTransaction(ConnectionStrategy strategy) {
super(strategy);
-// this.props.setProperty(ConnectionStrategy.TXN_AUTO_WRAP, ConnectionStrategy.AUTO_WRAP_OFF);
}
protected void before(TransactionQueryTest test) {
try {
+ debug("Autocommit: " + this.connStrategy.getAutocommit());
test.getConnection().setAutoCommit(this.connStrategy.getAutocommit());
} catch (SQLException e) {
throw new RuntimeException(e);
@@ -33,6 +33,7 @@
}
protected void after(TransactionQueryTest test) {
+ boolean exception = false;
try {
if (test.rollbackAllways()|| test.exceptionOccurred()) {
test.getConnection().rollback();
@@ -41,14 +42,27 @@
else {
test.getConnection().commit();
}
- } catch (SQLException e) {
- throw new TransactionRuntimeException(e);
+ } catch (SQLException se) {
+ exception = true;
+ // if exception, try to trigger the rollback
+ try {
+ test.getConnection().rollback();
+ } catch (Exception e) {
+ // do nothing
+ }
+ throw new TransactionRuntimeException(se);
+
+
} finally {
- try {
- test.getConnection().setAutoCommit(true);
- } catch (SQLException e) {
- throw new RuntimeException(e);
- }
+ // if an exception occurs and the autocommit is set to true - while doing a transaction
+ // will generate a new exception overriding the first exception
+ if (!exception) {
+ try {
+ test.getConnection().setAutoCommit(true);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ }
}
}
}
Modified: trunk/test-integration/db/src/main/java/org/teiid/test/framework/transaction/TransactionFactory.java
===================================================================
--- trunk/test-integration/db/src/main/java/org/teiid/test/framework/transaction/TransactionFactory.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/main/java/org/teiid/test/framework/transaction/TransactionFactory.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -52,9 +52,9 @@
transacton = new JNDITransaction(connstrategy);
} else {
- new TransactionRuntimeException("Invalid property value of " + type + " for " + TRANSACTION_TYPE );
+ throw new TransactionRuntimeException("Invalid property value of " + type + " for " + TRANSACTION_TYPE );
}
-
+
return transacton;
}
Modified: trunk/test-integration/db/src/main/resources/datasources/readme.txt
===================================================================
--- trunk/test-integration/db/src/main/resources/datasources/readme.txt 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/main/resources/datasources/readme.txt 2009-09-15 03:45:38 UTC (rev 1353)
@@ -8,4 +8,29 @@
1. create the directory (if it doesn't exist)
2. create (or place) a connection.properties file in the newly created directory. See the
example_connection.properties in the derby directory as a starting point.
-
\ No newline at end of file
+
+
+NOTE: The datasource_mapping.xml has groupings by datasource type. This is also a mechanism for creating groupings
+for special usecases. An example would be to do the following:
+
+- add 2 different groups to datasource_mapping.xml
+
+ <datasourcetype name="TestIT">
+ <datasource name="mysqlmyA">
+ <property name="dir">mysql</property>
+ <property name="connectortype">MySQL JDBC XA Connector</property>
+ </datasource>
+
+ <datasource name="oraclemyB">
+ <property name="dir">oracle</property>
+ <property name="connectortype">Oracle JDBC XA Connector</property>
+ </datasource>
+ </datasourcetype>
+
+
+- then, in the config properties file, map the models to each datasourcetype
+
+pm1=mysqlmyA
+pm2=oraclemyB
+
+This forces the association between the model and datasource.
\ No newline at end of file
Modified: trunk/test-integration/db/src/main/resources/default-config.properties
===================================================================
--- trunk/test-integration/db/src/main/resources/default-config.properties 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/main/resources/default-config.properties 2009-09-15 03:45:38 UTC (rev 1353)
@@ -25,7 +25,7 @@
# AUTO_WRAP_PESSIMISTIC = "PESSIMISTIC"
# AUTO_WRAP_OPTIMISTIC = "OPTIMISTIC"
-# txnAutoWrap=
+txnAutoWrap=off
##########################################
# properties for MetaMatrix connection
@@ -59,9 +59,12 @@
#
#
-pm1=nonxa
-pm2=nonxa
+#pm1=nonxa
+#pm2=nonxa
+pm1=mysqlA
+pm2=mysqlB
+
Modified: trunk/test-integration/db/src/test/java/org/teiid/test/framework/AbstractQueryTransactionTest.java
===================================================================
--- trunk/test-integration/db/src/test/java/org/teiid/test/framework/AbstractQueryTransactionTest.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/test/java/org/teiid/test/framework/AbstractQueryTransactionTest.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -52,11 +52,13 @@
}
@Override protected void assignExecutionProperties(Statement stmt) {
- if (this.executionProperties != null) {
- if (stmt instanceof com.metamatrix.jdbc.api.Statement) {
+ if (this.executionProperties != null) {
+ if (stmt instanceof com.metamatrix.jdbc.api.Statement) {
com.metamatrix.jdbc.api.Statement statement = (com.metamatrix.jdbc.api.Statement)stmt;
- if (this.executionProperties.getProperty(ExecutionProperties.PROP_TXN_AUTO_WRAP) != null) {
- statement.setExecutionProperty(ExecutionProperties.PROP_TXN_AUTO_WRAP, this.executionProperties.getProperty(ExecutionProperties.PROP_TXN_AUTO_WRAP));
+ String txnautowrap = this.executionProperties.getProperty(ExecutionProperties.PROP_TXN_AUTO_WRAP);
+ if (txnautowrap != null) {
+ System.out.println("txnAutoWrap: " + txnautowrap);
+ statement.setExecutionProperty(ExecutionProperties.PROP_TXN_AUTO_WRAP, txnautowrap);
}
if (this.executionProperties.getProperty(ExecutionProperties.PROP_FETCH_SIZE) != null) {
Modified: trunk/test-integration/db/src/test/java/org/teiid/test/framework/datasource/SingleDataSourceSetup.java
===================================================================
--- trunk/test-integration/db/src/test/java/org/teiid/test/framework/datasource/SingleDataSourceSetup.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/test/java/org/teiid/test/framework/datasource/SingleDataSourceSetup.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -59,8 +59,7 @@
test1.assertRowCount(100);
test1.execute("select * from g2 ");
test1.assertRowCount(100);
-
-
+
System.out.println("SingleDataSourceSetup Completed");
Modified: trunk/test-integration/db/src/test/java/org/teiid/test/framework/datasource/TwoDataSourceSetup.java
===================================================================
--- trunk/test-integration/db/src/test/java/org/teiid/test/framework/datasource/TwoDataSourceSetup.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/test/java/org/teiid/test/framework/datasource/TwoDataSourceSetup.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -44,11 +44,20 @@
}
test1.executeBatch(sql1);
+
+
+ for (int i = 0; i < 100; i++) {
+ sql1[i] = "insert into g2 (e1, e2) values("+i+",'"+i+"')" ;
+ }
+
+ test1.executeBatch(sql1);
test1.execute("select * from g1 ");
test1.assertRowCount(100);
test1.execute("select * from g2 ");
- test1.assertRowCount(0);
+ test1.assertRowCount(100);
+ test1.closeConnection();
+
AbstractQueryTest test2 = new QueryExecution(ConnectionUtil.getSource("pm2")); //$NON-NLS-1$
test2.execute("delete from g2"); //$NON-NLS-1$
test2.execute("delete from g1"); //$NON-NLS-1$
@@ -61,15 +70,23 @@
String[] sql2 = new String[100];
for (int i = 0; i < 100; i++) {
+ sql2[i] = "insert into g1 (e1, e2) values("+i+",'"+i+"')" ;
+ }
+
+ test2.executeBatch(sql2);
+
+
+ for (int i = 0; i < 100; i++) {
sql2[i] = "insert into g2 (e1, e2) values("+i+",'"+i+"')" ;
}
test2.executeBatch(sql2);
test2.execute("select * from g1 ");
- test2.assertRowCount(0);
+ test2.assertRowCount(100);
test2.execute("select * from g2 ");
test2.assertRowCount(100);
-
+
+ test2.closeConnection();
System.out.println("TwoSource Setup Completed");
Modified: trunk/test-integration/db/src/test/java/org/teiid/test/testcases/TwoSourceTransactionTest.java
===================================================================
--- trunk/test-integration/db/src/test/java/org/teiid/test/testcases/TwoSourceTransactionTest.java 2009-09-15 03:08:00 UTC (rev 1352)
+++ trunk/test-integration/db/src/test/java/org/teiid/test/testcases/TwoSourceTransactionTest.java 2009-09-15 03:45:38 UTC (rev 1353)
@@ -15,7 +15,7 @@
/**
- * A common SingleSource test case among many different transaction stuff.
+ * Test cases that require 2 datasources
*/
public class TwoSourceTransactionTest extends BaseAbstractTransactionTestCase {
@@ -41,6 +41,10 @@
assertRowCount(100);
}
+ public int getNumberRequiredDataSources(){
+ return 2;
+ }
+
public void validateTestCase() throws Exception {
}
};
@@ -289,7 +293,7 @@
test.execute("select * from g2 where e1 >= 100 and e1 < 115");
test.assertRowCount(15);
test.execute("select distinct e2 from g1 where e1 > 100");
- test.assertResultsSetEquals(new String[] {"e2[varchar]", "blah"});
+ test.assertResultsSetEquals(new String[] {"e2[VARCHAR]", "blah"});
test.closeConnection();
}
16 years, 7 months
teiid SVN: r1352 - branches.
by teiid-commits@lists.jboss.org
Author: shawkins
Date: 2009-09-14 23:08:00 -0400 (Mon, 14 Sep 2009)
New Revision: 1352
Added:
branches/6.2.x/
Log:
creating 6.2 branch
Copied: branches/6.2.x (from rev 1351, trunk)
16 years, 7 months
teiid SVN: r1351 - in trunk/engine/src: main/java/com/metamatrix/query/rewriter and 1 other directories.
by teiid-commits@lists.jboss.org
Author: shawkins
Date: 2009-09-14 22:31:57 -0400 (Mon, 14 Sep 2009)
New Revision: 1351
Modified:
trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java
trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java
trunk/engine/src/test/java/com/metamatrix/query/rewriter/TestQueryRewriter.java
Log:
TEIID-800 now that the conversion between string and char does not throw an exception, we need to check for the narrowing case in the resolver.
Modified: trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java 2009-09-14 20:49:25 UTC (rev 1350)
+++ trunk/engine/src/main/java/com/metamatrix/query/resolver/util/ResolverUtil.java 2009-09-15 02:31:57 UTC (rev 1351)
@@ -214,27 +214,38 @@
public static Constant convertConstant(String sourceTypeName,
String targetTypeName,
- Constant constant) throws QueryResolverException {
+ Constant constant) {
if (!DataTypeManager.isTransformable(sourceTypeName, targetTypeName)) {
return null;
}
-
- //try to get the converted constant, if this fails then it is not in a valid format
- Constant result = getProperlyTypedConstant(constant.getValue(), DataTypeManager.getDataTypeClass(targetTypeName));
+
+ try {
+ //try to get the converted constant, if this fails then it is not in a valid format
+ Constant result = getProperlyTypedConstant(constant.getValue(), DataTypeManager.getDataTypeClass(targetTypeName));
+
+ if (DataTypeManager.DefaultDataTypes.STRING.equals(sourceTypeName)) {
+ if (DataTypeManager.DefaultDataTypes.CHAR.equals(targetTypeName)) {
+ String value = (String)constant.getValue();
+ if (value != null && value.length() != 1) {
+ return null;
+ }
+ }
+ return result;
+ }
+
+ //for non-strings, ensure that the conversion is consistent
+ if (!DataTypeManager.isTransformable(targetTypeName, sourceTypeName)) {
+ return null;
+ }
- if (DataTypeManager.DefaultDataTypes.STRING.equals(sourceTypeName)) {
- return result;
+ Constant reverse = getProperlyTypedConstant(result.getValue(), constant.getType());
+
+ if (constant.equals(reverse)) {
+ return result;
+ }
+ } catch (QueryResolverException e) {
+
}
-
- //for non-strings, ensure that the conversion is consistent
- if (!DataTypeManager.isTransformable(targetTypeName, sourceTypeName)) {
- return null;
- }
- Constant reverse = getProperlyTypedConstant(result.getValue(), constant.getType());
-
- if (constant.equals(reverse)) {
- return result;
- }
return null;
}
Modified: trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java
===================================================================
--- trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java 2009-09-14 20:49:25 UTC (rev 1350)
+++ trunk/engine/src/main/java/com/metamatrix/query/rewriter/QueryRewriter.java 2009-09-15 02:31:57 UTC (rev 1351)
@@ -1492,16 +1492,14 @@
String leftExprTypeName = DataTypeManager.getDataTypeName(leftExpr.getType());
- Constant result = null;
- try {
- result = ResolverUtil.convertConstant(DataTypeManager.getDataTypeName(rightConstant.getType()), leftExprTypeName, rightConstant);
- Constant other = ResolverUtil.convertConstant(leftExprTypeName, DataTypeManager.getDataTypeName(rightConstant.getType()), result);
- if (((Comparable)rightConstant.getValue()).compareTo(other.getValue()) != 0) {
- return getSimpliedCriteria(crit, leftExpr, crit.getOperator() != CompareCriteria.EQ, true);
- }
- } catch(QueryResolverException e) {
+ Constant result = ResolverUtil.convertConstant(DataTypeManager.getDataTypeName(rightConstant.getType()), leftExprTypeName, rightConstant);
+ if (result == null) {
return getSimpliedCriteria(crit, leftExpr, crit.getOperator() != CompareCriteria.EQ, true);
}
+ Constant other = ResolverUtil.convertConstant(leftExprTypeName, DataTypeManager.getDataTypeName(rightConstant.getType()), result);
+ if (other == null || ((Comparable)rightConstant.getValue()).compareTo(other.getValue()) != 0) {
+ return getSimpliedCriteria(crit, leftExpr, crit.getOperator() != CompareCriteria.EQ, true);
+ }
if (!DataTypeManager.isImplicitConversion(leftExprTypeName, DataTypeManager.getDataTypeName(rightConstant.getType()))) {
return crit;
@@ -1558,15 +1556,12 @@
Constant rightConstant = (Constant) next;
- Constant result = null;
- try {
- result = ResolverUtil.convertConstant(DataTypeManager.getDataTypeName(rightConstant.getType()), leftExprTypeName, rightConstant);
+ Constant result = ResolverUtil.convertConstant(DataTypeManager.getDataTypeName(rightConstant.getType()), leftExprTypeName, rightConstant);
+ if (result != null) {
Constant other = ResolverUtil.convertConstant(leftExprTypeName, DataTypeManager.getDataTypeName(rightConstant.getType()), result);
- if (((Comparable)rightConstant.getValue()).compareTo(other.getValue()) != 0) {
+ if (other == null || ((Comparable)rightConstant.getValue()).compareTo(other.getValue()) != 0) {
result = null;
}
- } catch(QueryResolverException e) {
-
}
if (result != null) {
Modified: trunk/engine/src/test/java/com/metamatrix/query/rewriter/TestQueryRewriter.java
===================================================================
--- trunk/engine/src/test/java/com/metamatrix/query/rewriter/TestQueryRewriter.java 2009-09-14 20:49:25 UTC (rev 1350)
+++ trunk/engine/src/test/java/com/metamatrix/query/rewriter/TestQueryRewriter.java 2009-09-15 02:31:57 UTC (rev 1351)
@@ -2269,4 +2269,11 @@
helpTestRewriteCriteria(original, expected);
}
+ @Test public void testRewriteChar() {
+ String original = "convert(pm1.g1.e1, char) = '100'"; //$NON-NLS-1$
+ String expected = "1 = 0"; //$NON-NLS-1$
+
+ helpTestRewriteCriteria(original, expected);
+ }
+
}
16 years, 7 months
teiid SVN: r1350 - trunk/client-jdbc/src/main/java/com/metamatrix/jdbc.
by teiid-commits@lists.jboss.org
Author: shawkins
Date: 2009-09-14 16:49:25 -0400 (Mon, 14 Sep 2009)
New Revision: 1350
Modified:
trunk/client-jdbc/src/main/java/com/metamatrix/jdbc/MMDatabaseMetaData.java
Log:
TEIID-736 simplifying MMDatabaseMetaData type handling to common MMJDBCSQLTypeInfo class
Modified: trunk/client-jdbc/src/main/java/com/metamatrix/jdbc/MMDatabaseMetaData.java
===================================================================
--- trunk/client-jdbc/src/main/java/com/metamatrix/jdbc/MMDatabaseMetaData.java 2009-09-14 20:45:38 UTC (rev 1349)
+++ trunk/client-jdbc/src/main/java/com/metamatrix/jdbc/MMDatabaseMetaData.java 2009-09-14 20:49:25 UTC (rev 1350)
@@ -31,7 +31,6 @@
import java.sql.RowIdLifetime;
//## JDBC4.0-end ##
import java.sql.SQLException;
-import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -2548,51 +2547,16 @@
* @throws SQLException, should never occur.
*/
public boolean supportsConvert(int fromType, int toType) throws SQLException {
-
- if(fromType == Types.CHAR || fromType == Types.VARCHAR || fromType == Types.LONGVARCHAR) {
- if(toType == Types.CHAR || toType == Types.VARCHAR || toType == Types.LONGVARCHAR || toType == Types.BIT
- || toType == Types.SMALLINT || toType == Types.TINYINT|| toType == Types.INTEGER || toType == Types.BIGINT
- || toType == Types.FLOAT || toType == Types.REAL || toType == Types.DOUBLE || toType == Types.NUMERIC
- || toType == Types.DECIMAL || toType == Types.DATE || toType == Types.TIME || toType == Types.TIMESTAMP) {
- return true;
- }
- return false;
- } else if(fromType == Types.INTEGER || fromType == Types.TINYINT || fromType == Types.SMALLINT
- || fromType == Types.BIT || fromType == Types.BIGINT || fromType == Types.FLOAT || fromType == Types.REAL
- || fromType == Types.DOUBLE || fromType == Types.NUMERIC || fromType == Types.DECIMAL) {
- if(toType == Types.CHAR || toType == Types.VARCHAR || toType == Types.LONGVARCHAR || toType == Types.BIT
- || toType == Types.SMALLINT || toType == Types.TINYINT || toType == Types.INTEGER || toType == Types.BIGINT
- || toType == Types.FLOAT || toType == Types.REAL || toType == Types.DOUBLE || toType == Types.NUMERIC
- || toType == Types.DECIMAL) {
- return true;
- }
- return false;
- } else if(fromType == Types.DATE) {
- if(toType == Types.DATE || toType == Types.TIMESTAMP || toType == Types.CHAR
- || toType == Types.VARCHAR || toType == Types.LONGVARCHAR) {
- return true;
- }
- return false;
- } else if(fromType == Types.TIME) {
- if(toType == Types.TIME || toType == Types.TIMESTAMP || toType == Types.CHAR
- || toType == Types.VARCHAR || toType == Types.LONGVARCHAR) {
- return true;
- }
- return false;
- } else if(fromType == Types.TIMESTAMP) {
- if(toType == Types.DATE || toType == Types.TIME || toType == Types.TIMESTAMP
- || toType == Types.CHAR || toType == Types.VARCHAR || toType == Types.LONGVARCHAR) {
- return true;
- }
- return false;
- } else if(fromType == Types.JAVA_OBJECT) {
- if(toType == Types.JAVA_OBJECT) {
- return true;
- }
- return false;
- } else {
- return false;
- }
+ String fromName = MMJDBCSQLTypeInfo.getTypeName(fromType);
+ String toName = MMJDBCSQLTypeInfo.getTypeName(toType);
+
+ if (fromName.equals(toName)) {
+ if (fromName.equals(DataTypeManager.DefaultDataTypes.OBJECT) && fromName != toName) {
+ return false;
+ }
+ return true;
+ }
+ return DataTypeManager.isTransformable(fromName, toName);
}
/**
16 years, 7 months
teiid SVN: r1349 - trunk/test-integration/common.
by teiid-commits@lists.jboss.org
Author: shawkins
Date: 2009-09-14 16:45:38 -0400 (Mon, 14 Sep 2009)
New Revision: 1349
Modified:
trunk/test-integration/common/
Log:
adding to svn ignore
Property changes on: trunk/test-integration/common
___________________________________________________________________
Name: svn:ignore
+ target
.settings
.classpath
.project
16 years, 7 months
teiid SVN: r1348 - in trunk/documentation: salesforce-connector-guide/src/main/docbook/en-US/content and 1 other directory.
by teiid-commits@lists.jboss.org
Author: jdoyle
Date: 2009-09-14 14:35:18 -0400 (Mon, 14 Sep 2009)
New Revision: 1348
Modified:
trunk/documentation/pom.xml
trunk/documentation/salesforce-connector-guide/src/main/docbook/en-US/content/appendix.xml
trunk/documentation/salesforce-connector-guide/src/main/docbook/en-US/content/connector.xml
Log:
Updating SF doc for relationship queries.
Modified: trunk/documentation/pom.xml
===================================================================
--- trunk/documentation/pom.xml 2009-09-14 18:09:28 UTC (rev 1347)
+++ trunk/documentation/pom.xml 2009-09-14 18:35:18 UTC (rev 1348)
@@ -17,6 +17,7 @@
<module>connector-developer-guide</module>
<module>server-extensions-guide</module>
<module>quick-start-example</module>
+ <module>salesforce-connector-guide</module>
</modules>
<repositories>
<repository>
Modified: trunk/documentation/salesforce-connector-guide/src/main/docbook/en-US/content/appendix.xml
===================================================================
--- trunk/documentation/salesforce-connector-guide/src/main/docbook/en-US/content/appendix.xml 2009-09-14 18:09:28 UTC (rev 1347)
+++ trunk/documentation/salesforce-connector-guide/src/main/docbook/en-US/content/appendix.xml 2009-09-14 18:35:18 UTC (rev 1348)
@@ -44,6 +44,9 @@
<listitem><para>
CompareCriteriaOrdered
</para></listitem>
+ <listitem><para>
+ OuterJoins with join criteria KEY
+ </para></listitem>
</itemizedlist>
</sect1>
Modified: trunk/documentation/salesforce-connector-guide/src/main/docbook/en-US/content/connector.xml
===================================================================
--- trunk/documentation/salesforce-connector-guide/src/main/docbook/en-US/content/connector.xml 2009-09-14 18:09:28 UTC (rev 1347)
+++ trunk/documentation/salesforce-connector-guide/src/main/docbook/en-US/content/connector.xml 2009-09-14 18:35:18 UTC (rev 1348)
@@ -221,5 +221,49 @@
</para>
</sect1>
+ <sect1>
+ <title>Relationship Queries</title>
+ <para>Salesforce does not support joins like a relational database,
+ but it does have support for queries that include parent-to-child
+ or child-to-parent relationships between objects. These are termed
+ Relationship Queries. The SalesForce connector supports Relationship
+ Queries through Outer Join syntax.
+ </para>
+ <programlisting><![CDATA[
+ SELECT Account.name, Contact.Name from Contact LEFT OUTER JOIN Account
+ on Contact.Accountid = Account.id
+ ]]>
+ </programlisting>
+ <para>This query shows the correct syntax to query a SalesForce model
+ to produce a relationship query from child to parent. It resolves to the
+ following query to SalesForce.
+ </para>
+ <programlisting><![CDATA[
+ SELECT Contact.Account.Name, Contact.Name FROM Contact
+ ]]>
+ </programlisting>
+ <programlisting><![CDATA[
+ select Contact.Name, Account.Name from Account Left outer Join Contact
+ on Contact.Accountid = Account.id
+ ]]>
+ </programlisting>
+ <para>This query shows the correct syntax to query a SalesForce model
+ to produce a relationship query from parent to child. It resolves to the
+ following query to SalesForce.
+ </para>
+ <programlisting><![CDATA[
+ SELECT Account.Name, (SELECT Contact.Name FROM
+ Account.Contacts) FROM Account
+ ]]>
+ </programlisting>
+ <para>
+ See the description of the
+ <ulink
+ url="http://www.salesforce.com/us/developer/docs/api/index_Left.htm#StartTopic...">Relationship Queries
+ </ulink>
+ operation in the SalesForce documentation for limitations.
+ </para>
+
+ </sect1>
</chapter>
16 years, 7 months
teiid SVN: r1347 - in trunk: client/src/main/java/com/metamatrix/dqp/embedded and 3 other directories.
by teiid-commits@lists.jboss.org
Author: shawkins
Date: 2009-09-14 14:09:28 -0400 (Mon, 14 Sep 2009)
New Revision: 1347
Modified:
trunk/build/kit-runtime/deploy.properties
trunk/client/src/main/java/com/metamatrix/dqp/embedded/DQPEmbeddedProperties.java
trunk/engine/src/main/java/org/teiid/dqp/internal/process/CodeTableCache.java
trunk/engine/src/main/java/org/teiid/dqp/internal/process/DQPCore.java
trunk/engine/src/main/java/org/teiid/dqp/internal/process/DataTierManagerImpl.java
trunk/engine/src/main/resources/com/metamatrix/dqp/i18n.properties
trunk/engine/src/test/java/org/teiid/dqp/internal/process/TestCodeTableCache.java
trunk/engine/src/test/java/org/teiid/dqp/internal/process/TestDataTierManager.java
Log:
TEIID-828 update of code table caching to allow for more tables.
Modified: trunk/build/kit-runtime/deploy.properties
===================================================================
--- trunk/build/kit-runtime/deploy.properties 2009-09-14 15:59:00 UTC (rev 1346)
+++ trunk/build/kit-runtime/deploy.properties 2009-09-14 18:09:28 UTC (rev 1347)
@@ -63,12 +63,15 @@
#The maximum number of query plans that are cached. Note: this is a memory based cache. (default 250)
PreparedPlanCache.maxCount=250
-#Maximum number of cached lookup tables. Note: this is a memory based cache. (default 20)
-CodeTables.maxCount=20
+#Maximum number of cached lookup tables. Note: this is a memory based cache and should be set to a value of at least 10 to accommodate system usage. (default 200)
+CodeTables.maxCount=200
#Maximum number of records in a single lookup table (default 10000)
-CodeTables.maxRows=10000
+CodeTables.maxRowsPerTable=10000
+#Maximum number of records in all lookup tables (default 200000)
+CodeTables.maxRows=200000
+
#Denotes whether or not result set caching is enabled. (default false)
ResultSetCache.enabled=false
@@ -85,10 +88,10 @@
# Session Service Settings
#
-#Maximum number of sessions allowed by the system
+#Maximum number of sessions allowed by the system (default 5000)
session.maxSessions=5000
-#Max allowed time before the session is terminated by the system (default unlimited, below value is 24hrs)
+#Max allowed time before the session is terminated by the system (default unlimited; the example value below, 86400000, is 24hrs)
#session.expirationTimeInMilli=86400000
#
@@ -120,13 +123,22 @@
server.portNumber=31000
server.bindAddress=localhost
+
+#Max number of threads dedicated to Admin and initial request processing (default 15)
server.maxSocketThreads=15
+
+#SO_RCVBUF size, 0 indicates that system default should be used (default 0)
server.inputBufferSize=0
+
+#SO_SNDBUF size, 0 indicates that system default should be used (default 0)
server.outputBufferSize=0
# SSL Settings
+#Setting to enable the use of SSL for socket connections. Note: all clients must use the mms protocol when enabled. (default false)
ssl.enabled=false
#ssl.protocol=SSLv3
+
+#SSL Authentication Mode, may be one of 1-way, 2-way, or anonymous (default 1-way)
#ssl.authenticationMode=1-way
#ssl.keymanagementalgorithm=
#ssl.keystore.filename=ssl.keystore
@@ -142,9 +154,12 @@
# Setting to enable the use of transactions for XA, local,
# and request scope transactions (default true)
xa.enabled=true
+
# default transaction time out in seconds (default 120)
xa.max_timeout=120
+
# Setting to enable recovery scans (default true)
xa.enable_recovery=true
+
# JBoss transactions status port (default 0 - selects an available port)
xa.txnstatus_port=0
\ No newline at end of file
Modified: trunk/client/src/main/java/com/metamatrix/dqp/embedded/DQPEmbeddedProperties.java
===================================================================
--- trunk/client/src/main/java/com/metamatrix/dqp/embedded/DQPEmbeddedProperties.java 2009-09-14 15:59:00 UTC (rev 1346)
+++ trunk/client/src/main/java/com/metamatrix/dqp/embedded/DQPEmbeddedProperties.java 2009-09-14 18:09:28 UTC (rev 1347)
@@ -45,6 +45,7 @@
public static final String MAX_RESULTSET_CACHE_SIZE = "ResultSetCache.maxSizeInMB"; //$NON-NLS-1$
public static final String MAX_RESULTSET_CACHE_AGE = "ResultSetCache.maxAgeInSeconds"; //$NON-NLS-1$
public static final String RESULTSET_CACHE_SCOPE = "ResultSetCache.scope"; //$NON-NLS-1$
+ public static final String MAX_CODE_TABLE_RECORDS_PER_TABLE = "CodeTables.maxRowsPerTable"; //$NON-NLS-1$
public static final String MAX_CODE_TABLE_RECORDS = "CodeTables.maxRows"; //$NON-NLS-1$
public static final String MAX_CODE_TABLES = "CodeTables.maxCount"; //$NON-NLS-1$
public static final String MAX_PLAN_CACHE_SIZE = "PreparedPlanCache.maxCount"; //$NON-NLS-1$
Modified: trunk/engine/src/main/java/org/teiid/dqp/internal/process/CodeTableCache.java
===================================================================
--- trunk/engine/src/main/java/org/teiid/dqp/internal/process/CodeTableCache.java 2009-09-14 15:59:00 UTC (rev 1346)
+++ trunk/engine/src/main/java/org/teiid/dqp/internal/process/CodeTableCache.java 2009-09-14 18:09:28 UTC (rev 1347)
@@ -22,43 +22,45 @@
package org.teiid.dqp.internal.process;
-import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
import com.metamatrix.api.exception.MetaMatrixComponentException;
import com.metamatrix.api.exception.MetaMatrixProcessingException;
import com.metamatrix.common.log.LogManager;
import com.metamatrix.core.util.HashCodeUtil;
import com.metamatrix.dqp.DQPPlugin;
+import com.metamatrix.dqp.embedded.DQPEmbeddedProperties;
import com.metamatrix.dqp.util.LogConstants;
import com.metamatrix.query.util.CommandContext;
/**
- * Code table cache.
+ * Code table cache. Heavily synchronized in-memory cache of code tables. There is no purging policy for this cache. Once the limits have been reached exceptions will occur.
*/
class CodeTableCache {
- // Max number of code tables that can be loaded
+ private static class CodeTable {
+ Map<Object, Object> codeMap;
+ Set<Object> waitingRequests = new HashSet<Object>();
+ }
+
+ // Max number of code tables that can be loaded
private int maxCodeTables;
- // Caches being loaded - key is CacheKey, value is WaitingRequests
- private Map loadingCaches = new HashMap();
-
- // Map of RequestID/nodeID -> CacheKey
- private Map requestToCacheKeyMap = Collections.synchronizedMap(new HashMap());
-
+ // Max number of code records that can be loaded
+ private int maxCodeRecords;
+
+ private int maxCodeTableRecords;
+
+ private int rowCount;
+
// Cache itself - key is CacheKey, value is Map (which is the key value -> return value for the code table)
- private Map codeTableCache = new HashMap();
+ private Map<CacheKey, CodeTable> codeTableCache = new HashMap<CacheKey, CodeTable>();
- // Cache keys for stuff already in the cache
- private Set cacheKeyDone = new HashSet();
-
public enum CacheState {
CACHE_EXISTS,
CACHE_LOADING,
@@ -66,13 +68,13 @@
CACHE_OVERLOAD
}
- private AtomicInteger requestSequence = new AtomicInteger();
-
/**
* Construct a code table cache
*/
- public CodeTableCache(int maxCodeTables) {
- this.maxCodeTables = maxCodeTables;
+ public CodeTableCache(int maxCodeTables, int maxCodeRecords, int maxCodeTableRecords) {
+ this.maxCodeRecords = maxCodeRecords;
+ this.maxCodeTables = maxCodeTables;
+ this.maxCodeTableRecords = maxCodeTableRecords;
}
/**
@@ -95,31 +97,26 @@
// Create a CacheKey
CacheKey cacheKey = new CacheKey(codeTable, returnElement, keyElement, context.getVdbName(), context.getVdbVersion());
-
- if (cacheKeyDone.contains(cacheKey)) { // CacheKey exists in codeTableCache
+ CodeTable table = this.codeTableCache.get(cacheKey);
+ if (table == null) {
+ if(codeTableCache.size() >= maxCodeTables) {
+ // In this case we already have some number of existing + loading caches
+ // that are >= the max number we are allowed to have. Thus, we cannot load
+ // another cache.
+ return CacheState.CACHE_OVERLOAD;
+ }
+ table = new CodeTable();
+ table.waitingRequests.add(context.getProcessorID());
+ this.codeTableCache.put(cacheKey, table);
+ return CacheState.CACHE_NOT_EXIST;
+ }
+ if (table.waitingRequests == null) { // CacheKey exists in codeTableCache
return CacheState.CACHE_EXISTS;
-
}
- if (loadingCaches.containsKey(cacheKey)) { // CacheKey exists in loadingCache
- // Add context to additional contexts
- WaitingRequests wqr = (WaitingRequests) loadingCaches.get(cacheKey);
- wqr.addRequestID(context.getProcessorID());
- loadingCaches.put(cacheKey, wqr);
- return CacheState.CACHE_LOADING;
-
- } else if(codeTableCache.size() + loadingCaches.size() >= maxCodeTables) {
- // In this case we already have some number of existing + loading caches
- // that are >= the max number we are allowed to have. Thus, we cannot load
- // another cache.
- return CacheState.CACHE_OVERLOAD;
-
- } else { // CacheKey not exists in loadingCache
- // Add to loadingCaches as primary context
- WaitingRequests wqr = new WaitingRequests(context.getProcessorID());
- loadingCaches.put(cacheKey, wqr);
- return CacheState.CACHE_NOT_EXIST;
- }
- }
+ // Add context to additional contexts
+ table.waitingRequests.add(context.getProcessorID());
+ return CacheState.CACHE_LOADING;
+ }
/**
* Set request ID for request key to cache key mapping.
@@ -130,14 +127,8 @@
* @param requestID Request ID
* @param nodeID Plan Node ID
*/
- public Integer createCacheRequest(String codeTable, String returnElement, String keyElement, CommandContext context) {
- // Create a cache key
- CacheKey cacheKey = new CacheKey(codeTable, returnElement, keyElement, context.getVdbName(), context.getVdbVersion());
- Integer result = this.requestSequence.getAndIncrement();
-
- // Add requestID/nodeID pair to map for later lookup
- requestToCacheKeyMap.put(result, cacheKey);
- return result;
+ public CacheKey createCacheRequest(String codeTable, String returnElement, String keyElement, CommandContext context) {
+ return new CacheKey(codeTable, returnElement, keyElement, context.getVdbName(), context.getVdbVersion());
}
/**
@@ -147,25 +138,32 @@
* @param results QueryResults of <List<List<keyValue, returnValue>>
* @throws MetaMatrixProcessingException
*/
- public synchronized void loadTable(Integer requestKey, List[] records) throws MetaMatrixProcessingException {
- // Look up cache key by requestID/nodeID pair
- CacheKey cacheKey = (CacheKey) requestToCacheKeyMap.get(requestKey);
-
+ public synchronized void loadTable(CacheKey cacheKey, List[] records) throws MetaMatrixProcessingException {
// Lookup the existing data
// Map of data: keyValue --> returnValue;
- Map existingMap = (Map) codeTableCache.get(cacheKey);
- if(existingMap == null) {
- existingMap = new HashMap();
- codeTableCache.put(cacheKey, existingMap);
+ CodeTable table = codeTableCache.get(cacheKey);
+ if(table.codeMap == null) {
+ table.codeMap = new HashMap<Object, Object>();
}
-
+
+ // Determine whether the results should be added to code table cache
+ // Depends on size of results and available memory and system parameters
+ int potentialSize = table.codeMap.size() + records.length;
+ if (potentialSize > maxCodeTableRecords) {
+ throw new MetaMatrixProcessingException("ERR.018.005.0100", DQPPlugin.Util.getString("ERR.018.005.0100", DQPEmbeddedProperties.MAX_CODE_TABLE_RECORDS_PER_TABLE)); //$NON-NLS-1$ //$NON-NLS-2$
+ }
+
+ if (potentialSize + rowCount > maxCodeRecords) {
+ throw new MetaMatrixProcessingException("ERR.018.005.0100", DQPPlugin.Util.getString("ERR.018.005.0100", DQPEmbeddedProperties.MAX_CODE_TABLE_RECORDS)); //$NON-NLS-1$ //$NON-NLS-2$
+ }
+
// Add data: <List<List<keyValue, returnValue>> from results to the code table cache
for ( int i = 0; i < records.length; i++ ) {
// each record or row
- List record = records[i];
+ List<Object> record = records[i];
Object keyValue = record.get(0);
Object returnValue = record.get(1);
- Object existing = existingMap.put(keyValue, returnValue);
+ Object existing = table.codeMap.put(keyValue, returnValue);
if (existing != null) {
throw new MetaMatrixProcessingException(DQPPlugin.Util.getString("CodeTableCache.duplicate_key", cacheKey.getCodeTable(), cacheKey.getKeyElement(), keyValue)); //$NON-NLS-1$
}
@@ -181,21 +179,17 @@
* @return Object of return value in code table cache
*/
public synchronized Object lookupValue(String codeTable, String returnElement, String keyElement, Object keyValue, CommandContext context) throws MetaMatrixComponentException {
- Object returnValue = null;
-
// Create CacheKey
CacheKey cacheKey = new CacheKey(codeTable, returnElement, keyElement, context.getVdbName(), context.getVdbVersion());
// Find the corresponding data map in cache for the cache key
- Map dataMap = (Map) codeTableCache.get(cacheKey);
- if(dataMap == null) {
- Object[] params = new Object[] {codeTable,keyElement,returnElement};
- throw new MetaMatrixComponentException(DQPPlugin.Util.getString("CodeTableCache.No_code_table", params)); //$NON-NLS-1$
+ CodeTable table = codeTableCache.get(cacheKey);
+ if(table == null || table.codeMap == null) {
+ throw new MetaMatrixComponentException(DQPPlugin.Util.getString("CodeTableCache.No_code_table", cacheKey.codeTable,cacheKey.keyElement,cacheKey.returnElement)); //$NON-NLS-1$
}
- returnValue = dataMap.get(keyValue);
- return returnValue;
+ return table.codeMap.get(keyValue);
}
-
+
/**
* Places the lookup results in the cache and marks the cache loaded
* @param requestID
@@ -203,7 +197,7 @@
* @return the set of waiting requests
* @since 4.2
*/
- public Set markCacheLoaded(Integer requestKey) {
+ public Set<Object> markCacheLoaded(CacheKey requestKey) {
return markCacheDone(requestKey, false);
}
@@ -215,26 +209,27 @@
* @return the set of waiting requests
* @since 4.2
*/
- public Set errorLoadingCache(Integer requestKey) {
+ public Set<Object> errorLoadingCache(CacheKey requestKey) {
return markCacheDone(requestKey, true);
}
- private synchronized Set markCacheDone(Integer requestKey, boolean errorOccurred) {
- // Remove request from requestToCacheKeyMap
- CacheKey cacheKey = (CacheKey) requestToCacheKeyMap.remove(requestKey);
+ private synchronized Set<Object> markCacheDone(CacheKey cacheKey, boolean errorOccurred) {
if (errorOccurred) {
// Remove any results already cached
- codeTableCache.remove(cacheKey);
- } else {
- cacheKeyDone.add(cacheKey);
+ CodeTable table = codeTableCache.remove(cacheKey);
+ if (table != null) {
+ return table.waitingRequests;
+ }
+ return null;
}
-
- // Remove cache key from loadingCaches
- WaitingRequests waitingRequests = (WaitingRequests)loadingCaches.remove(cacheKey);
- if (waitingRequests != null) {
- return waitingRequests.getWaitingRequestIDs();
- }
- return null;
+ CodeTable table = codeTableCache.get(cacheKey);
+ if (table == null || table.codeMap == null) {
+ return null;
+ }
+ rowCount += table.codeMap.size();
+ Set<Object> waiting = table.waitingRequests;
+ table.waitingRequests = null;
+ return waiting;
}
public synchronized void clearAll() {
@@ -247,26 +242,25 @@
// Walk through every key in the done cache and remove it
int removedTables = 0;
- int removedRecords = 0;
- Iterator keyIter = cacheKeyDone.iterator();
- while(keyIter.hasNext()) {
- CacheKey cacheKey = (CacheKey) keyIter.next();
- Map codeTable = (Map) codeTableCache.remove(cacheKey);
- removedTables++;
- removedRecords += codeTable.size();
+ int removedRecords = this.rowCount;
+ for (Iterator<CodeTable> iter = codeTableCache.values().iterator(); iter.hasNext();) {
+ CodeTable table = iter.next();
+ if (table.waitingRequests == null) {
+ removedTables++;
+ iter.remove();
+ }
}
// Clear the cacheKeyDone
- cacheKeyDone.clear();
-
+ this.rowCount = 0;
// Log status
- LogManager.logInfo(LogConstants.CTX_DQP, DQPPlugin.Util.getString("CodeTableCache.Cleared_code_tables", new Object[]{new Integer(removedTables), new Integer(removedRecords)})); //$NON-NLS-1$
+ LogManager.logInfo(LogConstants.CTX_DQP, DQPPlugin.Util.getString("CodeTableCache.Cleared_code_tables", removedTables, removedRecords)); //$NON-NLS-1$
}
/**
* Cache Key consists: codeTable, returnElement and keyElement.
*/
- private static class CacheKey {
+ static class CacheKey {
private String codeTable;
private String returnElement;
private String keyElement;
@@ -294,10 +288,6 @@
return this.codeTable;
}
- public String getReturnElement() {
- return this.returnElement;
- }
-
public String getKeyElement() {
return this.keyElement;
}
@@ -324,45 +314,4 @@
}
}
- /**
- * Waiting Requests consist: primary requestID and list of additional waiting requestIDs.
- */
- private static class WaitingRequests {
- Object primaryRequestID;
- Set additionalRequestIDs;
-
- public WaitingRequests(Object requestID) {
- this.primaryRequestID = requestID;
- }
-
- public void addRequestID(Object requestID) {
- if(additionalRequestIDs == null) {
- additionalRequestIDs = new HashSet(8, 0.9f);
- }
- additionalRequestIDs.add(requestID);
- }
-
- /**
- * Return the set of requestIDs for waiting requests.
- * @return Set of waiting requests' IDs
- */
- private Set getWaitingRequestIDs() {
- Set requestIDs = null;
-
- // Waiting Requests can contain both primary and additional context
- if (additionalRequestIDs != null) {
- requestIDs = new HashSet(additionalRequestIDs.size() + 1, 1.0f);
- requestIDs.addAll(additionalRequestIDs);
- } else {
- requestIDs = new HashSet(2, 1.0f);
- }
- if (primaryRequestID != null) {
- requestIDs.add(primaryRequestID);
- }
-
- return requestIDs;
- }
-
- }
-
}
Modified: trunk/engine/src/main/java/org/teiid/dqp/internal/process/DQPCore.java
===================================================================
--- trunk/engine/src/main/java/org/teiid/dqp/internal/process/DQPCore.java 2009-09-14 15:59:00 UTC (rev 1346)
+++ trunk/engine/src/main/java/org/teiid/dqp/internal/process/DQPCore.java 2009-09-14 18:09:28 UTC (rev 1347)
@@ -121,7 +121,8 @@
//Constants
private static final int DEFAULT_MAX_CODE_TABLE_RECORDS = 10000;
- private static final int DEFAULT_MAX_CODE_TABLES = 20;
+ private static final int DEFAULT_MAX_CODE_TABLES = 200;
+ private static final int DEFAULT_MAX_CODE_RECORDS = 200000;
private static final int DEFAULT_FETCH_SIZE = 2000;
private static final int DEFAULT_PROCESSOR_TIMESLICE = 2000;
private static final String PROCESS_PLAN_QUEUE_NAME = "QueryProcessorQueue"; //$NON-NLS-1$
@@ -132,6 +133,7 @@
// System properties for Code Table
private int maxCodeTableRecords = DEFAULT_MAX_CODE_TABLE_RECORDS;
private int maxCodeTables = DEFAULT_MAX_CODE_TABLES;
+ private int maxCodeRecords = DEFAULT_MAX_CODE_RECORDS;
private int maxFetchSize = DEFAULT_FETCH_SIZE;
@@ -632,8 +634,9 @@
this.processorTimeslice = PropertiesUtils.getIntProperty(props, DQPEmbeddedProperties.PROCESS_TIMESLICE, DEFAULT_PROCESSOR_TIMESLICE);
this.maxFetchSize = PropertiesUtils.getIntProperty(props, DQPEmbeddedProperties.MAX_FETCH_SIZE, DEFAULT_FETCH_SIZE);
this.processorDebugAllowed = PropertiesUtils.getBooleanProperty(props, DQPEmbeddedProperties.PROCESSOR_DEBUG_ALLOWED, true);
- this.maxCodeTableRecords = PropertiesUtils.getIntProperty(props, DQPEmbeddedProperties.MAX_CODE_TABLE_RECORDS, DEFAULT_MAX_CODE_TABLE_RECORDS);
+ this.maxCodeTableRecords = PropertiesUtils.getIntProperty(props, DQPEmbeddedProperties.MAX_CODE_TABLE_RECORDS_PER_TABLE, DEFAULT_MAX_CODE_TABLE_RECORDS);
this.maxCodeTables = PropertiesUtils.getIntProperty(props, DQPEmbeddedProperties.MAX_CODE_TABLES, DEFAULT_MAX_CODE_TABLES);
+ this.maxCodeRecords = PropertiesUtils.getIntProperty(props, DQPEmbeddedProperties.MAX_CODE_TABLE_RECORDS, DEFAULT_MAX_CODE_RECORDS);
this.chunkSize = PropertiesUtils.getIntProperty(props, DQPEmbeddedProperties.STREAMING_BATCH_SIZE, 10) * 1024;
@@ -671,6 +674,7 @@
(VDBService) env.findService(DQPServiceNames.VDB_SERVICE),
(BufferService) env.findService(DQPServiceNames.BUFFER_SERVICE),
this.maxCodeTables,
+ this.maxCodeRecords,
this.maxCodeTableRecords);
}
Modified: trunk/engine/src/main/java/org/teiid/dqp/internal/process/DataTierManagerImpl.java
===================================================================
--- trunk/engine/src/main/java/org/teiid/dqp/internal/process/DataTierManagerImpl.java 2009-09-14 15:59:00 UTC (rev 1346)
+++ trunk/engine/src/main/java/org/teiid/dqp/internal/process/DataTierManagerImpl.java 2009-09-14 18:09:28 UTC (rev 1347)
@@ -26,6 +26,8 @@
import java.util.Iterator;
import java.util.List;
+import org.teiid.dqp.internal.process.CodeTableCache.CacheKey;
+
import com.metamatrix.api.exception.MetaMatrixComponentException;
import com.metamatrix.api.exception.MetaMatrixProcessingException;
import com.metamatrix.common.buffer.BlockedException;
@@ -35,6 +37,7 @@
import com.metamatrix.common.log.LogManager;
import com.metamatrix.core.util.Assertion;
import com.metamatrix.dqp.DQPPlugin;
+import com.metamatrix.dqp.embedded.DQPEmbeddedProperties;
import com.metamatrix.dqp.internal.datamgr.ConnectorID;
import com.metamatrix.dqp.message.AtomicRequestID;
import com.metamatrix.dqp.message.AtomicRequestMessage;
@@ -59,23 +62,19 @@
private VDBService vdbService;
private BufferService bufferService;
- // Code table limits
- private int maxCodeTableRecords;
-
// Processor state
private CodeTableCache codeTableCache;
public DataTierManagerImpl(DQPCore requestMgr,
DataService dataService, VDBService vdbService, BufferService bufferService,
- int maxCodeTables, int maxCodeTableRecords) {
+ int maxCodeTables, int maxCodeRecords, int maxCodeTableRecords) {
this.requestMgr = requestMgr;
this.dataService = dataService;
this.vdbService = vdbService;
- this.maxCodeTableRecords = maxCodeTableRecords;
this.bufferService = bufferService;
- this.codeTableCache = new CodeTableCache(maxCodeTables);
+ this.codeTableCache = new CodeTableCache(maxCodeTables, maxCodeRecords, maxCodeTableRecords);
}
public TupleSource registerRequest(Object processorId, Command command,
@@ -181,7 +180,7 @@
case CACHE_EXISTS:
return this.codeTableCache.lookupValue(codeTableName, returnElementName, keyElementName, keyValue, context);
case CACHE_OVERLOAD:
- throw new MetaMatrixProcessingException("ERR.018.005.0099", DQPPlugin.Util.getString("ERR.018.005.0099")); //$NON-NLS-1$ //$NON-NLS-2$
+ throw new MetaMatrixProcessingException("ERR.018.005.0100", DQPPlugin.Util.getString("ERR.018.005.0100", DQPEmbeddedProperties.MAX_CODE_TABLES)); //$NON-NLS-1$ //$NON-NLS-2$
default:
throw BlockedException.INSTANCE;
}
@@ -196,7 +195,7 @@
String query = ReservedWords.SELECT + ' ' + keyElementName + " ," + returnElementName + ' ' + ReservedWords.FROM + ' ' + codeTableName; //$NON-NLS-1$
- final Integer codeRequestId = this.codeTableCache.createCacheRequest(codeTableName, returnElementName, keyElementName, context);
+ final CacheKey codeRequestId = this.codeTableCache.createCacheRequest(codeTableName, returnElementName, keyElementName, context);
boolean success = false;
QueryProcessor processor = null;
@@ -206,12 +205,6 @@
processor.setBatchHandler(new QueryProcessor.BatchHandler() {
@Override
public void batchProduced(TupleBatch batch) throws MetaMatrixProcessingException {
- // Determine whether the results should be added to code table cache
- // Depends on size of results and available memory and system parameters
-
- if (batch.getEndRow() > maxCodeTableRecords) {
- throw new MetaMatrixProcessingException("ERR.018.005.0100", DQPPlugin.Util.getString("ERR.018.005.0100", context.getProcessorID(), codeRequestId)); //$NON-NLS-1$ //$NON-NLS-2$
- }
codeTableCache.loadTable(codeRequestId, batch.getAllTuples());
}
});
Modified: trunk/engine/src/main/resources/com/metamatrix/dqp/i18n.properties
===================================================================
--- trunk/engine/src/main/resources/com/metamatrix/dqp/i18n.properties 2009-09-14 15:59:00 UTC (rev 1346)
+++ trunk/engine/src/main/resources/com/metamatrix/dqp/i18n.properties 2009-09-14 18:09:28 UTC (rev 1347)
@@ -362,8 +362,7 @@
ERR.018.005.0096 = There was an error in the response.
ERR.018.005.0097 = Exception trying to determine maximum number of code tables.
ERR.018.005.0098 = Exception trying to determine maximum record size of a code table.
-ERR.018.005.0099 = Unable to load code table because code table entries exceeds the allowed parameter - MaxCodeTables.
-ERR.018.005.0100 = Unable to load code table for requestID {0} of and nodeID of {1} because result sizes exceeds the allowed parameter - MaxCodeTableRecords.
+ERR.018.005.0100 = Unable to load code table for because result sizes exceeds the allowed parameter - {0}.
# services (003)
ERR.022.003.0001=
Modified: trunk/engine/src/test/java/org/teiid/dqp/internal/process/TestCodeTableCache.java
===================================================================
--- trunk/engine/src/test/java/org/teiid/dqp/internal/process/TestCodeTableCache.java 2009-09-14 15:59:00 UTC (rev 1346)
+++ trunk/engine/src/test/java/org/teiid/dqp/internal/process/TestCodeTableCache.java 2009-09-14 18:09:28 UTC (rev 1347)
@@ -26,6 +26,7 @@
import java.util.List;
import org.teiid.dqp.internal.process.CodeTableCache;
+import org.teiid.dqp.internal.process.CodeTableCache.CacheKey;
import org.teiid.dqp.internal.process.CodeTableCache.CacheState;
import junit.framework.TestCase;
@@ -57,10 +58,10 @@
}
private CodeTableCache setUpSampleCodeTable(boolean setDone) {
- CodeTableCache ctc = new CodeTableCache(10);
-
+ CodeTableCache ctc = new CodeTableCache(10, 10, 10);
+ assertEquals(CacheState.CACHE_NOT_EXIST, ctc.cacheExists("countrycode", "code", "country", TEST_CONTEXT)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// must set the requestToCacheKeyMap first
- int nodeId = ctc.createCacheRequest("countrycode", "code", "country", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ CacheKey nodeId = ctc.createCacheRequest("countrycode", "code", "country", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
List[] results = exampleResultObject();
// table/countrycode (keyElem/country, returnElem/code);
@@ -74,16 +75,18 @@
}
if(setDone) {
ctc.markCacheLoaded(nodeId);
+ } else {
+ ctc.errorLoadingCache(nodeId);
}
return ctc;
}
// Max = 1 and 1 table is set up
private CodeTableCache setUpSampleCodeTable2() {
- CodeTableCache ctc = new CodeTableCache(1);
-
+ CodeTableCache ctc = new CodeTableCache(1, 10, 10);
+ assertEquals(CacheState.CACHE_NOT_EXIST, ctc.cacheExists("countrycode", "code", "country", TEST_CONTEXT)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// must set the requestToCacheKeyMap first
- int nodeId = ctc.createCacheRequest("countrycode", "code", "country", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ CacheKey nodeId = ctc.createCacheRequest("countrycode", "code", "country", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
List[] results = exampleResultObject();
// table/countrycode (keyElem/country, returnElem/code);
@@ -100,7 +103,7 @@
}
public void testLookupValue() throws Exception {
- CodeTableCache ctc = setUpSampleCodeTable(false);
+ CodeTableCache ctc = setUpSampleCodeTable(true);
String code = (String) ctc.lookupValue("countrycode", "code", "country", "Germany", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
assertEquals("Actual lookup value doesn't match with expected: ", code, "GM"); //$NON-NLS-1$ //$NON-NLS-2$
}
@@ -116,7 +119,7 @@
/** state = 1; loading state */
public void testCacheExists2() {
- CodeTableCache ctc = new CodeTableCache(10);
+ CodeTableCache ctc = new CodeTableCache(10, 10, 10);
ctc.cacheExists("countrycode", "code", "country", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
CacheState actualState = ctc.cacheExists("countrycode", "code", "country", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
@@ -190,10 +193,10 @@
}
public void testDuplicateKeyException() {
- CodeTableCache ctc = new CodeTableCache(1);
-
+ CodeTableCache ctc = new CodeTableCache(1, 10, 10);
+ assertEquals(CacheState.CACHE_NOT_EXIST, ctc.cacheExists("table", "key", "value", TEST_CONTEXT)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// must set the requestToCacheKeyMap first
- int nodeId = ctc.createCacheRequest("table", "key", "value", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ CacheKey nodeId = ctc.createCacheRequest("table", "key", "value", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
List[] results = new List[] {
Arrays.asList(1, 2),
Arrays.asList(1, 3),
@@ -206,5 +209,41 @@
assertEquals("Duplicate code table 'table' key 'value' value '1'", e.getMessage()); //$NON-NLS-1$
}
}
+
+ public void testMaxRecords() {
+ CodeTableCache ctc = new CodeTableCache(1, 1, 10);
+ assertEquals(CacheState.CACHE_NOT_EXIST, ctc.cacheExists("table", "key", "value", TEST_CONTEXT)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ // must set the requestToCacheKeyMap first
+ CacheKey nodeId = ctc.createCacheRequest("table", "key", "value", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ List[] results = new List[] {
+ Arrays.asList(1, 2),
+ Arrays.asList(2, 3),
+ };
+
+ try {
+ ctc.loadTable(nodeId, results);
+ fail("expected exception"); //$NON-NLS-1$
+ } catch (MetaMatrixProcessingException e) {
+ assertEquals("Error Code:ERR.018.005.0100 Message:Unable to load code table for because result sizes exceeds the allowed parameter - CodeTables.maxRows.", e.getMessage()); //$NON-NLS-1$
+ }
+ }
+
+ public void testMaxRecordsPerTable() {
+ CodeTableCache ctc = new CodeTableCache(10, 10, 1);
+ assertEquals(CacheState.CACHE_NOT_EXIST, ctc.cacheExists("table", "key", "value", TEST_CONTEXT)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ // must set the requestToCacheKeyMap first
+ CacheKey nodeId = ctc.createCacheRequest("table", "key", "value", TEST_CONTEXT); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ List[] results = new List[] {
+ Arrays.asList(1, 2),
+ Arrays.asList(2, 3),
+ };
+
+ try {
+ ctc.loadTable(nodeId, results);
+ fail("expected exception"); //$NON-NLS-1$
+ } catch (MetaMatrixProcessingException e) {
+ assertEquals("Error Code:ERR.018.005.0100 Message:Unable to load code table for because result sizes exceeds the allowed parameter - CodeTables.maxRowsPerTable.", e.getMessage()); //$NON-NLS-1$
+ }
+ }
}
Modified: trunk/engine/src/test/java/org/teiid/dqp/internal/process/TestDataTierManager.java
===================================================================
--- trunk/engine/src/test/java/org/teiid/dqp/internal/process/TestDataTierManager.java 2009-09-14 15:59:00 UTC (rev 1346)
+++ trunk/engine/src/test/java/org/teiid/dqp/internal/process/TestDataTierManager.java 2009-09-14 18:09:28 UTC (rev 1347)
@@ -111,6 +111,7 @@
dataService,
vdbService,
bs,
+ 20,
1000,
1000);
command = helpGetCommand(sql, metadata);
16 years, 7 months
teiid SVN: r1346 - in trunk/build/kit-runtime/examples: dynamicvdb-portfolio and 1 other directory.
by teiid-commits@lists.jboss.org
Author: shawkins
Date: 2009-09-14 11:59:00 -0400 (Mon, 14 Sep 2009)
New Revision: 1346
Added:
trunk/build/kit-runtime/examples/dynamicvdb-portfolio/
trunk/build/kit-runtime/examples/dynamicvdb-portfolio/dynamic.def
Removed:
trunk/build/kit-runtime/examples/dynamicvdb-portfolio/vdbless.def
trunk/build/kit-runtime/examples/vdbless-portfolio/
Modified:
trunk/build/kit-runtime/examples/dynamicvdb-portfolio/README.txt
Log:
TEIID-684 changing example to be named dynamic rather than vdbless
Copied: trunk/build/kit-runtime/examples/dynamicvdb-portfolio (from rev 1344, trunk/build/kit-runtime/examples/vdbless-portfolio)
Modified: trunk/build/kit-runtime/examples/dynamicvdb-portfolio/README.txt
===================================================================
--- trunk/build/kit-runtime/examples/vdbless-portfolio/README.txt 2009-09-11 21:42:50 UTC (rev 1344)
+++ trunk/build/kit-runtime/examples/dynamicvdb-portfolio/README.txt 2009-09-14 15:59:00 UTC (rev 1346)
@@ -1,11 +1,11 @@
Follow the same derby setup instructions as the portfolio example.
-Copy the vdbless.def file to the <teiid home>/deploy directory.
+Copy the dynamic.def file to the <teiid home>/deploy directory.
Use the simple client example run script i.e.
-$run.sh vdblessportfolio "select * from product, price where product.symbol=price.symbol"
+$run.sh dynamicportfolio "select * from product, price where product.symbol=price.symbol"
That will execute the query against both Derby and the text file using the
-vdbless connector supplied metadata running in Teiid embedded mode.
+connector supplied metadata running in Teiid embedded mode.
Added: trunk/build/kit-runtime/examples/dynamicvdb-portfolio/dynamic.def
===================================================================
--- trunk/build/kit-runtime/examples/dynamicvdb-portfolio/dynamic.def (rev 0)
+++ trunk/build/kit-runtime/examples/dynamicvdb-portfolio/dynamic.def 2009-09-14 15:59:00 UTC (rev 1346)
@@ -0,0 +1,71 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<VDB>
+ <VDBInfo>
+<!--
+ Name and version will determine the deploy location, which is of the form
+ <Teiid Home>/deploy/<vdb name>/<vdb version>
+-->
+ <Property Name="Name" Value="DynamicPortfolio" />
+ <Property Name="Version" Value="1" />
+<!--
+ Setting to use connector supplied metadata. Can be "true" or "cached".
+ "true" will obtain metadata once for every launch of Teiid.
+ "cached" will save a file containing the metadata into
+ the deploy/<vdb name>/<vdb version>/META-INF directory
+-->
+ <Property Name="UseConnectorMetadata" Value="cached" />
+ </VDBInfo>
+
+<!--
+ Each model represents access to one or more connector bindings.
+ The name of the model will be used as a top level schema name
+ for all of the metadata imported from the connector.
+
+ NOTE: Multiple models, with different import settings, can be bound to
+ the same connector binding and will be treated as the same source at
+ runtime.
+-->
+ <Model>
+ <Property Name="Name" Value="MarketData" />
+ <ConnectorBindings>
+ <Connector Name="Text Connector" />
+ </ConnectorBindings>
+ </Model>
+ <Model>
+ <Property Name="Name" Value="Accounts" />
+
+ <ConnectorBindings>
+ <Connector Name="Derby Connector" />
+ </ConnectorBindings>
+
+<!--
+ JDBC Import settings
+
+ importer.useFullSchemaName directs the importer to drop the source
+ schema from the Teiid object name, so that the Teiid fully qualified name
+ will be in the form of <model name>.<table name>
+-->
+ <Property Name="importer.useFullSchemaName" Value="false"/>
+ </Model>
+
+<!--
+ Connector bindings follow the ComponentTypes defined in <Teiid home>/deploy/configuration.xml
+ Defining connector bindings in the .def file makes them local to this VDB.
+ Connector bindings can also be defined in the configuration.xml file after the ComponentDefinitions
+ and will be available for use by all vdbs.
+-->
+ <ConnectorBindings>
+ <Connector Name="Text Connector" ComponentType="Text File Connector">
+ <Properties>
+ <Property Name="Immutable">true</Property>
+ <Property Name="DescriptorFile">${teiid.home}/examples/portfolio/marketdata-def.txt</Property>
+ </Properties>
+ </Connector>
+ <Connector Name="Derby Connector" ComponentType="Apache Derby Network Connector">
+ <Properties>
+ <Property Name="URL">jdbc:derby://localhost:1527/teiid/accounts</Property>
+ <Property Name="ConnectorClassPath">extensionjar:derbyclient.jar</Property>
+ </Properties>
+ </Connector>
+ </ConnectorBindings>
+</VDB>
\ No newline at end of file
Deleted: trunk/build/kit-runtime/examples/dynamicvdb-portfolio/vdbless.def
===================================================================
--- trunk/build/kit-runtime/examples/vdbless-portfolio/vdbless.def 2009-09-11 21:42:50 UTC (rev 1344)
+++ trunk/build/kit-runtime/examples/dynamicvdb-portfolio/vdbless.def 2009-09-14 15:59:00 UTC (rev 1346)
@@ -1,71 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<VDB>
- <VDBInfo>
-<!--
- Name and version will determine the deploy location, which is of the form
- <Teiid Home>/deploy/<vdb name>/<vdb version>
--->
- <Property Name="Name" Value="VDBLessPortfolio" />
- <Property Name="Version" Value="1" />
-<!--
- Setting to use connector supplied metadata. Can be "true" or "cached".
- "true" will obtain metadata once for every launch of Teiid.
- "cached" will save a file containing the metadata into
- the deploy/<vdb name>/<vdb version/META-INF directory
--->
- <Property Name="UseConnectorMetadata" Value="cached" />
- </VDBInfo>
-
-<!--
- Each model represents a access to one or more connector bindings.
- The name of the model will be used as a top level schema name
- for all of the metadata imported from the connector.
-
- NOTE: Multiple model, with different import settings, can be bound to
- the same connector binding and will be treated as the same source at
- runtime.
--->
- <Model>
- <Property Name="Name" Value="MarketData" />
- <ConnectorBindings>
- <Connector Name="Text Connector" />
- </ConnectorBindings>
- </Model>
- <Model>
- <Property Name="Name" Value="Accounts" />
-
- <ConnectorBindings>
- <Connector Name="Derby Connector" />
- </ConnectorBindings>
-
-<!--
- JDBC Import settings
-
- importer.useFullSchemaName directs the importer to drop the source
- schema from the Teiid object name, so that the Teiid fully qualified name
- will be in the form of <model name>.<table name>
--->
- <Property Name="importer.useFullSchemaName" Value="false"/>
- </Model>
-
-<!--
- Connector bindings follow the ComponentTypes defined in <Teiid home>/deploy/configuration.xml
- Defining connector bindings in the .def file makes them local to this VDB.
- Connector bindings can also be defined in the configuration.xml file after the ComponentDefinitions
- and will be available for use by all vdbs.
--->
- <ConnectorBindings>
- <Connector Name="Text Connector" ComponentType="Text File Connector">
- <Properties>
- <Property Name="Immutable">true</Property>
- <Property Name="DescriptorFile">${teiid.home}/examples/portfolio/marketdata-def.txt</Property>
- </Properties>
- </Connector>
- <Connector Name="Derby Connector" ComponentType="Apache Derby Network Connector">
- <Properties>
- <Property Name="URL">jdbc:derby://localhost:1527/teiid/accounts</Property>
- <Property Name="ConnectorClassPath">extensionjar:derbyclient.jar</Property>
- </Properties>
- </Connector>
- </ConnectorBindings>
-</VDB>
\ No newline at end of file
16 years, 7 months