Hibernate SVN: r10873 - branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/cfg/reveng/dialect
by hibernate-commits@lists.jboss.org
Author: max.andersen@jboss.com
Date: 2006-11-28 09:09:01 -0500 (Tue, 28 Nov 2006)
New Revision: 10873
Modified:
branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/cfg/reveng/dialect/OracleMetaDataDialect.java
Log:
HBX-817 Use PreparedStatements in OracleMetaDataDialect
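The gist of the change: the old dialect spliced schema and table names directly into SQL executed through a plain Statement, while the new code binds them as parameters of PreparedStatements that are prepared once and cached in fields. A minimal before/after sketch (illustrative names and a reduced query, not the exact code in the diff below):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    class QueryStyles {
        // Before: the value is spliced into the SQL text, so the database
        // hard-parses every variant and a quote inside 'schema' breaks the query.
        ResultSet oldStyle(Connection con, String schema) throws SQLException {
            Statement stmt = con.createStatement();
            return stmt.executeQuery(
                    "select table_name from all_tables where owner='" + schema + "'");
        }

        // After: one parsed statement, values bound per execution; r10873 goes
        // further and caches each PreparedStatement in a field for reuse.
        private PreparedStatement prepTableSchema;

        ResultSet newStyle(Connection con, String schema) throws SQLException {
            if (prepTableSchema == null) {
                prepTableSchema = con.prepareStatement(
                        "select table_name from all_tables where owner = ?");
            }
            prepTableSchema.setString(1, schema);
            return prepTableSchema.executeQuery();
        }
    }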
Modified: branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/cfg/reveng/dialect/OracleMetaDataDialect.java
===================================================================
--- branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/cfg/reveng/dialect/OracleMetaDataDialect.java 2006-11-28 10:39:32 UTC (rev 10872)
+++ branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/cfg/reveng/dialect/OracleMetaDataDialect.java 2006-11-28 14:09:01 UTC (rev 10873)
@@ -1,9 +1,10 @@
package org.hibernate.cfg.reveng.dialect;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
-import java.sql.Statement;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
@@ -11,167 +12,314 @@
import org.hibernate.mapping.Table;
/**
- * Oracle Specialised MetaData dialect that uses standard JDBC
- * and querys on the Data Dictionary for reading metadata.
+ * Oracle Specialised MetaData dialect that uses standard JDBC and queries the
+ * Data Dictionary for reading metadata.
*
* @author David Channon
+ * @author Eric Kershner (added PreparedStatements, HBX-817)
+ *
*/
+
public class OracleMetaDataDialect extends AbstractMetaDataDialect {
+ public OracleMetaDataDialect() {
+ super();
+ }
- public Iterator getTables(final String catalog, final String schema, String table) {
- try {
+ /* ******* TABLE QUERIES ******* */
+ private static final String SQL_TABLE_BASE =
+ "select a.table_name, a.owner, b.comments, 'TABLE' "
+ + "from all_tables a left join all_tab_comments b "
+ + "on (a.owner=b.owner and a.table_name=b.table_name) ";
+
+ private static final String SQL_TABLE_VIEW =
+ " union all select view_name, owner, NULL, 'VIEW' from all_views ";
+
+ private static final String SQL_TABLE_NONE = SQL_TABLE_BASE + SQL_TABLE_VIEW;
+
+ private static final String SQL_TABLE_SCHEMA = SQL_TABLE_BASE
+ + "where a.owner = ? " + SQL_TABLE_VIEW + " where owner = ?";
+
+ private static final String SQL_TABLE_TABLE = SQL_TABLE_BASE
+ + "where a.table_name = ?" + SQL_TABLE_VIEW + "where view_name = ?";
+
+ private static final String SQL_TABLE_SCHEMA_AND_TABLE = SQL_TABLE_BASE
+ + "where a.owner = ? and a.table_name = ?" + SQL_TABLE_VIEW
+ + "where owner = ? and view_name = ?";
+
+ private PreparedStatement prepTableNone;
+
+ private PreparedStatement prepTableSchema;
+
+ private PreparedStatement prepTableTable;
+
+ private PreparedStatement prepTableSchemaAndTable;
+
+ /* ***************************** */
+ /* ******* INDEX QUERIES ******* */
+ /* ***************************** */
+ private static final String SQL_INDEX_BASE = "select a.column_name, "
+ + "decode(b.uniqueness,'UNIQUE','false','true'), "
+ + "a.index_owner, a.index_name, a.table_name "
+ + "from all_ind_columns a left join all_indexes b on "
+ + "(a.table_name = b.table_name "
+ + " AND a.table_owner = b.table_owner "
+ + " AND a.index_name = b.index_name) ";
+
+ private static final String SQL_INDEX_ORDER = " order by a.table_name, a.column_position";
+
+ private static final String SQL_INDEX_NONE = SQL_INDEX_BASE
+ + SQL_INDEX_ORDER;
+
+ private static final String SQL_INDEX_SCHEMA = SQL_INDEX_BASE
+ + "where a.table_owner = ? " + SQL_INDEX_ORDER;
+
+ private static final String SQL_INDEX_TABLE = SQL_INDEX_BASE
+ + "where a.table_name = ? " + SQL_INDEX_ORDER;
+
+ private static final String SQL_INDEX_SCHEMA_AND_TABLE = SQL_INDEX_BASE
+ + "where a.table_owner = ? and a.table_name = ? " + SQL_INDEX_ORDER;
+
+ private PreparedStatement prepIndexNone;
+
+ private PreparedStatement prepIndexSchema;
+
+ private PreparedStatement prepIndexTable;
+
+ private PreparedStatement prepIndexSchemaAndTable;
+
+ /* ****** COLUMN QUERIES ******* */
+ private static final String SQL_COLUMN_BASE = "select a.column_name as COLUMN_NAME, a.owner as TABLE_SCHEM, "
+ + "decode(a.nullable,'N',0,1) as NULLABLE, "
+ + "decode(a.data_type, 'FLOAT',decode(a.data_precision,null, "
+ + "a.data_length, a.data_precision), 'NUMBER', decode(a.data_precision,null, "
+ + "a.data_length, a.data_precision), a.data_length) as COLUMN_SIZE, "
+ + "decode(a.data_type,'CHAR',1, 'DATE',91, 'FLOAT',6, "
+ + "'LONG',-1, 'NUMBER',2, 'VARCHAR2',12, 'BFILE',-13, "
+ + "'BLOB',2004, 'CLOB',2005, 'MLSLABEL',1111, 'NCHAR',1, 'NCLOB',2005, 'NVARCHAR2',12, "
+ + "'RAW',-3, 'ROWID',1111, 'UROWID',1111, 'LONG RAW', -4, "
+ + "'TIMESTAMP', 93, 'XMLTYPE',2005, 1111) as DATA_TYPE, "
+ + "a.table_name as TABLE_NAME, a.data_type as TYPE_NAME, "
+ + "decode(a.data_scale, null, 0 ,a.data_scale) as DECIMAL_DIGITS, b.comments "
+ + "from all_tab_columns a left join all_col_comments b on "
+ + "(a.owner=b.owner and a.table_name=b.table_name and a.column_name=b.column_name) ";
+
+ private static final String SQL_COLUMN_ORDER = " order by column_id ";
+
+ private static final String SQL_COLUMN_NONE = SQL_COLUMN_BASE
+ + SQL_COLUMN_ORDER;
+
+ private static final String SQL_COLUMN_SCHEMA = SQL_COLUMN_BASE
+ + "where a.owner = ? " + SQL_COLUMN_ORDER;
+
+ private static final String SQL_COLUMN_TABLE = SQL_COLUMN_BASE
+ + "where a.table_name = ? " + SQL_COLUMN_ORDER;
+
+ private static final String SQL_COLUMN_COLUMN = SQL_COLUMN_BASE
+ + "where a.column_name = ? " + SQL_COLUMN_ORDER;
+
+ private static final String SQL_COLUMN_SCHEMA_AND_TABLE = SQL_COLUMN_BASE
+ + "where a.owner = ? and a.table_name = ? " + SQL_COLUMN_ORDER;
+
+ private static final String SQL_COLUMN_SCHEMA_AND_COLUMN = SQL_COLUMN_BASE
+ + "where a.owner = ? and a.column_name = ? " + SQL_COLUMN_ORDER;
+
+ private static final String SQL_COLUMN_TABLE_AND_COLUMN = SQL_COLUMN_BASE
+ + "where a.table_name = ? and a.column_name = ? "
+ + SQL_COLUMN_ORDER;
+
+ private static final String SQL_COLUMN_SCHEMA_AND_TABLE_AND_COLUMN = SQL_COLUMN_BASE
+ + "where a.owner = ? and a.table_name = ? and a.column_name = ? "
+ + SQL_COLUMN_ORDER;
+
+ private PreparedStatement prepColumnNone;
+
+ private PreparedStatement prepColumnSchema;
+
+ private PreparedStatement prepColumnTable;
+
+ private PreparedStatement prepColumnColumn;
+
+ private PreparedStatement prepColumnSchemaAndTable;
+
+ private PreparedStatement prepColumnSchemaAndColumn;
+
+ private PreparedStatement prepColumnTableAndColumn;
+
+ private PreparedStatement prepColumnSchemaAndTableAndColumn;
+
+ /* ***************************** */
+ /* ******** PK QUERIES ********* */
+ /* ***************************** */
+ private static final String SQL_PK_BASE = "select c.table_name, c.column_name, c.position, c.constraint_name, "
+ + "c.owner from all_cons_columns c left join all_constraints k on "
+ + "(k.owner = c.owner AND k.table_name = c.table_name AND k.constraint_name = c.constraint_name) "
+ + "where k.constraint_type = 'P' ";
+
+ private static final String SQL_PK_ORDER = " order by c.table_name, c.constraint_name, c.position desc ";
+
+ private static final String SQL_PK_NONE = SQL_PK_BASE + SQL_PK_ORDER;
+
+ private static final String SQL_PK_SCHEMA = SQL_PK_BASE
+ + " and c.owner = ? " + SQL_PK_ORDER;
+
+ private static final String SQL_PK_TABLE = SQL_PK_BASE
+ + " and c.table_name = ? " + SQL_PK_ORDER;
+
+ private static final String SQL_PK_SCHEMA_AND_TABLE = SQL_PK_BASE
+ + " and c.owner = ? and c.table_name = ? " + SQL_PK_ORDER;
+
+ private PreparedStatement prepPkNone;
+
+ private PreparedStatement prepPkSchema;
+
+ private PreparedStatement prepPkTable;
+
+ private PreparedStatement prepPkSchemaAndTable;
+
+ /* ***************************** */
+ /* ******** FK QUERIES ********* */
+ /* ***************************** */
+ private static final String SQL_FK_BASE = "select p.table_name, p.owner, f.owner, f.table_name, "
+ + "fc.column_name, pc.column_name, f.constraint_name, fc.position "
+ + "from all_constraints p left join all_cons_columns pc on "
+ + "(pc.owner = p.owner and pc.constraint_name = p.constraint_name and pc.table_name = p.table_name) "
+ + "left join all_constraints f on (p.owner = f.r_owner and p.constraint_name = f.r_constraint_name) "
+ + "left join all_cons_columns fc on "
+ + "(fc.owner = f.owner and fc.constraint_name = f.constraint_name and fc.table_name = f.table_name and"
+ + " fc.position = pc.position) where f.constraint_type = 'R' AND p.constraint_type = 'P' ";
+
+ private static final String SQL_FK_ORDER = " order by f.table_name, f.constraint_name, fc.position ";
+
+ private static final String SQL_FK_NONE = SQL_FK_BASE + SQL_FK_ORDER;
+
+ private static final String SQL_FK_SCHEMA = SQL_FK_BASE
+ + " and p.owner = ? " + SQL_FK_ORDER;
+
+ private static final String SQL_FK_TABLE = SQL_FK_BASE
+ + " and p.table_name = ? " + SQL_FK_ORDER;
+
+ private static final String SQL_FK_SCHEMA_AND_TABLE = SQL_FK_BASE
+ + " and p.owner = ? and p.table_name = ? " + SQL_FK_ORDER;
+
+ private PreparedStatement prepFkNone;
+
+ private PreparedStatement prepFkSchema;
+
+ private PreparedStatement prepFkTable;
+
+ private PreparedStatement prepFkSchemaAndTable;
+
+ public Iterator getTables(final String catalog, final String schema,
+ String table) {
+ try {
log.debug("getTables(" + catalog + "." + schema + "." + table + ")");
- // Collect both Tables and Views from the 'ALL' data dicitonary tables.
- // Note: This will potentally collect more tables that the jdbc meta data
- Statement stmt = this.getConnection().createStatement();
- StringBuffer query = new StringBuffer();
- query.append("select table_name, owner, 'TABLE' from all_tables ");
- if (schema != null || table != null)
- query.append("where ");
- if (schema != null) {
- query.append("owner='" + schema + "' ");
- }
- if (table != null) {
- if (schema != null)
- query.append("and ");
- query.append("table_name = '" + table + "' ");
- }
- query.append("union all ");
- query.append("select view_name, owner, 'VIEW' from all_views ");
- if (schema != null || table != null)
- query.append("where ");
- if (schema != null) {
- query.append("owner='" + schema + "' ");
- }
- if (table != null) {
- if (schema != null)
- query.append("and ");
- query.append("view_name = '" + table + "' ");
- }
- if (log.isDebugEnabled())
- log.debug("getTables Query:" + query.toString());
- ResultSet tableRs = stmt.executeQuery(query.toString());
-
- return new ResultSetIterator(stmt, tableRs, getSQLExceptionConverter()) {
+ ResultSet tableRs;
+ tableRs = getTableResultSet( schema, table );
+
+ return new ResultSetIterator(null, tableRs,
+ getSQLExceptionConverter()) {
+
+ Map element = new HashMap();
+
- Map element = new HashMap();
- protected Object convertRow(ResultSet tableRs) throws SQLException {
+ protected Object convertRow(ResultSet tableRs)
+ throws SQLException {
element.clear();
element.put("TABLE_NAME", tableRs.getString(1));
element.put("TABLE_SCHEM", tableRs.getString(2));
element.put("TABLE_CAT", null);
- element.put("TABLE_TYPE", tableRs.getString(3));
- element.put("REMARKS", null);
- return element;
+ element.put("TABLE_TYPE", tableRs.getString(4));
+ element.put("REMARKS", tableRs.getString(3));
+ log.info( element );
+ return element;
}
+
protected Throwable handleSQLException(SQLException e) {
- // schemaRs and catalogRs are only used for error reporting if
+ // schemaRs and catalogRs are only used for error reporting
+ // if
// we get an exception
- String databaseStructure = getDatabaseStructure( catalog, schema );
- throw getSQLExceptionConverter().convert( e,
+ String databaseStructure = getDatabaseStructure(catalog,
+ schema);
+ throw getSQLExceptionConverter().convert(
+ e,
"Could not get list of tables from database. Probably a JDBC driver problem. "
- + databaseStructure, null );
+ + databaseStructure, null);
}
};
} catch (SQLException e) {
- // schemaRs and catalogRs are only used for error reporting if we get an exception
- String databaseStructure = getDatabaseStructure(catalog,schema);
- throw getSQLExceptionConverter().convert(e, "Could not get list of tables from database. Probably a JDBC driver problem. " + databaseStructure, null);
- }
+ // schemaRs and catalogRs are only used for error reporting if we
+ // get an exception
+ String databaseStructure = getDatabaseStructure(catalog, schema);
+ throw getSQLExceptionConverter().convert(
+ e,
+ "Could not get list of tables from database. Probably a JDBC driver problem. "
+ + databaseStructure, null);
+ }
}
+
+ public Iterator getIndexInfo(final String catalog, final String schema,
+ final String table) {
+ try {
+ log.debug("getIndexInfo(" + catalog + "." + schema + "." + table + ")");
- public Iterator getIndexInfo(final String catalog, final String schema, final String table) {
- try {
- log.debug("getIndexInfo(" + catalog + "." + schema + "." + table + ")");
- // Collect both Indexes from the 'ALL' data dicitonary table.
- // It is assumed that atleast the TABLE name is supplied.
- Statement stmt = this.getConnection().createStatement();
- StringBuffer query = new StringBuffer();
-
- query.append("select a.column_name, decode(b.uniqueness,'UNIQUE','false','true'), a.index_owner, a.index_name, a.table_name ");
- query.append("from all_ind_columns a, all_indexes b ");
- query.append("where a.table_name = b.table_name ");
- query.append("AND a.table_owner = b.table_owner ");
- query.append("AND a.index_name = b.index_name ");
- if (schema != null) {
- query.append("AND a.table_owner='" + schema + "' ");
- }
- query.append("AND a.table_name = '" + table + "' ");
- query.append("order by a.table_name, a.column_position ");
-
- if (log.isDebugEnabled())
- log.debug("getIndexInfo Query:" + query.toString());
-
- ResultSet indexRs = stmt.executeQuery(query.toString());
-
- return new ResultSetIterator(stmt, indexRs, getSQLExceptionConverter()) {
+ ResultSet indexRs;
+ indexRs = getIndexInfoResultSet( schema, table );
+
+ return new ResultSetIterator(null, indexRs,
+ getSQLExceptionConverter()) {
+
+ Map element = new HashMap();
+
- Map element = new HashMap();
protected Object convertRow(ResultSet rs) throws SQLException {
element.clear();
element.put("COLUMN_NAME", rs.getString(1));
- element.put("TYPE", new Short((short)1)); // CLUSTERED INDEX
- element.put("NON_UNIQUE", Boolean.valueOf( rs.getString(2) ));
+ element.put("TYPE", new Short((short) 1)); // CLUSTERED
+ // INDEX
+ element.put("NON_UNIQUE", Boolean.valueOf(rs.getString(2)));
element.put("TABLE_SCHEM", rs.getString(3));
- element.put("INDEX_NAME", rs.getString(4));
+ element.put("INDEX_NAME", rs.getString(4));
element.put("TABLE_CAT", null);
element.put("TABLE_NAME", rs.getString(5));
- return element;
+ return element;
}
+
protected Throwable handleSQLException(SQLException e) {
- throw getSQLExceptionConverter().convert(e, "Exception while getting index info for " + Table.qualify(catalog, schema, table), null);
+ throw getSQLExceptionConverter().convert(
+ e,
+ "Exception while getting index info for "
+ + Table.qualify(catalog, schema, table),
+ null);
}
};
} catch (SQLException e) {
- throw getSQLExceptionConverter().convert(e, "Exception while getting index info for " + Table.qualify(catalog, schema, table), null);
- }
+ throw getSQLExceptionConverter().convert(
+ e,
+ "Exception while getting index info for "
+ + Table.qualify(catalog, schema, table), null);
+ }
}
- public Iterator getColumns(final String catalog, final String schema, final String table, String column) {
- try {
+ public Iterator getColumns(final String catalog, final String schema,
+ final String table, String column) {
+
+ try {
log.debug("getColumns(" + catalog + "." + schema + "." + table + "." + column + ")");
- // Collect Columns from the 'ALL' data dicitonary table.
- // A decode is used to map the type name to the JDBC Type ID
- Statement stmt = this.getConnection().createStatement();
- StringBuffer query = new StringBuffer();
- query.append("select column_name as COLUMN_NAME, owner as TABLE_SCHEM, decode(nullable,'N',0,1) as NULLABLE, ");
- query.append("decode(data_type, 'FLOAT',decode(data_precision,null, data_length, data_precision), 'NUMBER', decode(data_precision,null, data_length, data_precision), data_length) as COLUMN_SIZE, ");
- query.append("decode(data_type,'CHAR',1, 'DATE',91, 'FLOAT',6, 'LONG',-1, 'NUMBER',2, 'VARCHAR2',12, 'BFILE',-13, ");
- query.append("'BLOB',2004, 'CLOB',2005, 'MLSLABEL',1111, 'NCHAR',1, 'NCLOB',2005, 'NVARCHAR2',12, ");
- query.append("'RAW',-3, 'ROWID',1111, 'UROWID',1111, 'LONG RAW', -4, 'TIMESTAMP', 93, 'XMLTYPE',2005, 1111) as DATA_TYPE, ");
- query.append("table_name as TABLE_NAME, data_type as TYPE_NAME, decode(data_scale, null, 0 ,data_scale) as DECIMAL_DIGITS ");
- query.append("from all_tab_columns ");
- if (schema != null || table != null || column != null)
- query.append("where ");
- if (schema != null) {
- query.append("owner='" + schema + "' ");
- }
- if (table != null) {
- if (schema != null)
- query.append("and ");
- query.append("table_name = '" + table + "' ");
- }
- if (column != null) {
- if (schema != null || table != null)
- query.append("and ");
- query.append("column_name = '" + column + "' ");
- }
- query.append("order by column_id ");
-
- if (log.isDebugEnabled())
- log.debug("getIndexInfo Query:" + query.toString());
-
- ResultSet columnRs = stmt.executeQuery(query.toString());
-
- return new ResultSetIterator(stmt, columnRs, getSQLExceptionConverter()) {
+ ResultSet columnRs;
+ columnRs = getColumnsResultSet( schema, table, column );
+
+ return new ResultSetIterator(null, columnRs,
+ getSQLExceptionConverter()) {
+
+ Map element = new HashMap();
+
- Map element = new HashMap();
protected Object convertRow(ResultSet rs) throws SQLException {
element.clear();
element.put("COLUMN_NAME", rs.getString(1));
@@ -181,49 +329,44 @@
element.put("DATA_TYPE", new Integer(rs.getInt(5)));
element.put("TABLE_NAME", rs.getString(6));
element.put("TYPE_NAME", rs.getString(7));
- element.put("DECIMAL_DIGITS", new Integer(rs.getInt(8)));
+ element.put("DECIMAL_DIGITS", new Integer(rs.getInt(8)));
element.put("TABLE_CAT", null);
- element.put("REMARKS", null);
+ element.put("REMARKS", rs.getString(9));
return element;
}
+
protected Throwable handleSQLException(SQLException e) {
- throw getSQLExceptionConverter().convert(e, "Error while reading column meta data for " + Table.qualify(catalog, schema, table), null);
+ throw getSQLExceptionConverter().convert(
+ e,
+ "Error while reading column meta data for "
+ + Table.qualify(catalog, schema, table),
+ null);
}
};
} catch (SQLException e) {
- throw getSQLExceptionConverter().convert(e, "Error while reading column meta data for " + Table.qualify(catalog, schema, table), null);
- }
+ throw getSQLExceptionConverter().convert(
+ e,
+ "Error while reading column meta data for "
+ + Table.qualify(catalog, schema, table), null);
+ }
}
- public Iterator getPrimaryKeys(final String catalog, final String schema, final String table) {
- try {
- log.debug("getPrimaryKeys(" + catalog + "." + schema + "." + table + ")");
- // Collect PrimaryKeys from the 'ALL' data dicitonary tables.
- Statement stmt = this.getConnection().createStatement();
- StringBuffer query = new StringBuffer();
-
- query.append("select c.table_name, c.column_name, c.position, c.constraint_name, c.owner ");
- query.append("from all_cons_columns c, all_constraints k ");
- query.append("where k.constraint_type = 'P' ");
- query.append("AND k.constraint_name = c.constraint_name ");
- query.append("AND k.table_name = c.table_name ");
- query.append("AND k.owner = c.owner ");
- if (schema != null) {
- query.append("AND k.owner='" + schema + "' ");
- }
- if (table != null) {
- query.append("AND k.table_name = '" + table + "' ");
- }
- query.append("order by c.table_name, c.constraint_name, c.position desc ");
-
- if (log.isDebugEnabled())
- log.debug("getPrimaryKeys Query:" + query.toString());
-
- ResultSet pkeyRs = stmt.executeQuery(query.toString());
-
- return new ResultSetIterator(stmt, pkeyRs, getSQLExceptionConverter()) {
+ public Iterator getPrimaryKeys(final String catalog, final String schema,
+ final String table) {
+
+ try {
+ log.debug("getPrimaryKeys(" + catalog + "." + schema + "." + table
+ + ")");
+
+ ResultSet pkeyRs;
+ pkeyRs = getPrimaryKeysResultSet( schema, table );
+
+ return new ResultSetIterator(null, pkeyRs,
+ getSQLExceptionConverter()) {
+
+ Map element = new HashMap();
+
- Map element = new HashMap();
protected Object convertRow(ResultSet rs) throws SQLException {
element.clear();
element.put("TABLE_NAME", rs.getString(1));
@@ -232,75 +375,314 @@
element.put("PK_NAME", rs.getString(4));
element.put("TABLE_SCHEM", rs.getString(5));
element.put("TABLE_CAT", null);
- return element;
+ return element;
}
+
protected Throwable handleSQLException(SQLException e) {
- throw getSQLExceptionConverter().convert(e, "Error while reading primary key meta data for " + Table.qualify(catalog, schema, table), null);
+ throw getSQLExceptionConverter().convert(
+ e,
+ "Error while reading primary key meta data for "
+ + Table.qualify(catalog, schema, table),
+ null);
}
};
} catch (SQLException e) {
- throw getSQLExceptionConverter().convert(e, "Error while reading primary key meta data for " + Table.qualify(catalog, schema, table), null);
- }
+ throw getSQLExceptionConverter().convert(
+ e,
+ "Error while reading primary key meta data for "
+ + Table.qualify(catalog, schema, table), null);
+ }
}
- public Iterator getExportedKeys(final String catalog, final String schema, final String table) {
- try {
- log.debug("getExportedKeys(" + catalog + "." + schema + "." + table + ")");
- // Collect ExportedKeys from the 'ALL' data dicitonary tables.
+ public Iterator getExportedKeys(final String catalog, final String schema,
+ final String table) {
+
+ try {
+ log.debug("getExportedKeys(" + catalog + "." + schema + "." + table
+ + ")");
- Statement stmt = this.getConnection().createStatement();
- StringBuffer query = new StringBuffer();
+ ResultSet pExportRs;
+ pExportRs = getExportedKeysResultSet( schema, table );
- query.append("select p.table_name, p.owner, f.owner, f.table_name, fc.column_name, pc.column_name, f.constraint_name, fc.position ");
- query.append("from all_cons_columns pc, all_constraints p, all_cons_columns fc, all_constraints f ");
- query.append("where f.constraint_type = 'R' ");
- query.append("AND p.owner = f.r_owner ");
- query.append("AND p.constraint_name = f.r_constraint_name ");
- query.append("AND p.constraint_type = 'P' ");
- query.append("AND pc.owner = p.owner ");
- query.append("AND pc.constraint_name = p.constraint_name ");
- query.append("AND pc.table_name = p.table_name ");
- query.append("AND fc.owner = f.owner ");
- query.append("AND fc.constraint_name = f.constraint_name ");
- query.append("AND fc.table_name = f.table_name ");
- query.append("AND fc.position = pc.position ");
- if (schema != null) {
- query.append("AND p.owner='" + schema + "' ");
- }
- if (table != null) {
- query.append("AND p.table_name = '" + table + "' ");
- }
- query.append("order by f.table_name, f.constraint_name, fc.position ");
-
- if (log.isDebugEnabled())
- log.debug("getExportedKeys Query:" + query.toString());
-
- ResultSet pExportRs = stmt.executeQuery(query.toString());
-
- return new ResultSetIterator(stmt, pExportRs, getSQLExceptionConverter()) {
+ return new ResultSetIterator(null, pExportRs,
+ getSQLExceptionConverter()) {
+
+ Map element = new HashMap();
+
- Map element = new HashMap();
protected Object convertRow(ResultSet rs) throws SQLException {
element.clear();
- element.put( "PKTABLE_NAME", rs.getString(1));
- element.put( "PKTABLE_SCHEM", rs.getString(2));
- element.put( "PKTABLE_CAT", null);
- element.put( "FKTABLE_CAT", null);
- element.put( "FKTABLE_SCHEM",rs.getString(3));
- element.put( "FKTABLE_NAME", rs.getString(4));
- element.put( "FKCOLUMN_NAME", rs.getString(5));
- element.put( "PKCOLUMN_NAME", rs.getString(6));
- element.put( "FK_NAME", rs.getString(7));
- element.put( "KEY_SEQ", new Short(rs.getShort(8)));
- return element;
+ element.put("PKTABLE_NAME", rs.getString(1));
+ element.put("PKTABLE_SCHEM", rs.getString(2));
+ element.put("PKTABLE_CAT", null);
+ element.put("FKTABLE_CAT", null);
+ element.put("FKTABLE_SCHEM", rs.getString(3));
+ element.put("FKTABLE_NAME", rs.getString(4));
+ element.put("FKCOLUMN_NAME", rs.getString(5));
+ element.put("PKCOLUMN_NAME", rs.getString(6));
+ element.put("FK_NAME", rs.getString(7));
+ element.put("KEY_SEQ", new Short(rs.getShort(8)));
+ return element;
}
+
protected Throwable handleSQLException(SQLException e) {
- throw getSQLExceptionConverter().convert(e, "Error while reading exported keys meta data for " + Table.qualify(catalog, schema, table), null);
+ throw getSQLExceptionConverter().convert(
+ e,
+ "Error while reading exported keys meta data for "
+ + Table.qualify(catalog, schema, table),
+ null);
}
};
} catch (SQLException e) {
- throw getSQLExceptionConverter().convert(e, "Error while reading exported keys meta data for " + Table.qualify(catalog, schema, table), null);
- }
+ throw getSQLExceptionConverter().convert(
+ e,
+ "Error while reading exported keys meta data for "
+ + Table.qualify(catalog, schema, table), null);
+ }
+ }
+
+ public void close() {
+ try {
+ prepTableNone = close( prepTableNone );
+ prepTableSchema = close( prepTableSchema );
+ prepTableTable = close( prepTableTable );
+ prepTableSchemaAndTable = close( prepTableSchemaAndTable );
+ prepIndexNone = close( prepIndexNone );
+ prepIndexSchema = close( prepIndexSchema );
+ prepIndexTable = close( prepIndexTable );
+ prepIndexSchemaAndTable = close( prepIndexSchemaAndTable );
+ prepColumnNone = close( prepColumnNone );
+ prepColumnSchema = close( prepColumnSchema );
+ prepColumnTable = close( prepColumnTable );
+ prepColumnColumn = close( prepColumnColumn );
+ prepColumnSchemaAndTable = close( prepColumnSchemaAndTable );
+ prepColumnSchemaAndColumn = close( prepColumnSchemaAndColumn );
+ prepColumnTableAndColumn = close( prepColumnTableAndColumn );
+ prepColumnSchemaAndTableAndColumn = close( prepColumnSchemaAndTableAndColumn );
+ prepPkNone = close( prepPkNone );
+ prepPkSchema = close( prepPkSchema );
+ prepPkTable = close( prepPkTable );
+ prepPkSchemaAndTable = close( prepPkSchemaAndTable );
+ prepFkNone = close( prepFkNone );
+ prepFkSchema = close( prepFkSchema );
+ prepFkTable = close( prepFkTable );
+ prepFkSchemaAndTable = close( prepFkSchemaAndTable );
+ }
+ finally {
+ super.close();
+ }
}
+
+ private PreparedStatement close(PreparedStatement ps) {
+ if(ps==null) {
+ return null;
+ } else {
+ try {
+ ps.close();
+ }
+ catch (SQLException e) {
+ throw getSQLExceptionConverter().convert(e,
+ "Problem while closing prepared statement", null);
+ }
+ return null;
+ }
+
+ }
+
+ private ResultSet getPrimaryKeysResultSet(final String schema, final String table) throws SQLException {
+ if(prepPkNone==null) {
+ // Prepare primary key queries
+ log.debug("Preparing primary key queries...");
+ Connection con = getConnection();
+ prepPkNone = con .prepareStatement(SQL_PK_NONE);
+ prepPkSchema = con.prepareStatement(SQL_PK_SCHEMA);
+ prepPkTable = con.prepareStatement(SQL_PK_TABLE);
+ prepPkSchemaAndTable = con
+ .prepareStatement(SQL_PK_SCHEMA_AND_TABLE);
+ log.debug(" primary key queries prepared!");
+ }
+
+ ResultSet pkeyRs;
+ if (schema == null && table == null) {
+ pkeyRs = prepPkNone.executeQuery();
+ } else if (schema != null) {
+ if (table == null) {
+ prepPkSchema.setString(1, schema);
+ pkeyRs = prepPkSchema.executeQuery();
+ } else {
+ prepPkSchemaAndTable.setString(1, schema);
+ prepPkSchemaAndTable.setString(2, table);
+ pkeyRs = prepPkSchemaAndTable.executeQuery();
+ }
+ } else {
+ prepPkTable.setString(1, table);
+ pkeyRs = prepPkTable.executeQuery();
+ }
+ return pkeyRs;
+ }
+
+ private ResultSet getIndexInfoResultSet(final String schema, final String table) throws SQLException {
+ if(prepIndexNone==null) {
+ // Prepare index queries
+ log.debug("Preparing index queries...");
+ Connection con = getConnection();
+ prepIndexNone = con.prepareStatement(SQL_INDEX_NONE);
+ prepIndexSchema = con.prepareStatement(SQL_INDEX_SCHEMA);
+ prepIndexTable = con.prepareStatement(SQL_INDEX_TABLE);
+ prepIndexSchemaAndTable = con.prepareStatement(SQL_INDEX_SCHEMA_AND_TABLE);
+ log.debug(" ...index queries prepared!");
+ }
+ ResultSet indexRs;
+ if (schema == null && table == null) {
+ indexRs = prepIndexNone.executeQuery();
+ } else if (schema != null) {
+ if (table == null) {
+ prepIndexSchema.setString(1, schema);
+ indexRs = prepIndexSchema.executeQuery();
+ } else {
+ prepIndexSchemaAndTable.setString(1, schema);
+ prepIndexSchemaAndTable.setString(2, table);
+ indexRs = prepIndexSchemaAndTable.executeQuery();
+ }
+ } else {
+ prepIndexTable.setString(1, table);
+ indexRs = prepIndexTable.executeQuery();
+ }
+ return indexRs;
+ }
+
+ private ResultSet getExportedKeysResultSet(final String schema, final String table) throws SQLException {
+ if(prepFkNone==null) {
+ // Prepare foreign key queries
+ log.debug("Preparing foreign key queries...");
+ Connection con = getConnection();
+ prepFkNone = con .prepareStatement(SQL_FK_NONE);
+ prepFkSchema = con.prepareStatement(SQL_FK_SCHEMA);
+ prepFkTable = con.prepareStatement(SQL_FK_TABLE);
+ prepFkSchemaAndTable = con
+ .prepareStatement(SQL_FK_SCHEMA_AND_TABLE);
+ log.debug(" foreign key queries prepared!");
+ }
+
+ ResultSet pExportRs;
+ if (schema == null && table == null) {
+ pExportRs = prepFkNone.executeQuery();
+ } else if (schema != null) {
+ if (table == null) {
+ prepFkSchema.setString(1, schema);
+ pExportRs = prepFkSchema.executeQuery();
+ } else {
+ prepFkSchemaAndTable.setString(1, schema);
+ prepFkSchemaAndTable.setString(2, table);
+ pExportRs = prepFkSchemaAndTable.executeQuery();
+ }
+ } else {
+ prepFkTable.setString(1, table);
+ pExportRs = prepFkTable.executeQuery();
+ }
+ return pExportRs;
+ }
+ private ResultSet getColumnsResultSet(final String schema, final String table, String column) throws SQLException {
+
+ if(prepColumnNone==null) {
+ // Prepare column queries
+ log.debug("Preparing column queries...");
+ Connection con = getConnection();
+ prepColumnNone = con.prepareStatement(SQL_COLUMN_NONE);
+ prepColumnSchema = con.prepareStatement(SQL_COLUMN_SCHEMA);
+ prepColumnTable = con.prepareStatement(SQL_COLUMN_TABLE);
+ prepColumnColumn = con.prepareStatement(SQL_COLUMN_COLUMN);
+ prepColumnSchemaAndTable = con.prepareStatement(SQL_COLUMN_SCHEMA_AND_TABLE);
+ prepColumnSchemaAndColumn = con.prepareStatement(SQL_COLUMN_SCHEMA_AND_COLUMN);
+ prepColumnTableAndColumn = con.prepareStatement(SQL_COLUMN_TABLE_AND_COLUMN);
+ prepColumnSchemaAndTableAndColumn = con.prepareStatement(SQL_COLUMN_SCHEMA_AND_TABLE_AND_COLUMN);
+ log.debug(" ...column queries prepared!");
+ }
+
+ ResultSet columnRs;
+ // No parameters specified
+ if (schema == null && table == null && column == null) {
+ columnRs = prepColumnNone.executeQuery();
+ } else if (schema != null) {
+ if (table == null) {
+ if (column == null) {
+ // Schema specified
+ prepColumnSchema.setString(1, schema);
+ columnRs = prepColumnSchema.executeQuery();
+ } else {
+ // Schema and column specified
+ prepColumnSchemaAndColumn.setString(1, schema);
+ prepColumnSchemaAndColumn.setString(2, column);
+ columnRs = prepColumnSchemaAndColumn.executeQuery();
+ }
+ } else {
+ if (column == null) {
+ // Schema and table specified
+ prepColumnSchemaAndTable.setString(1, schema);
+ prepColumnSchemaAndTable.setString(2, table);
+ columnRs = prepColumnSchemaAndTable.executeQuery();
+ } else {
+ // Schema, table and column specified
+ prepColumnSchemaAndTableAndColumn.setString(1, schema);
+ prepColumnSchemaAndTableAndColumn.setString(2, table);
+ prepColumnSchemaAndTableAndColumn.setString(3, column);
+ columnRs = prepColumnSchemaAndTableAndColumn.executeQuery();
+ }
+ }
+ } else {
+ if (table == null) {
+ // Column specified
+ prepColumnColumn.setString(1, column);
+ columnRs = prepColumnColumn.executeQuery();
+ } else {
+ if (column == null) {
+ // Table specified
+ prepColumnTable.setString(1, table);
+ columnRs = prepColumnTable.executeQuery();
+ } else {
+ // Table and column specified
+ prepColumnTableAndColumn.setString(1, table);
+ prepColumnTableAndColumn.setString(2, column);
+ columnRs = prepColumnTableAndColumn.executeQuery();
+ }
+ }
+ }
+ return columnRs;
+ }
+
+ private ResultSet getTableResultSet(final String schema, String table) throws SQLException {
+ ResultSet tableRs;
+ if(prepTableNone==null) {
+ // Prepare table queries
+ log.debug("Preparing table queries...");
+ Connection connection2 = getConnection();
+ prepTableNone = connection2.prepareStatement(SQL_TABLE_NONE);
+ prepTableSchema = connection2.prepareStatement(SQL_TABLE_SCHEMA);
+ prepTableTable = connection2.prepareStatement(SQL_TABLE_TABLE);
+ prepTableSchemaAndTable = connection2.prepareStatement(SQL_TABLE_SCHEMA_AND_TABLE);
+ log.debug(" ...table queries prepared!");
+ }
+ if (schema == null && table == null) {
+ tableRs = prepTableNone.executeQuery();
+ } else if (schema != null) {
+ if (table == null) {
+ prepTableSchema.setString(1, schema);
+ prepTableSchema.setString(2, schema);
+ tableRs = prepTableSchema.executeQuery();
+ } else {
+ prepTableSchemaAndTable.setString(1, schema);
+ prepTableSchemaAndTable.setString(2, table);
+ prepTableSchemaAndTable.setString(3, schema);
+ prepTableSchemaAndTable.setString(4, table);
+ tableRs = prepTableSchemaAndTable.executeQuery();
+ }
+ } else {
+ prepTableTable.setString(1, table);
+ prepTableTable.setString(2, table);
+ tableRs = prepTableTable.executeQuery();
+ }
+ return tableRs;
+ }
+
}
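A subtlety in the new table queries: the WHERE clause is repeated in both halves of the UNION ALL (once against all_tables, once against all_views), so each filter value occupies two '?' placeholders and getTableResultSet() has to bind it twice. A reduced, self-contained sketch of that double bind (simplified column list, not the exact SQL above):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    class UnionBinding {
        // The same schema value feeds both branches of the UNION ALL, so it
        // occupies two '?' placeholders and must be bound twice -- the reason
        // getTableResultSet() calls setString(1, schema) and setString(2, schema).
        static ResultSet tablesAndViews(Connection con, String schema) throws SQLException {
            PreparedStatement ps = con.prepareStatement(
                    "select table_name, owner from all_tables where owner = ?"
                    + " union all "
                    + "select view_name, owner from all_views where owner = ?");
            ps.setString(1, schema); // all_tables branch
            ps.setString(2, schema); // all_views branch
            return ps.executeQuery();
        }
    }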
Hibernate SVN: r10872 - branches/Branch_3_2/Hibernate3/test/org/hibernate/test/hql
by hibernate-commits@lists.jboss.org
Author: max.andersen@jboss.com
Date: 2006-11-28 05:39:32 -0500 (Tue, 28 Nov 2006)
New Revision: 10872
Modified:
branches/Branch_3_2/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java
Log:
adjusted test case for Ingres.
Modified: branches/Branch_3_2/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java
===================================================================
--- branches/Branch_3_2/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java 2006-11-28 10:39:27 UTC (rev 10871)
+++ branches/Branch_3_2/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java 2006-11-28 10:39:32 UTC (rev 10872)
@@ -1624,10 +1624,10 @@
public void testSubselectBetween() {
if(supportsSubselectOnLeftSideIn()) {
assertResultSize("from Animal x where (select max(a.bodyWeight) from Animal a) in (1,2,3)", 0);
+ assertResultSize("from Animal x where (select max(a.bodyWeight) from Animal a) between 0 and 100", 0);
+ assertResultSize("from Animal x where (select max(a.description) from Animal a) like 'big%'", 0);
+ assertResultSize("from Animal x where (select max(a.bodyWeight) from Animal a) is not null", 0);
}
- assertResultSize("from Animal x where (select max(a.bodyWeight) from Animal a) between 0 and 100", 0);
- assertResultSize("from Animal x where (select max(a.description) from Animal a) like 'big%'", 0);
- assertResultSize("from Animal x where (select max(a.bodyWeight) from Animal a) is not null", 0);
assertResultSize("from Animal x where exists (select max(a.bodyWeight) from Animal a)", 0);
}
Hibernate SVN: r10871 - trunk/Hibernate3/test/org/hibernate/test/hql
by hibernate-commits@lists.jboss.org
Author: max.andersen@jboss.com
Date: 2006-11-28 05:39:27 -0500 (Tue, 28 Nov 2006)
New Revision: 10871
Modified:
trunk/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java
Log:
adjusted test case for Ingres.
Modified: trunk/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java
===================================================================
--- trunk/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java 2006-11-26 14:16:21 UTC (rev 10870)
+++ trunk/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java 2006-11-28 10:39:27 UTC (rev 10871)
@@ -1624,13 +1624,13 @@
public void testSubselectBetween() {
if(supportsSubselectOnLeftSideIn()) {
assertResultSize("from Animal x where (select max(a.bodyWeight) from Animal a) in (1,2,3)", 0);
+ assertResultSize("from Animal x where (select max(a.bodyWeight) from Animal a) between 0 and 100", 0);
+ assertResultSize("from Animal x where (select max(a.description) from Animal a) like 'big%'", 0);
+ assertResultSize("from Animal x where (select max(a.bodyWeight) from Animal a) is not null", 0);
}
- assertResultSize("from Animal x where (select max(a.bodyWeight) from Animal a) between 0 and 100", 0);
- assertResultSize("from Animal x where (select max(a.description) from Animal a) like 'big%'", 0);
- assertResultSize("from Animal x where (select max(a.bodyWeight) from Animal a) is not null", 0);
assertResultSize("from Animal x where exists (select max(a.bodyWeight) from Animal a)", 0);
}
-
+
private void assertResultSize(String hql, int size) {
Session session = openSession();
Transaction txn = session.beginTransaction();
Hibernate SVN: r10870 - in branches/Branch_3_2/HibernateExt/metadata/src: java/org/hibernate/search/backend/impl java/org/hibernate/search/event java/org/hibernate/search/impl java/org/hibernate/search/util test/org/hibernate/search/test
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2006-11-26 09:16:21 -0500 (Sun, 26 Nov 2006)
New Revision: 10870
Added:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/WeakIdentityHashMap.java
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TransactionTest.java
Log:
ANN-497 one queue per transaction making operations faster.
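The key data structure is the WeakIdentityHashMap added below: Transaction keys are compared by identity, and because the keys are held weakly, a completed or abandoned transaction (and the work queue mapped to it) can be garbage collected instead of leaking. A small, hedged illustration of that behavior using the class added in this commit (GC timing is not guaranteed, hence the "typically"):

    import org.hibernate.search.util.WeakIdentityHashMap;

    class WeakKeyDemo {
        public static void main(String[] args) throws InterruptedException {
            WeakIdentityHashMap queues = new WeakIdentityHashMap();
            Object tx = new Object();           // stands in for a Transaction
            queues.put(tx, "pending work");
            System.out.println(queues.size());  // 1 while tx is strongly reachable

            tx = null;                          // drop the only strong reference
            System.gc();                        // a hint; collection is not guaranteed
            Thread.sleep(100);
            // Once the key is collected, the stale entry is expunged on access.
            System.out.println(queues.size());  // typically 0
        }
    }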
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java 2006-11-25 14:23:04 UTC (rev 10869)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java 2006-11-26 14:16:21 UTC (rev 10870)
@@ -6,6 +6,7 @@
import org.hibernate.search.backend.WorkQueue;
import org.hibernate.search.backend.Work;
+import org.hibernate.search.util.WeakIdentityHashMap;
/**
* Execute some work inside a transaction synchronization
@@ -15,11 +16,17 @@
public class PostTransactionWorkQueueSynchronization implements Synchronization {
private WorkQueue workQueue;
private boolean consumed;
+ private WeakIdentityHashMap queuePerTransaction;
public PostTransactionWorkQueueSynchronization(WorkQueue workQueue) {
this.workQueue = workQueue;
}
+ public PostTransactionWorkQueueSynchronization(WorkQueue workQueue, WeakIdentityHashMap queuePerTransaction) {
+ this(workQueue);
+ this.queuePerTransaction = queuePerTransaction;
+ }
+
public void add(Work work) {
workQueue.add( work );
}
@@ -42,6 +49,9 @@
}
finally {
consumed = true;
+ //clean the Synchronization per Transaction
+ //not needed stricto sensu but a cleaner approach and faster than the GC
+ if (queuePerTransaction != null) queuePerTransaction.removeValue( this );
}
}
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2006-11-25 14:23:04 UTC (rev 10869)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2006-11-26 14:16:21 UTC (rev 10870)
@@ -6,6 +6,7 @@
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
+import java.util.WeakHashMap;
import java.util.concurrent.locks.ReentrantLock;
import javax.transaction.Status;
@@ -15,6 +16,7 @@
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.hibernate.HibernateException;
+import org.hibernate.Transaction;
import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.cfg.Configuration;
import org.hibernate.event.AbstractEvent;
@@ -26,6 +28,7 @@
import org.hibernate.event.PostUpdateEvent;
import org.hibernate.event.PostUpdateEventListener;
import org.hibernate.search.Environment;
+import org.hibernate.search.util.WeakIdentityHashMap;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.backend.AddWork;
import org.hibernate.search.backend.DeleteWork;
@@ -54,7 +57,9 @@
//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
public class FullTextIndexEventListener implements PostDeleteEventListener, PostInsertEventListener,
PostUpdateEventListener, Initializable {
- ReflectionManager reflectionManager;
+ protected ReflectionManager reflectionManager;
+ //not a synchronized map since, for a given transaction, there is no concurrent access
+ protected WeakIdentityHashMap queuePerTransaction;
//FIXME keeping this here is a bad decision since you might want to search indexes without maintaining them
@Deprecated
@@ -75,6 +80,7 @@
if ( initialized ) return;
//yuk
reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
+ queuePerTransaction = new WeakIdentityHashMap();
Class analyzerClass;
String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
@@ -163,13 +169,22 @@
}
private void processWork(Work work, AbstractEvent event) {
- WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
- workQueue.add( work );
- PostTransactionWorkQueueSynchronization sync = new PostTransactionWorkQueueSynchronization( workQueue );
if ( event.getSession().isTransactionInProgress() ) {
- event.getSession().getTransaction().registerSynchronization( sync );
+ Transaction transaction = event.getSession().getTransaction();
+ PostTransactionWorkQueueSynchronization sync = (PostTransactionWorkQueueSynchronization)
+ queuePerTransaction.get( transaction );
+ if ( sync == null || sync.isConsumed() ) {
+ WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
+ sync = new PostTransactionWorkQueueSynchronization( workQueue, queuePerTransaction );
+ transaction.registerSynchronization( sync );
+ queuePerTransaction.put(transaction, sync);
+ }
+ sync.add( work );
}
else {
+ WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
+ PostTransactionWorkQueueSynchronization sync = new PostTransactionWorkQueueSynchronization( workQueue );
+ sync.add( work );
sync.afterCompletion( Status.STATUS_COMMITTED );
}
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2006-11-25 14:23:04 UTC (rev 10869)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2006-11-26 14:16:21 UTC (rev 10870)
@@ -213,6 +213,7 @@
}
public void clear() {
+ postTransactionWorkQueueSynch.afterCompletion( Status.STATUS_ROLLEDBACK );
session.clear();
}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/WeakIdentityHashMap.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/WeakIdentityHashMap.java 2006-11-25 14:23:04 UTC (rev 10869)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/WeakIdentityHashMap.java 2006-11-26 14:16:21 UTC (rev 10870)
@@ -0,0 +1,1019 @@
+//$Id: $
+/*
+ * JBoss, Home of Professional Open Source
+ * Copyright 2005, JBoss Inc., and individual contributors as indicated
+ * by the @authors tag. See the copyright.txt in the distribution for a
+ * full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+
+package org.hibernate.search.util;
+
+
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.WeakReference;
+import java.util.AbstractCollection;
+import java.util.AbstractSet;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.ConcurrentModificationException;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+
+/**
+ * A hashtable-based <tt>Map</tt> implementation with <em>weak keys</em> and
+ * using reference-equality in place of object-equality when comparing keys
+ * (and values). In an <tt>WeakIdentityHashMap</tt>, two keys <tt>k1</tt> and
+ * <tt>k2</tt> are considered equal if and only if <tt>(k1==k2)</tt>.
+ * An entry in a <tt>WeakIdentityHashMap</tt> will automatically be removed when
+ * its key is no longer in ordinary use. More precisely, the presence of a
+ * mapping for a given key will not prevent the key from being discarded by the
+ * garbage collector, that is, made finalizable, finalized, and then reclaimed.
+ * When a key has been discarded its entry is effectively removed from the map.
+ * <p/>
+ * <p>Based on java.util.WeakHashMap</p>
+ * <p>Based on org.jboss.common.collection.WeakIdentityHashMap</p>
+ *
+ * @author Dawid Kurzyniec
+ * @author <a href="mailto:kabir.khan@jboss.org">Kabir Khan</a>
+ * @author Emmanuel Bernard
+
+ * @see java.util.IdentityHashMap
+ * @see java.util.WeakHashMap
+ */
+public class WeakIdentityHashMap /*extends AbstractMap*/ implements Map {
+
+ /**
+ * The default initial capacity -- MUST be a power of two.
+ */
+ private static final int DEFAULT_INITIAL_CAPACITY = 16;
+
+ /**
+ * The maximum capacity, used if a higher value is implicitly specified
+ * by either of the constructors with arguments.
+ * MUST be a power of two <= 1<<30.
+ */
+ private static final int MAXIMUM_CAPACITY = 1 << 30;
+
+ /**
+ * The load factor used when none is specified in the constructor.
+ */
+ private static final float DEFAULT_LOAD_FACTOR = 0.75f;
+
+ /**
+ * The table, resized as necessary. Length MUST Always be a power of two.
+ */
+ private Entry[] table;
+
+ /**
+ * The number of key-value mappings contained in this weak hash map.
+ */
+ private int size;
+
+ /**
+ * The next size value at which to resize (capacity * load factor).
+ */
+ private int threshold;
+
+ /**
+ * The load factor for the hash table.
+ */
+ private final float loadFactor;
+
+ /**
+ * Reference queue for cleared WeakEntries
+ */
+ private final ReferenceQueue queue = new ReferenceQueue();
+
+ /**
+ * The number of times this HashMap has been structurally modified
+ * Structural modifications are those that change the number of mappings in
+ * the HashMap or otherwise modify its internal structure (e.g.,
+ * rehash). This field is used to make iterators on Collection-views of
+ * the HashMap fail-fast. (See ConcurrentModificationException).
+ */
+ private volatile int modCount;
+
+ /**
+ * Each of these fields are initialized to contain an instance of the
+ * appropriate view the first time this view is requested. The views are
+ * stateless, so there's no reason to create more than one of each.
+ */
+ transient volatile Set keySet = null;
+ transient volatile Collection values = null;
+
+ /**
+ * Constructs a new, empty <tt>WeakIdentityHashMap</tt> with the given
+ * initial capacity and the given load factor.
+ *
+ * @param initialCapacity The initial capacity of the
+ * <tt>WeakIdentityHashMap</tt>
+ * @param loadFactor The load factor of the
+ * <tt>WeakIdentityHashMap</tt>
+ * @throws IllegalArgumentException If the initial capacity is negative,
+ * or if the load factor is nonpositive.
+ */
+ public WeakIdentityHashMap(int initialCapacity, float loadFactor) {
+ if ( initialCapacity < 0 )
+ throw new IllegalArgumentException( "Illegal Initial Capacity: " +
+ initialCapacity );
+ if ( initialCapacity > MAXIMUM_CAPACITY )
+ initialCapacity = MAXIMUM_CAPACITY;
+
+ if ( loadFactor <= 0 || Float.isNaN( loadFactor ) )
+ throw new IllegalArgumentException( "Illegal Load factor: " +
+ loadFactor );
+ int capacity = 1;
+ while ( capacity < initialCapacity )
+ capacity <<= 1;
+ table = new Entry[capacity];
+ this.loadFactor = loadFactor;
+ threshold = (int) ( capacity * loadFactor );
+ }
+
+ /**
+ * Constructs a new, empty <tt>WeakIdentityHashMap</tt> with the given
+ * initial capacity and the default load factor, which is <tt>0.75</tt>.
+ *
+ * @param initialCapacity The initial capacity of the
+ * <tt>WeakIdentityHashMap</tt>
+ * @throws IllegalArgumentException If the initial capacity is negative.
+ */
+ public WeakIdentityHashMap(int initialCapacity) {
+ this( initialCapacity, DEFAULT_LOAD_FACTOR );
+ }
+
+ /**
+ * Constructs a new, empty <tt>WeakIdentityHashMap</tt> with the default
+ * initial capacity (16) and the default load factor (0.75).
+ */
+ public WeakIdentityHashMap() {
+ this.loadFactor = DEFAULT_LOAD_FACTOR;
+ threshold = (int) ( DEFAULT_INITIAL_CAPACITY );
+ table = new Entry[DEFAULT_INITIAL_CAPACITY];
+ }
+
+ /**
+ * Constructs a new <tt>WeakIdentityHashMap</tt> with the same mappings as
+ * the specified <tt>Map</tt>. The <tt>WeakIdentityHashMap</tt> is created
+ * with default load factor, which is <tt>0.75</tt> and an initial capacity
+ * sufficient to hold the mappings in the specified <tt>Map</tt>.
+ *
+ * @param t the map whose mappings are to be placed in this map.
+ * @throws NullPointerException if the specified map is null.
+ */
+ public WeakIdentityHashMap(Map t) {
+ this( Math.max( (int) ( t.size() / DEFAULT_LOAD_FACTOR ) + 1, 16 ),
+ DEFAULT_LOAD_FACTOR );
+ putAll( t );
+ }
+
+ // internal utilities
+
+ /**
+ * Value representing null keys inside tables.
+ */
+ private static final Object NULL_KEY = new Object();
+
+ /**
+ * Use NULL_KEY for key if it is null.
+ */
+ private static Object maskNull(Object key) {
+ return ( key == null ?
+ NULL_KEY :
+ key );
+ }
+
+ /**
+ * Return internal representation of null key back to caller as null
+ */
+ private static Object unmaskNull(Object key) {
+ return ( key == NULL_KEY ?
+ null :
+ key );
+ }
+
+ /**
+ * Return a hash code for non-null Object x.
+ */
+ int hash(Object x) {
+ int h = System.identityHashCode( x );
+ return h - ( h << 7 ); // that is, -127 * h
+ }
+
+ /**
+ * Return index for hash code h.
+ */
+ static int indexFor(int h, int length) {
+ return h & ( length - 1 );
+ }
+
+ /**
+ * Expunge stale entries from the table.
+ */
+ private void expungeStaleEntries() {
+ Object r;
+ while ( ( r = queue.poll() ) != null ) {
+ Entry e = (Entry) r;
+ int h = e.hash;
+ int i = indexFor( h, table.length );
+
+ Entry prev = table[i];
+ Entry p = prev;
+ while ( p != null ) {
+ Entry next = p.next;
+ if ( p == e ) {
+ if ( prev == e )
+ table[i] = next;
+ else
+ prev.next = next;
+ e.next = null; // Help GC
+ e.value = null; // " "
+ size--;
+ break;
+ }
+ prev = p;
+ p = next;
+ }
+ }
+ }
+
+ /**
+ * Return the table after first expunging stale entries
+ */
+ private Entry[] getTable() {
+ expungeStaleEntries();
+ return table;
+ }
+
+ /**
+ * Returns the number of key-value mappings in this map.
+ * This result is a snapshot, and may not reflect unprocessed
+ * entries that will be removed before next attempted access
+ * because they are no longer referenced.
+ */
+ public int size() {
+ if ( size == 0 )
+ return 0;
+ expungeStaleEntries();
+ return size;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this map contains no key-value mappings.
+ * This result is a snapshot, and may not reflect unprocessed
+ * entries that will be removed before next attempted access
+ * because they are no longer referenced.
+ */
+ public boolean isEmpty() {
+ return size() == 0;
+ }
+
+ /**
+ * Returns the value to which the specified key is mapped in this weak
+ * hash map, or <tt>null</tt> if the map contains no mapping for
+ * this key. A return value of <tt>null</tt> does not <i>necessarily</i>
+ * indicate that the map contains no mapping for the key; it is also
+ * possible that the map explicitly maps the key to <tt>null</tt>. The
+ * <tt>containsKey</tt> method may be used to distinguish these two
+ * cases.
+ *
+ * @param key the key whose associated value is to be returned.
+ * @return the value to which this map maps the specified key, or
+ * <tt>null</tt> if the map contains no mapping for this key.
+ * @see #put(Object,Object)
+ */
+ public Object get(Object key) {
+ Object k = maskNull( key );
+ int h = hash( k );
+ Entry[] tab = getTable();
+ int index = indexFor( h, tab.length );
+ Entry e = tab[index];
+ while ( e != null ) {
+ if ( e.hash == h && k == e.get() )
+ return e.value;
+ e = e.next;
+ }
+ return null;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this map contains a mapping for the
+ * specified key.
+ *
+ * @param key The key whose presence in this map is to be tested
+ * @return <tt>true</tt> if there is a mapping for <tt>key</tt>;
+ * <tt>false</tt> otherwise
+ */
+ public boolean containsKey(Object key) {
+ return getEntry( key ) != null;
+ }
+
+ /**
+ * Returns the entry associated with the specified key in the HashMap.
+ * Returns null if the HashMap contains no mapping for this key.
+ */
+ Entry getEntry(Object key) {
+ Object k = maskNull( key );
+ int h = hash( k );
+ Entry[] tab = getTable();
+ int index = indexFor( h, tab.length );
+ Entry e = tab[index];
+ while ( e != null && !( e.hash == h && k == e.get() ) )
+ e = e.next;
+ return e;
+ }
+
+ /**
+ * Associates the specified value with the specified key in this map.
+ * If the map previously contained a mapping for this key, the old
+ * value is replaced.
+ *
+ * @param key key with which the specified value is to be associated.
+ * @param value value to be associated with the specified key.
+ * @return previous value associated with specified key, or <tt>null</tt>
+ * if there was no mapping for key. A <tt>null</tt> return can
+ * also indicate that the HashMap previously associated
+ * <tt>null</tt> with the specified key.
+ */
+ public Object put(Object key, Object value) {
+ Object k = maskNull( key );
+ int h = hash( k );
+ Entry[] tab = getTable();
+ int i = indexFor( h, tab.length );
+
+ for ( Entry e = tab[i]; e != null; e = e.next ) {
+ if ( h == e.hash && k == e.get() ) {
+ Object oldValue = e.value;
+ if ( value != oldValue )
+ e.value = value;
+ return oldValue;
+ }
+ }
+
+ modCount++;
+ tab[i] = new Entry( k, value, queue, h, tab[i] );
+ if ( ++size >= threshold )
+ resize( tab.length * 2 );
+ return null;
+ }
+
+ /**
+ * Rehashes the contents of this map into a new <tt>HashMap</tt> instance
+ * with a larger capacity. This method is called automatically when the
+ * number of keys in this map exceeds its capacity and load factor.
+ * <p/>
+ * Note that this method is a no-op if it's called with newCapacity ==
+ * 2*MAXIMUM_CAPACITY (which is Integer.MIN_VALUE).
+ *
+ * @param newCapacity the new capacity, MUST be a power of two.
+ */
+ void resize(int newCapacity) {
+ // assert (newCapacity & -newCapacity) == newCapacity; // power of 2
+
+ Entry[] oldTable = getTable();
+ int oldCapacity = oldTable.length;
+
+ // check if needed
+ if ( size < threshold || oldCapacity > newCapacity )
+ return;
+
+ Entry[] newTable = new Entry[newCapacity];
+
+ transfer( oldTable, newTable );
+ table = newTable;
+
+ /*
+ * If ignoring null elements and processing ref queue caused massive
+ * shrinkage, then restore old table. This should be rare, but avoids
+ * unbounded expansion of garbage-filled tables.
+ */
+ if ( size >= threshold / 2 ) {
+ threshold = (int) ( newCapacity * loadFactor );
+ }
+ else {
+ expungeStaleEntries();
+ transfer( newTable, oldTable );
+ table = oldTable;
+ }
+ }
+
+ /**
+ * Transfer all entries from src to dest tables
+ */
+ private void transfer(Entry[] src, Entry[] dest) {
+ for ( int j = 0; j < src.length; ++j ) {
+ Entry e = src[j];
+ src[j] = null;
+ while ( e != null ) {
+ Entry next = e.next;
+ Object key = e.get();
+ if ( key == null ) {
+ e.next = null; // Help GC
+ e.value = null; // " "
+ size--;
+ }
+ else {
+ int i = indexFor( e.hash, dest.length );
+ e.next = dest[i];
+ dest[i] = e;
+ }
+ e = next;
+ }
+ }
+ }
+
+ /**
+	 * Copies all of the mappings from the specified map to this map. These
+	 * mappings will replace any mappings that this map had for any of the
+	 * keys currently in the specified map.
+ *
+ * @param t mappings to be stored in this map.
+ * @throws NullPointerException if the specified map is null.
+ */
+ public void putAll(Map t) {
+ // Expand enough to hold t's elements without resizing.
+ int n = t.size();
+ if ( n == 0 )
+ return;
+ if ( n >= threshold ) {
+ n = (int) ( n / loadFactor + 1 );
+ if ( n > MAXIMUM_CAPACITY )
+ n = MAXIMUM_CAPACITY;
+ int capacity = table.length;
+ while ( capacity < n )
+ capacity <<= 1;
+ resize( capacity );
+ }
+
+ for ( Iterator i = t.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry e = (Map.Entry) i.next();
+ put( e.getKey(), e.getValue() );
+ }
+ }
+
+ /**
+ * Removes the mapping for this key from this map if present.
+ *
+ * @param key key whose mapping is to be removed from the map.
+ * @return previous value associated with specified key, or <tt>null</tt>
+ * if there was no mapping for key. A <tt>null</tt> return can
+ * also indicate that the map previously associated <tt>null</tt>
+ * with the specified key.
+ */
+ public Object remove(Object key) {
+ Object k = maskNull( key );
+ int h = hash( k );
+ Entry[] tab = getTable();
+ int i = indexFor( h, tab.length );
+ Entry prev = tab[i];
+ Entry e = prev;
+
+ while ( e != null ) {
+ Entry next = e.next;
+ if ( h == e.hash && k == e.get() ) {
+ modCount++;
+ size--;
+ if ( prev == e )
+ tab[i] = next;
+ else
+ prev.next = next;
+ return e.value;
+ }
+ prev = e;
+ e = next;
+ }
+
+ return null;
+ }
+
+
+ /**
+ * Special version of remove needed by Entry set
+ */
+ Entry removeMapping(Object o) {
+ if ( !( o instanceof Map.Entry ) )
+ return null;
+ Entry[] tab = getTable();
+ Map.Entry entry = (Map.Entry) o;
+ Object k = maskNull( entry.getKey() );
+ int h = hash( k );
+ int i = indexFor( h, tab.length );
+ Entry prev = tab[i];
+ Entry e = prev;
+
+ while ( e != null ) {
+ Entry next = e.next;
+ if ( h == e.hash && e.equals( entry ) ) {
+ modCount++;
+ size--;
+ if ( prev == e )
+ tab[i] = next;
+ else
+ prev.next = next;
+ return e;
+ }
+ prev = e;
+ e = next;
+ }
+
+ return null;
+ }
+
+ /**
+ * Removes all mappings from this map.
+ */
+ public void clear() {
+ // clear out ref queue. We don't need to expunge entries
+ // since table is getting cleared.
+ while ( queue.poll() != null )
+ ;
+
+ modCount++;
+ Entry tab[] = table;
+ for ( int i = 0; i < tab.length; ++i )
+ tab[i] = null;
+ size = 0;
+
+ // Allocation of array may have caused GC, which may have caused
+ // additional entries to go stale. Removing these entries from the
+ // reference queue will make them eligible for reclamation.
+ while ( queue.poll() != null )
+ ;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this map maps one or more keys to the
+ * specified value.
+ *
+ * @param value value whose presence in this map is to be tested.
+ * @return <tt>true</tt> if this map maps one or more keys to the
+ * specified value.
+ */
+ public boolean containsValue(Object value) {
+ if ( value == null )
+ return containsNullValue();
+
+ Entry tab[] = getTable();
+ for ( int i = tab.length; i-- > 0; )
+ for ( Entry e = tab[i]; e != null; e = e.next )
+ if ( value.equals( e.value ) )
+ return true;
+ return false;
+ }
+
+ /**
+ * Special-case code for containsValue with null argument
+ */
+ private boolean containsNullValue() {
+ Entry tab[] = getTable();
+ for ( int i = tab.length; i-- > 0; )
+ for ( Entry e = tab[i]; e != null; e = e.next )
+ if ( e.value == null )
+ return true;
+ return false;
+ }
+
+ /**
+	 * Removes all entries having the given value. The matching keys are
+	 * collected first and removed afterwards, which avoids concurrent
+	 * access exceptions; it is expected that no other thread adds a
+	 * mapping with a value that is being removed.
+	 *
+	 * @param value value whose mappings are to be removed from the map.
+	 * @return <tt>true</tt> if at least one mapping was removed.
+ */
+ public boolean removeValue(Object value) {
+ if ( value == null )
+ return removeNullValue();
+
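+		// Collect the matching keys first and remove them in a second pass,
+		// so the bucket chains are never modified while being traversed.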
+ Entry tab[] = getTable();
+ Set keys = new HashSet();
+ for ( int i = tab.length; i-- > 0; )
+ for ( Entry e = tab[i]; e != null; e = e.next )
+ if ( value.equals( e.value ) )
+ keys.add( e.getKey() );
+ for ( Object key : keys ) remove( key );
+ return !keys.isEmpty();
+ }
+
+ /**
+ * Special-case code for removeValue with null argument
+ */
+ private boolean removeNullValue() {
+ Entry tab[] = getTable();
+ Set keys = new HashSet();
+ for ( int i = tab.length; i-- > 0; )
+ for ( Entry e = tab[i]; e != null; e = e.next )
+ if ( e.value == null )
+ keys.add( e.getKey() );
+ for ( Object key : keys ) remove( key );
+ return !keys.isEmpty();
+ }
+
+ /**
+ * The entries in this hash table extend WeakReference, using its main ref
+ * field as the key.
+ */
+ private static class Entry extends WeakReference implements Map.Entry {
+ private Object value;
+ private final int hash;
+ private Entry next;
+
+ /**
+ * Create new entry.
+ */
+ Entry(Object key, Object value, ReferenceQueue queue,
+ int hash, Entry next) {
+ super( key, queue );
+ this.value = value;
+ this.hash = hash;
+ this.next = next;
+ }
+
+ public Object getKey() {
+ return unmaskNull( this.get() );
+ }
+
+ public Object getValue() {
+ return value;
+ }
+
+ public Object setValue(Object newValue) {
+ Object oldValue = value;
+ value = newValue;
+ return oldValue;
+ }
+
+ public boolean equals(Object o) {
+ if ( !( o instanceof Map.Entry ) )
+ return false;
+ Map.Entry e = (Map.Entry) o;
+ Object k1 = getKey();
+ Object k2 = e.getKey();
+ if ( k1 == k2 ) {
+ Object v1 = getValue();
+ Object v2 = e.getValue();
+ if ( v1 == v2 || ( v1 != null && v1.equals( v2 ) ) )
+ return true;
+ }
+ return false;
+ }
+
+ public int hashCode() {
+ Object k = getKey();
+ Object v = getValue();
+ return ( ( k == null ?
+ 0 :
+ System.identityHashCode( k ) ) ^
+ ( v == null ?
+ 0 :
+ v.hashCode() ) );
+ }
+
+ public String toString() {
+ return getKey() + "=" + getValue();
+ }
+ }
+
+ private abstract class HashIterator implements Iterator {
+ int index;
+ Entry entry = null;
+ Entry lastReturned = null;
+ int expectedModCount = modCount;
+
+ /**
+ * Strong reference needed to avoid disappearance of key
+ * between hasNext and next
+ */
+ Object nextKey = null;
+
+ /**
+ * Strong reference needed to avoid disappearance of key
+ * between nextEntry() and any use of the entry
+ */
+ Object currentKey = null;
+
+ HashIterator() {
+ index = ( size() != 0 ?
+ table.length :
+ 0 );
+ }
+
+ public boolean hasNext() {
+ Entry[] t = table;
+
+ while ( nextKey == null ) {
+ Entry e = entry;
+ int i = index;
+ while ( e == null && i > 0 )
+ e = t[--i];
+ entry = e;
+ index = i;
+ if ( e == null ) {
+ currentKey = null;
+ return false;
+ }
+ nextKey = e.get(); // hold on to key in strong ref
+ if ( nextKey == null )
+ entry = entry.next;
+ }
+ return true;
+ }
+
+ /**
+ * The common parts of next() across different types of iterators
+ */
+ protected Entry nextEntry() {
+ if ( modCount != expectedModCount )
+ throw new ConcurrentModificationException();
+ if ( nextKey == null && !hasNext() )
+ throw new NoSuchElementException();
+
+ lastReturned = entry;
+ entry = entry.next;
+ currentKey = nextKey;
+ nextKey = null;
+ return lastReturned;
+ }
+
+ public void remove() {
+ if ( lastReturned == null )
+ throw new IllegalStateException();
+ if ( modCount != expectedModCount )
+ throw new ConcurrentModificationException();
+
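+			// Remove via currentKey, the strong reference captured in nextEntry(),
+			// so the weakly referenced key cannot be cleared before the removal.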
+ WeakIdentityHashMap.this.remove( currentKey );
+ expectedModCount = modCount;
+ lastReturned = null;
+ currentKey = null;
+ }
+
+ }
+
+ private class ValueIterator extends HashIterator {
+ public Object next() {
+ return nextEntry().value;
+ }
+ }
+
+ private class KeyIterator extends HashIterator {
+ public Object next() {
+ return nextEntry().getKey();
+ }
+ }
+
+ private class EntryIterator extends HashIterator {
+ public Object next() {
+ return nextEntry();
+ }
+ }
+
+ // Views
+
+ private transient Set entrySet = null;
+
+ /**
+ * Returns a set view of the keys contained in this map. The set is
+ * backed by the map, so changes to the map are reflected in the set, and
+ * vice-versa. The set supports element removal, which removes the
+ * corresponding mapping from this map, via the <tt>Iterator.remove</tt>,
+ * <tt>Set.remove</tt>, <tt>removeAll</tt>, <tt>retainAll</tt>, and
+ * <tt>clear</tt> operations. It does not support the <tt>add</tt> or
+ * <tt>addAll</tt> operations.
+ *
+ * @return a set view of the keys contained in this map.
+ */
+ public Set keySet() {
+ Set ks = keySet;
+ return ( ks != null ?
+ ks :
+ ( keySet = new KeySet() ) );
+ }
+
+ private class KeySet extends AbstractSet {
+ public Iterator iterator() {
+ return new KeyIterator();
+ }
+
+ public int size() {
+ return WeakIdentityHashMap.this.size();
+ }
+
+ public boolean contains(Object o) {
+ return containsKey( o );
+ }
+
+ public boolean remove(Object o) {
+ if ( containsKey( o ) ) {
+ WeakIdentityHashMap.this.remove( o );
+ return true;
+ }
+ else
+ return false;
+ }
+
+ public void clear() {
+ WeakIdentityHashMap.this.clear();
+ }
+
+ public Object[] toArray() {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( i.next() );
+ return c.toArray();
+ }
+
+ public Object[] toArray(Object a[]) {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( i.next() );
+ return c.toArray( a );
+ }
+ }
+
+ /**
+ * Returns a collection view of the values contained in this map. The
+ * collection is backed by the map, so changes to the map are reflected in
+ * the collection, and vice-versa. The collection supports element
+ * removal, which removes the corresponding mapping from this map, via the
+ * <tt>Iterator.remove</tt>, <tt>Collection.remove</tt>,
+ * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt> operations.
+ * It does not support the <tt>add</tt> or <tt>addAll</tt> operations.
+ *
+ * @return a collection view of the values contained in this map.
+ */
+ public Collection values() {
+ Collection vs = values;
+ return ( vs != null ?
+ vs :
+ ( values = new Values() ) );
+ }
+
+ private class Values extends AbstractCollection {
+ public Iterator iterator() {
+ return new ValueIterator();
+ }
+
+ public int size() {
+ return WeakIdentityHashMap.this.size();
+ }
+
+ public boolean contains(Object o) {
+ return containsValue( o );
+ }
+
+ public void clear() {
+ WeakIdentityHashMap.this.clear();
+ }
+
+ public Object[] toArray() {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( i.next() );
+ return c.toArray();
+ }
+
+ public Object[] toArray(Object a[]) {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( i.next() );
+ return c.toArray( a );
+ }
+ }
+
+ /**
+ * Returns a collection view of the mappings contained in this map. Each
+ * element in the returned collection is a <tt>Map.Entry</tt>. The
+ * collection is backed by the map, so changes to the map are reflected in
+ * the collection, and vice-versa. The collection supports element
+ * removal, which removes the corresponding mapping from the map, via the
+ * <tt>Iterator.remove</tt>, <tt>Collection.remove</tt>,
+ * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt> operations.
+ * It does not support the <tt>add</tt> or <tt>addAll</tt> operations.
+ *
+ * @return a collection view of the mappings contained in this map.
+ * @see java.util.Map.Entry
+ */
+ public Set entrySet() {
+ Set es = entrySet;
+ return ( es != null ?
+ es :
+ ( entrySet = new EntrySet() ) );
+ }
+
+ private class EntrySet extends AbstractSet {
+ public Iterator iterator() {
+ return new EntryIterator();
+ }
+
+ public boolean contains(Object o) {
+ if ( !( o instanceof Map.Entry ) )
+ return false;
+			Map.Entry e = (Map.Entry) o;
+			Entry candidate = getEntry( e.getKey() );
+ return candidate != null && candidate.equals( e );
+ }
+
+ public boolean remove(Object o) {
+ return removeMapping( o ) != null;
+ }
+
+ public int size() {
+ return WeakIdentityHashMap.this.size();
+ }
+
+ public void clear() {
+ WeakIdentityHashMap.this.clear();
+ }
+
+ public Object[] toArray() {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( new SimpleEntry( (Map.Entry) i.next() ) );
+ return c.toArray();
+ }
+
+ public Object[] toArray(Object a[]) {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( new SimpleEntry( (Map.Entry) i.next() ) );
+ return c.toArray( a );
+ }
+ }
+
+ static class SimpleEntry implements Map.Entry {
+ Object key;
+ Object value;
+
+ public SimpleEntry(Object key, Object value) {
+ this.key = key;
+ this.value = value;
+ }
+
+ public SimpleEntry(Map.Entry e) {
+ this.key = e.getKey();
+ this.value = e.getValue();
+ }
+
+ public Object getKey() {
+ return key;
+ }
+
+ public Object getValue() {
+ return value;
+ }
+
+ public Object setValue(Object value) {
+ Object oldValue = this.value;
+ this.value = value;
+ return oldValue;
+ }
+
+ public boolean equals(Object o) {
+ if ( !( o instanceof Map.Entry ) )
+ return false;
+ Map.Entry e = (Map.Entry) o;
+ return eq( key, e.getKey() ) && eq( value, e.getValue() );
+ }
+
+ public int hashCode() {
+ return ( ( key == null ) ?
+ 0 :
+ key.hashCode() ) ^
+ ( ( value == null ) ?
+ 0 :
+ value.hashCode() );
+ }
+
+ public String toString() {
+ return key + "=" + value;
+ }
+
+ private static boolean eq(Object o1, Object o2) {
+ return ( o1 == null ?
+ o2 == null :
+ o1.equals( o2 ) );
+ }
+ }
+
+}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TransactionTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TransactionTest.java 2006-11-25 14:23:04 UTC (rev 10869)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TransactionTest.java 2006-11-26 14:16:21 UTC (rev 10870)
@@ -17,10 +17,16 @@
s.persist(
new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" )
);
+ s.persist(
+ new Document( "Lucene in Action", "FullText search engine", "blah blah blah" )
+ );
+ s.persist(
+ new Document( "Hibernate Search in Action", "ORM and FullText search engine", "blah blah blah" )
+ );
s.getTransaction().commit();
s.close();
- assertEquals( "transaction.commit() should no index", 1, getDocumentNumber() );
+ assertEquals( "transaction.commit() should index", 3, getDocumentNumber() );
s = getSessions().openSession();
s.getTransaction().begin();
@@ -31,7 +37,7 @@
s.getTransaction().rollback();
s.close();
- assertEquals( "rollback() should not index", 1, getDocumentNumber() );
+ assertEquals( "rollback() should not index", 3, getDocumentNumber() );
s = getSessions().openSession();
s.persist(
@@ -40,7 +46,7 @@
s.flush();
s.close();
- assertEquals( "no transaction should index", 2, getDocumentNumber() );
+ assertEquals( "no transaction should index", 4, getDocumentNumber() );
}
Hibernate SVN: r10869 - trunk/Hibernate3/src/org/hibernate/engine/query
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2006-11-25 09:23:04 -0500 (Sat, 25 Nov 2006)
New Revision: 10869
Modified:
trunk/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java
Log:
HHH-2264 Avoid NPE on user query error
Modified: trunk/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java
===================================================================
--- trunk/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java 2006-11-25 14:22:25 UTC (rev 10868)
+++ trunk/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java 2006-11-25 14:23:04 UTC (rev 10869)
@@ -64,6 +64,10 @@
int right = StringHelper.firstIndexOfChar( sqlString, ParserHelper.HQL_SEPARATORS, indx + 1 );
int chopLocation = right < 0 ? sqlString.length() : right;
String param = sqlString.substring( indx + 1, chopLocation );
+ if ( StringHelper.isEmpty( param ) ) {
+ throw new QueryException("Space is not allowed after parameter prefix ':' '"
+ + sqlString + "'");
+ }
recognizer.namedParameter( param, indx );
indx = chopLocation - 1;
}
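(Illustration, not part of the commit: a query such as "from Item i where i.name = : name" -- note the space after the ':' prefix -- used to produce an empty parameter name and a NullPointerException further down; it now fails fast with the QueryException above.)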
Hibernate SVN: r10868 - branches/Branch_3_2/Hibernate3/src/org/hibernate/engine/query
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2006-11-25 09:22:25 -0500 (Sat, 25 Nov 2006)
New Revision: 10868
Modified:
branches/Branch_3_2/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java
Log:
HHH-2264 avoid NPE on user query error
Modified: branches/Branch_3_2/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java
===================================================================
--- branches/Branch_3_2/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java 2006-11-24 00:37:46 UTC (rev 10867)
+++ branches/Branch_3_2/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java 2006-11-25 14:22:25 UTC (rev 10868)
@@ -64,6 +64,10 @@
int right = StringHelper.firstIndexOfChar( sqlString, ParserHelper.HQL_SEPARATORS, indx + 1 );
int chopLocation = right < 0 ? sqlString.length() : right;
String param = sqlString.substring( indx + 1, chopLocation );
+ if ( StringHelper.isEmpty( param ) ) {
+ throw new QueryException("Space is not allowed after parameter prefix ':' '"
+ + sqlString + "'");
+ }
recognizer.namedParameter( param, indx );
indx = chopLocation - 1;
}
Hibernate SVN: r10867 - in branches/Branch_3_2/HibernateExt/metadata: doc/reference/en/modules lib src/java/org/hibernate src/java/org/hibernate/search src/java/org/hibernate/search/annotations src/java/org/hibernate/search/backend src/java/org/hibernate/search/backend/impl src/java/org/hibernate/search/bridge src/java/org/hibernate/search/bridge/builtin src/java/org/hibernate/search/engine src/java/org/hibernate/search/event src/java/org/hibernate/search/impl src/java/org/hibernate/sear
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2006-11-23 19:37:46 -0500 (Thu, 23 Nov 2006)
New Revision: 10867
Added:
branches/Branch_3_2/HibernateExt/metadata/lib/jta.jar
branches/Branch_3_2/HibernateExt/metadata/lib/jta.licence.txt
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/Environment.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/Search.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Boost.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/DateBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/DocumentId.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Field.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/FieldBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Index.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Indexed.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Keyword.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Resolution.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Store.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Text.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Unstored.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/AddWork.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/DeleteWork.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/UpdateWork.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Work.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkQueue.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchLuceneWorkQueue.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/LuceneWorker.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/BridgeFactory.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/ParameterizedBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/String2FieldBridgeAdaptor.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/StringBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayFieldBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayString2FieldBridgeAdaptor.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayStringBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/BigDecimalBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/BigIntegerBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/DateBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/DoubleBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/FloatBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/IntegerBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/LongBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/NumberBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/ShortBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/StringBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/EntityInfo.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProvider.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/FSDirectoryProvider.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/RAMDirectoryProvider.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/BinderHelper.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/AlternateDocument.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/Clock.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/Document.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/RamDirectoryTest.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TransactionTest.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/Cloud.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/TruncateFieldBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/TruncateStringBridge.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/Document.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/Animal.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/Mammal.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/AlternateBook.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/Book.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/Clock.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/session/
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/session/Email.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java
Removed:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/lucene/
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/lucene/
Modified:
branches/Branch_3_2/HibernateExt/metadata/doc/reference/en/modules/lucene.xml
Log:
Merge back Lucene_integration to 3.2
Modified: branches/Branch_3_2/HibernateExt/metadata/doc/reference/en/modules/lucene.xml
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/doc/reference/en/modules/lucene.xml 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/doc/reference/en/modules/lucene.xml 2006-11-24 00:37:46 UTC (rev 10867)
@@ -1,91 +1,63 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
-<chapter id="lucene" revision="1">
- <title>Hibernate Lucene Integration</title>
+<chapter id="lucene" revision="2">
+ <title>Hibernate Search: Apache <trademark>Lucene</trademark>
+ Integration</title>
- <para>Lucene is a high-performance Java search engine library available from
- the Apache Software Foundation. Hibernate Annotations includes a package of
- annotations that allows you to mark any domain model object as indexable and
- have Hibernate maintain a Lucene index of any instances persisted via
- Hibernate.</para>
+ <para><ulink url="http://lucene.apache.org">Apache Lucene</ulink> is a
+ high-performance Java search engine library available at the Apache Software
+ Foundation. Hibernate Annotations includes a package of annotations that
+ allows you to mark any domain model object as indexable and have Hibernate
+ maintain a Lucene index of any instances persisted via Hibernate. Apache
+ Lucene is also integrated with the Hibernate query facility.</para>
- <para>Hibernate Lucene is a work in progress and new features are cooking in
+ <para>Hibernate Search is a work in progress and new features are cooking in
this area. So expect some compatibility changes in subsequent
versions.</para>
- <section id="lucene-mapping">
- <title>Mapping the entities to the index</title>
+ <section id="lucene-architecture">
+ <title>Architecture</title>
- <para>First, we must declare a persistent class as indexable. This is done
- by annotating the class with <literal>@Indexed</literal>:</para>
+ <para>Hibernate Search is made of an indexing engine and an index search
+ engine. Both are backed by Apache Lucene.</para>
- <programlisting>@Entity
-@Indexed(index="indexes/essays")
-public class Essay {
- ...
-}</programlisting>
+ <para>When an entity is inserted into, updated in or removed from the database,
+ <productname>Hibernate Search</productname> will keep track of this event
+ (through the Hibernate event system) and schedule an index update. When
+ outside of a transaction, the update is executed right after the actual database
+ operation. It is however recommended, for both your database and Hibernate
+ Search, to execute your operations in a transaction (whether JDBC or JTA).
+ When in a transaction, the index update is scheduled for the transaction
+ commit (and discarded in case of transaction rollback). You can think of
+ this as the regular (infamous) autocommit vs transactional behavior. From
+ a performance perspective, the <emphasis>in transaction</emphasis> mode is
+ recommended. All the index updates are handled for you without you having
+ to use the Apache Lucene APIs.</para>
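+
+ <para>For illustration, a minimal sketch (<literal>essay</literal> stands
+ for any instance of an <literal>@Indexed</literal> entity, such as the
+ <classname>Essay</classname> class mapped later in this chapter):</para>
+
+ <programlisting>Session s = sessionFactory.openSession();
+s.getTransaction().begin();
+s.persist( essay ); //the index update is scheduled, not yet executed
+s.getTransaction().commit(); //database and index are both updated here
+s.close();</programlisting>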
- <para>The <literal>index</literal> attribute tells Hibernate what the
- lucene directory name is (usually a directory on your file system). If you
- wish to define a base directory for all lucene indexes, you can use the
- <literal>hibernate.lucene.default.indexDir</literal> property in your
- configuration file.</para>
+ <para>To interact with Apache Lucene indexes, Hibernate Search has the
+ notion of <classname>DirectoryProvider</classname>. A directory provider
+ will manage a given Lucene <classname>Directory</classname> type. You can
+ configure directory providers to adjust the directory target.</para>
- <para>Lucene indexes contain four kinds of fields:
- <emphasis>keyword</emphasis> fields, <emphasis>text</emphasis> fields,
- <emphasis>unstored</emphasis> fields and <emphasis>unindexed</emphasis>
- fields. Hibernate Annotations provides annotations to mark a property of
- an entity as one of the first three kinds of indexed fields.</para>
-
- <programlisting>@Entity
-@Indexed(index="indexes/essays")
-public class Essay {
- ...
-
- @Id
- @Keyword(id=true)
- public Long getId() { return id; }
-
- @Text(name="Abstract")
- public String getSummary() { return summary; }
-
- @Lob
- @Unstored
- public String getText() { return text; }
-
-}</programlisting>
-
- <para>These annotations define an index with three fields:
- <literal>id</literal>, <literal>Abstract</literal> and
- <literal>text</literal>. Note that by default the field name is
- decapitalized, following the JavaBean specification.</para>
-
- <para>Note: you <emphasis>must</emphasis> specify
- <literal>@Keyword(id=true)</literal> on the identifier property of your
- entity class.</para>
-
- <para>Lucene has the notion of <emphasis>boost factor</emphasis>. It's a
- way to give more weigth to a field or to an indexed element over an other
- during the indexation process. You can use <literal>@Boost</literal> at
- the field or the class level.</para>
-
- <para>The analyzer class used to index the elements is configurable
- through the <literal>hibernate.lucene.analyzer</literal> property. If none
- defined,
- <classname>org.apache.lucene.analysis.standard.StandardAnalyzer</classname>
- is used as the default.</para>
+ <para><productname>Hibernate Search</productname> can also use a Lucene
+ index to search an entity and return a (list of) managed entities, saving you
+ from the tedious Object / Lucene Document mapping and low level Lucene
+ APIs. The application code uses the unified
+ <classname>org.hibernate.Query</classname> API exactly the way an HQL or
+ native query would be used.</para>
</section>
<section id="lucene-configuration">
<title>Configuration</title>
<section id="lucene-configuration-directory">
- <title>directory configuration</title>
+ <title>Directory configuration</title>
- <para>Lucene has a notion of Directory where the index is stored. The
- Directory implementation can be customized but Lucene comes bundled with
- a file system and a full memory implementation. Hibernate Lucene has the
- notion of <literal>DirectoryProvider</literal> that handle the
- configuration and the initialization of the Lucene Directory.</para>
+ <para>Apache Lucene has a notion of Directory where the index is stored.
+ The Directory implementation can be customized but Lucene comes bundled
+ with a file system and a full memory implementation.
+ <productname>Hibernate Search</productname> has the notion of
+ <literal>DirectoryProvider</literal> that handles the configuration and
+ the initialization of the Lucene Directory.</para>
<table>
<title>List of built-in Directory Providers</title>
@@ -103,19 +75,19 @@
<tbody>
<row>
- <entry>org.hibernate.lucene.store.FSDirectoryProvider</entry>
+ <entry>org.hibernate.search.store.FSDirectoryProvider</entry>
<entry>File system based directory. The directory used will be
- <indexBase>/<<literal>@Index.name</literal>></entry>
+ <indexBase>/<<literal>@Indexed.name</literal>></entry>
<entry><literal>indexBase</literal>: Base directory</entry>
</row>
<row>
- <entry>org.hibernate.lucene.store.RAMDirectoryProvider</entry>
+ <entry>org.hibernate.search.store.RAMDirectoryProvider</entry>
<entry>Memory based directory, the directory will be uniquely
- indentified by the <literal>@Index.name</literal>
+ indentified by the <literal>@Indexed.name</literal>
element</entry>
<entry>none</entry>
@@ -132,17 +104,17 @@
<para>Each indexed entity is associated with a Lucene index (an index can
be shared by several entities but this is not usually the case). You can
configure the index through properties prefixed by
- <literal><literal>hibernate.lucene.<indexname></literal></literal>.
+ <constant>hibernate.search.</constant><replaceable>indexname</replaceable>.
Default properties inherited by all indexes can be defined using the
- prefix hibernate.lucene.default.</para>
+ prefix <constant>hibernate.search.default.</constant></para>
<para>To define the directory provider of a given index, you use the
- <literal>hibernate.lucene.<indexname>.directory_provider</literal></para>
+ <constant>hibernate.search.<replaceable>indexname</replaceable>.directory_provider</constant></para>
- <programlisting>hibernate.lucene.default.directory_provider org.hibernate.lucene.store.FSDirectoryProvider
-hibernate.lucene.default.indexDir=/usr/lucene/indexes
+ <programlisting>hibernate.search.default.directory_provider org.hibernate.search.store.FSDirectoryProvider
+hibernate.search.default.indexDir=/usr/lucene/indexes
-hibernate.lucene.Rules.directory_provider org.hibernate.lucene.store.RAMDirectoryProvider
+hibernate.search.Rules.directory_provider org.hibernate.search.store.RAMDirectoryProvider
</programlisting>
<para>applied on</para>
@@ -162,32 +134,537 @@
and base directory, and override those defaults later on a per index
basis.</para>
- <para>Writing your own DirectoryProvider, you can benefit this
- configuration mechanism too.</para>
+ <para>By writing your own <classname>DirectoryProvider</classname>, you can
+ benefit from this configuration mechanism too.</para>
</section>
- <section id="lucene-configuration-event">
+ <section id="lucene-configuration-event" revision="1">
<title>Enabling automatic indexing</title>
- <para>Finally, we enable the <literal>LuceneEventListener</literal> for
- the three Hibernate events that occur after changes are committed to the
+ <para>Finally, we enable the <literal>SearchEventListener</literal> for
+ the three Hibernate events that occur after changes are executed against the
database.</para>
<programlisting><hibernate-configuration>
...
- <event type="post-commit-update"
- <listener
- class="org.hibernate.lucene.event.LuceneEventListener"/>
+ <event type="post-update"
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
</event>
- <event type="post-commit-insert"
- <listener
- class="org.hibernate.lucene.event.LuceneEventListener"/>
+ <event type="post-insert"
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
</event>
- <event type="post-commit-delete"
- <listener
- class="org.hibernate.lucene.event.LuceneEventListener"/>
+ <event type="post-delete"
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
</event>
</hibernate-configuration></programlisting>
</section>
</section>
+
+ <section id="lucene-mapping" revision="1">
+ <title>Mapping entities to the index structure</title>
+
+ <para>All the metadata information related to indexed entities is
+ described through some Java annotations. There is no need for xml mapping
+ files nor a list of indexed entities. The list is discovered at startup
+ time by scanning the Hibernate mapped entities.</para>
+
+ <para>First, we must declare a persistent class as indexable. This is done
+ by annotating the class with <literal>@Indexed</literal> (all entities not
+ annotated with <literal>@Indexed</literal> will be ignored by the indexing
+ process):</para>
+
+ <programlisting>@Entity
+<emphasis role="bold">@Indexed(index="indexes/essays")</emphasis>
+public class Essay {
+ ...
+}</programlisting>
+
+ <para>The <literal>index</literal> attribute tells Hibernate what the
+ Lucene directory name is (usually a directory on your file system). If you
+ wish to define a base directory for all Lucene indexes, you can use the
+ <literal>hibernate.search.default.indexDir</literal> property in your
+ configuration file. Each entity instance will be represented by a Lucene
+ <classname>Document</classname> inside the given index (aka
+ Directory).</para>
+
+ <para>For each property (or attribute) of your entity, you have the
+ ability to describe how it will be indexed. The default (ie no annotation)
+ means that the property is completely ignored by the indexing process.
+ <literal>@Field</literal> declares a property as indexed. When
+ indexing an element to a Lucene document you can specify how it is
+ indexed:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para><literal>name</literal>: describes under which name the property
+ should be stored in the Lucene Document. The default value is the
+ property name (following the JavaBeans convention)</para>
+ </listitem>
+
+ <listitem>
+ <para><literal>store</literal>: describes whether or not the property
+ is stored in the Lucene index. You can store the value
+ <literal>Store.YES</literal> (consuming more space in the index),
+ store it in a compressed way <literal>Store.COMPRESS</literal> (this
+ does consume more CPU), or avoid any storage
+ <literal>Store.NO</literal> (this is the default value). When a
+ property is stored, you can retrieve it from the Lucene Document (note
+ that this is not related to whether the element is indexed or
+ not).</para>
+ </listitem>
+
+ <listitem>
+ <para><literal>index</literal>: describes how the element is indexed (ie the process used
+ to index the property and the type of information stored). The
+ different values are <literal>Index.NO</literal> (no indexing, ie
+ cannot be found by a query), <literal>Index.TOKENIZED</literal> (use
+ an analyzer to process the property),
+ <literal>Index.UN_TOKENIZED</literal> (no analyzer pre processing),
+ <literal>Index.NO_NORM</literal> (do not store the normalization
+ data).</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>These attributes are part of the <literal>@Field</literal>
+ annotation.</para>
+
+ <para>Whether or not you want to store the data depends on how you wish to
+ use the index query result. As of today, for a pure <productname>Hibernate
+ Search</productname> usage, storing is not necessary. Whether or not you
+ want to tokenize a property depends on whether you wish to search
+ the element as is, or only normalized parts of it. It makes sense to
+ tokenize a text field, but not to do it for a date field (or an id
+ field).</para>
+
+ <para>Finally, the id property of an entity is a special property used by
+ <productname>Hibernate Search</productname> to ensure index uniqueness of a
+ given entity. By design, an id has to be stored and must not be tokenized.
+ To mark a property as index id, use the <literal>@DocumentId</literal>
+ annotation.</para>
+
+ <programlisting>@Entity
+@Indexed(index="indexes/essays")
+public class Essay {
+ ...
+
+ @Id
+ <emphasis role="bold">@DocumentId</emphasis>
+ public Long getId() { return id; }
+
+ <emphasis role="bold">@Field(name="Abstract", index=Index.TOKENIZED, store=Store.YES)</emphasis>
+ public String getSummary() { return summary; }
+
+ @Lob
+ <emphasis role="bold">@Field(index=Index.TOKENIZED)</emphasis>
+ public String getText() { return text; }
+
+}</programlisting>
+
+ <para>These annotations define an index with three fields:
+ <literal>id</literal>, <literal>Abstract</literal> and
+ <literal>text</literal>. Note that by default the field name is
+ decapitalized, following the JavaBean specification.</para>
+
+ <para>Note: you <emphasis>must</emphasis> specify
+ <literal>@DocumentId</literal> on the identifier property of your entity
+ class.</para>
+
+ <para>Lucene has the notion of <emphasis>boost factor</emphasis>. It's a
+ way to give more weight to a field or to an indexed element over another
+ during the indexing process. You can use <literal>@Boost</literal> at
+ the field or the class level.</para>
+
+ <programlisting>@Entity
+@Indexed(index="indexes/essays")
+<emphasis role="bold">@Boost(2)</emphasis>
+public class Essay {
+ ...
+
+ @Id
+ @DocumentId
+ public Long getId() { return id; }
+
+ @Field(name="Abstract", index=Index.TOKENIZED, store=Store.YES)
+ <emphasis role="bold">@Boost(2.5f)</emphasis>
+ public String getSummary() { return summary; }
+
+ @Lob
+ @Field(index=Index.TOKENIZED)
+ public String getText() { return text; }
+
+}</programlisting>
+
+ <para>In our example, Essay's probability to reach the top of the search
+ list will be multiplied by 2 and the summary field will be 2.5 times more
+ important than the text field. Note that this explanation is not strictly
+ accurate, but it is simple and close enough to the reality. Please check the
+ Lucene documentation or the excellent <citetitle>Lucene In
+ Action</citetitle> from Otis Gospodnetic and Erik Hatcher.</para>
+
+ <para>The analyzer class used to index the elements is configurable
+ through the <literal>hibernate.search.analyzer</literal> property. If none
+ is defined,
+ <classname>org.apache.lucene.analysis.standard.StandardAnalyzer</classname>
+ is used as the default.</para>
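+
+ <para>For example (an illustrative setting; any Lucene analyzer class can
+ be used):</para>
+
+ <programlisting>hibernate.search.analyzer org.apache.lucene.analysis.StopAnalyzer</programlisting>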
+ </section>
+
+ <section id="lucene-bridge">
+ <title>Property/Field Bridge</title>
+
+ <para>All fields of a full text index in Lucene have to be represented as
+ Strings, so Java properties have to be indexed in a String form. For
+ most of your properties, <productname>Hibernate Search</productname> does
+ the translation job for you thanks to a built-in set of bridges. In some
+ cases, though, you need fine grained control over the translation
+ process.</para>
+
+ <section>
+ <title>Built-in bridges</title>
+
+ <para><literal>Hibernate Search</literal> comes bundled with a set of
+ built-in bridges between a Java property type and its full text
+ representation.</para>
+
+ <para><literal>Null</literal> elements are not indexed (Lucene does not
+ support null elements and it does not make much sense either)</para>
+
+ <variablelist>
+ <varlistentry>
+ <term>null</term>
+
+ <listitem>
+ <para>null elements are not indexed. Lucene does not support null
+ elements and this does not make much sense either.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>java.lang.String</term>
+
+ <listitem>
+ <para>Strings are indexed as is</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>short, Short, integer, Integer, long, Long, float, Float,
+ double, Double, BigInteger, BigDecimal</term>
+
+ <listitem>
+ <para>Numbers are converted into their String representation. Note
+ that numbers cannot be compared by Lucene (ie used in range
+ queries) out of the box: they have to be padded <footnote>
+ <para>Using a Range query is debatable and has drawbacks; an
+ alternative approach is to use a Filter query which will
+ filter the query results to the appropriate range.</para>
+
+ <para><productname>Hibernate Search</productname> will support
+ a padding mechanism</para>
+ </footnote></para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>java.util.Date</term>
+
+ <listitem>
+ <para>Dates are stored as yyyyMMddHHmmssSSS in GMT time
+ (200611072203012 for Nov 7th of 2006 4:03PM and 12ms EST). You
+ shouldn't really bother with the internal format. What is
+ important is that when using a DateRange Query, you should know
+ that the dates have to be expressed in GMT time.</para>
+
+ <para>Usually, storing the date up to the millisecond is not
+ necessary. <literal>@DateBridge</literal> defines the appropriate
+ resolution you are willing to store in the index
+ (<literal>@DateBridge(resolution=Resolution.DAY)</literal>).
+ The date pattern will then be truncated accordingly.</para>
+
+ <programlisting>@Entity @Indexed
+public class Meeting {
+ @Field(index=Index.UN_TOKENIZED)
+ <emphasis role="bold">@DateBridge(resolution=Resolution.MINUTE)</emphasis>
+ private Date date;
+ ...
+}</programlisting>
+
+ <warning>
+ <para>A Date whose resolution is lower than
+ <literal>MILLISECOND</literal> cannot be a
+ <literal>@DocumentId</literal></para>
+ </warning>
+ </listitem>
+ </varlistentry>
+ </variablelist>
+
+ <para></para>
+ </section>
+
+ <section>
+ <title>Custom Bridge</title>
+
+ <para>It can happen that the built-in bridges of Hibernate Search do
+ not cover some of your property types, or that the String representation
+ used is not what you expect.</para>
+
+ <section>
+ <title>StringBridge</title>
+
+ <para>The simplest custom solution is to give <productname>Hibernate
+ Search</productname> an implementation of your expected
+ <emphasis>object to String</emphasis> bridge. To do so you need to
+ implement the
+ <literal>org.hibernate.search.bridge.StringBridge</literal>
+ interface</para>
+
+ <programlisting>/**
+ * Padding Integer bridge.
+ * All numbers will be padded with 0 to match 5 digits
+ *
+ * @author Emmanuel Bernard
+ */
+public class PaddedIntegerBridge implements <emphasis role="bold">StringBridge</emphasis> {
+
+ private int PADDING = 5;
+
+ <emphasis role="bold">public String objectToString(Object object)</emphasis> {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > PADDING) throw new IllegalArgumentException( "Try to pad on a number too big" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < PADDING ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+}</programlisting>
+
+ <para>Then any property or field can use this bridge thanks to the
+ <literal>@FieldBridge</literal> annotation</para>
+
+ <programlisting><emphasis role="bold">@FieldBridge(impl = PaddedIntegerBridge.class)</emphasis>
+private Integer length;</programlisting>
+
+ <para>Parameters can be passed to the bridge implementation, making it
+ more flexible. The bridge implementation then implements the
+ <classname>ParameterizedBridge</classname> interface, and the
+ parameters are passed through the <literal>@FieldBridge</literal>
+ annotation.</para>
+
+ <programlisting>public class PaddedIntegerBridge implements StringBridge, <emphasis
+ role="bold">ParameterizedBridge</emphasis> {
+
+ public static String PADDING_PROPERTY = "padding";
+ private int padding = 5; //default
+
+ <emphasis role="bold">public void setParameterValues(Map parameters)</emphasis> {
+ Object padding = parameters.get( PADDING_PROPERTY );
+ if (padding != null) this.padding = (Integer) padding;
+ }
+
+ public String objectToString(Object object) {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > padding) throw new IllegalArgumentException( "Try to pad on a number too big" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+}
+
+
+//property
+@FieldBridge(impl = PaddedIntegerBridge.class,
+ <emphasis role="bold">params = @Parameter(name="padding", value="10")</emphasis> )
+private Integer length;</programlisting>
+
+ <para>The <classname>ParameterizedBridge</classname> interface can be
+ implemented by <classname>StringBridge</classname>,
+ <classname>TwoWayStringBridge</classname> and
+ <classname>FieldBridge</classname> implementations (see
+ below).</para>
+
+ <para>If you expect to use your bridge implementation on an id
+ property (ie annotated with <literal>@DocumentId</literal>), you need
+ to use a slightly extended version of <literal>StringBridge</literal>
+ named <classname>TwoWayStringBridge</classname>. <literal>Hibernate
+ Search</literal> needs to read the string representation of the
+ identifier and generate the object out of it. There is no difference
+ in the way the <literal>@FieldBridge</literal> annotation is
+ used.</para>
+
+ <programlisting>public class PaddedIntegerBridge implements TwoWayStringBridge, ParameterizedBridge {
+
+ public static String PADDING_PROPERTY = "padding";
+ private int padding = 5; //default
+
+ public void setParameterValues(Map parameters) {
+ Object padding = parameters.get( PADDING_PROPERTY );
+ if (padding != null) this.padding = (Integer) padding;
+ }
+
+ public String objectToString(Object object) {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > padding) throw new IllegalArgumentException( "Try to pad on a number too big" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+
+ <emphasis role="bold">public Object stringToObject(String stringValue)</emphasis> {
+ return new Integer(stringValue);
+ }
+}
+
+
+//id property
+@DocumentId
+@FieldBridge(impl = PaddedIntegerBridge.class,
+ params = @Parameter(name="padding", value="10") )
+private Integer id;</programlisting>
+
+ <para>It is critically important for the two-way process to be
+ idempotent (ie object = stringToObject( objectToString( object ) )
+ ).</para>
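+
+ <para>A quick sanity check of that property for the bridge above (an
+ illustrative snippet):</para>
+
+ <programlisting>TwoWayStringBridge bridge = new PaddedIntegerBridge();
+Integer id = new Integer( 42 );
+//the round trip ("00042" with the default padding) must yield an equal object
+assert id.equals( bridge.stringToObject( bridge.objectToString( id ) ) );</programlisting>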
+ </section>
+
+ <section>
+ <title>FieldBridge</title>
+
+ <para>Some use cases require more than a simple object to string
+ translation when mapping a property to a Lucene index. To give you
+ the most flexibility you can also implement a bridge as a
+ <classname>FieldBridge</classname>. This interface gives you a property
+ value and lets you map it the way you want in your Lucene
+ <classname>Document</classname>. This interface is very similar in its
+ concept to the <productname>Hibernate</productname>
+ <classname>UserType</classname>.</para>
+
+ <para>You can for example store a given property in two different
+ document fields</para>
+
+ <programlisting>/**
+ * Store the date in 3 different field year, month, day
+ * to ease Range Query per year, month or day
+ * (eg get all the elements of december for the last 5 years)
+ *
+ * @author Emmanuel Bernard
+ */
+public class DateSplitBridge implements FieldBridge {
+ private final static TimeZone GMT = TimeZone.getTimeZone("GMT");
+
+ <emphasis role="bold">public void set(String name, Object value, Document document, Field.Store store, Field.Index index, Float boost) {</emphasis>
+ Date date = (Date) value;
+ Calendar cal = GregorianCalendar.getInstance( GMT );
+ cal.setTime( date );
+ int year = cal.get( Calendar.YEAR );
+ int month = cal.get( Calendar.MONTH ) + 1;
+ int day = cal.get( Calendar.DAY_OF_MONTH );
+ //set year
+ Field field = new Field( name + ".year", String.valueOf(year), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ //set month and pad it if needed
+ field = new Field( name + ".month", ( month < 10 ? "0" : "" ) + String.valueOf(month), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ //set day and pad it if needed
+ field = new Field( name + ".day", ( day < 10 ? "0" : "" ) + String.valueOf(day), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ }
+}
+
+
+//property
+<emphasis role="bold">@FieldBridge(impl = DateSplitBridge.class)</emphasis>
+private Date date;</programlisting>
+
+ <para></para>
+ </section>
+ </section>
+ </section>
+
+ <section id="lucene-query">
+ <title>Querying</title>
+
+ <para>The second most important capability of <productname>Hibernate
+ Search</productname> is the ability to execute a Lucene query and retrieve
+ entities managed by a Hibernate session, providing the power of Lucene
+ without leaving the Hibernate paradigm, and giving another dimension to the
+ Hibernate classic search mechanisms (HQL, Criteria query, native SQL
+ query).</para>
+
+ <para>To access the <productname>Hibernate Search</productname> querying
+ facilities, you have to use a Hibernate
+ <classname>FullTextSession</classname>. A FullTextSession wraps a regular
+ <classname>org.hibernate.Session</classname> to provide query and indexing
+ capabilities.</para>
+
+ <programlisting>Session session = sessionFactory.openSession();
+...
+FullTextSession fullTextSession = Search.createFullTextSession(session);</programlisting>
+
+ <para>The search facility is built on native Lucene queries.</para>
+
+ <programlisting>org.apache.lucene.queryParser.QueryParser parser = new QueryParser("title", new StopAnalyzer() );
+
+org.apache.lucene.search.Query luceneQuery = parser.parse( "summary:Festina OR brand:Seiko" );
+<emphasis role="bold">org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery );</emphasis>
+
+List result = fullTextQuery.list(); //return a list of managed objects</programlisting>
+
+ <para>The Hibernate query built on top of the Lucene query is a regular
+ <literal>org.hibernate.Query</literal>: you are in the same paradigm as
+ the other Hibernate query facilities (HQL, Native or Criteria). The
+ regular <literal>list()</literal>, <literal>uniqueResult()</literal>,
+ <literal>iterate()</literal> and <literal>scroll()</literal> can be
+ used.</para>
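+
+ <para>As a minimal sketch (the <classname>Product</classname> entity is
+ hypothetical), the query can also be restricted to given entity types and
+ their subclasses through the
+ <methodname>createFullTextQuery(luceneQuery, Class...)</methodname>
+ variant of <classname>FullTextSession</classname>:</para>
+
+ <programlisting>//only Product instances (and their subclasses) are returned
+org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery, Product.class );
+List result = fullTextQuery.list();</programlisting>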
+
+ <para>If you expect a reasonable number of results and expect to work on
+ all of them, <methodname>list()</methodname> or
+ <methodname>uniqueResult()</methodname> are recommended.
+ <methodname>list()</methodname> works best if the entity
+ <literal>batch-size</literal> is set up properly. Note that Hibernate
+ Search has to process all Lucene Hits elements when using
+ <methodname>list()</methodname>, <methodname>uniqueResult()</methodname>
+ and <methodname>iterate()</methodname>. If you wish to minimize Lucene
+ document loading, <methodname>scroll()</methodname> is more appropriate.
+ Don't forget to close the <classname>ScrollableResults</classname> object
+ when you're done, since it holds Lucene resources.</para>
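+
+ <para>Here is a minimal sketch of the <methodname>scroll()</methodname>
+ approach (the <classname>Book</classname> entity is illustrative):</para>
+
+ <programlisting>org.hibernate.ScrollableResults results = fullTextQuery.scroll();
+try {
+    while ( results.next() ) {
+        Book book = (Book) results.get(0);
+        //process the entity
+    }
+}
+finally {
+    results.close(); //release the underlying Lucene resources
+}</programlisting>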
+
+ <para>An efficient way to work with queries is to use pagination. The
+ pagination API is exactly the one available in
+ <classname>org.hibernate.Query</classname>:</para>
+
+ <programlisting><emphasis role="bold">org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery );</emphasis>
+fullTextQuery.setFirstResult(30);
+fullTextQuery.setMaxResults(20);
+fullTextQuery.list(); //will return a list of 20 elements starting from the 30th</programlisting>
+
+ <para>Only the relevant Lucene Documents are accessed.</para>
+ </section>
+
+ <section id="lucene-index">
+ <title>Indexing</title>
+
+ <para>It is sometimes useful to index an object even if this object is
+ neither inserted nor updated in the database. This is especially true when
+ you build your index for the first time. You can achieve that goal using
+ the <classname>FullTextSession</classname>.</para>
+
+ <programlisting>FullTextSession fullTextSession = Search.createFullTextSession(session);
+Transaction tx = fullTextSession.beginTransaction();
+for (Customer customer : customers) {
+ <emphasis role="bold">fullTextSession.index(customer);</emphasis>
+}
+tx.commit(); //indexes are written at commit time</programlisting>
+
+ <para>For maximum efficiency, Hibernate Search batches index operations
+ and executes them at commit time (note: you don't need to use
+ <classname>org.hibernate.Transaction</classname> in a JTA
+ environment).</para>
+ </section>
</chapter>
\ No newline at end of file
Added: branches/Branch_3_2/HibernateExt/metadata/lib/jta.jar
===================================================================
(Binary files differ)
Property changes on: branches/Branch_3_2/HibernateExt/metadata/lib/jta.jar
___________________________________________________________________
Name: svn:mime-type
+ application/octet-stream
Added: branches/Branch_3_2/HibernateExt/metadata/lib/jta.licence.txt
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/lib/jta.licence.txt 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/lib/jta.licence.txt 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,49 @@
+
+Sun Microsystems, Inc.
+Binary Code License Agreement
+
+READ THE TERMS OF THIS AGREEMENT AND ANY PROVIDED SUPPLEMENTAL LICENSE TERMS (COLLECTIVELY "AGREEMENT") CAREFULLY BEFORE OPENING THE SOFTWARE MEDIA PACKAGE. BY OPENING THE SOFTWARE MEDIA PACKAGE, YOU AGREE TO THE TERMS OF THIS AGREEMENT. IF YOU ARE ACCESSING THE SOFTWARE ELECTRONICALLY, INDICATE YOUR ACCEPTANCE OF THESE TERMS BY SELECTING THE "ACCEPT" BUTTON AT THE END OF THIS AGREEMENT. IF YOU DO NOT AGREE TO ALL THESE TERMS, PROMPTLY RETURN THE UNUSED SOFTWARE TO YOUR PLACE OF PURCHASE FOR A REFUND OR, IF THE SOFTWARE IS ACCESSED ELECTRONICALLY, SELECT THE "DECLINE" BUTTON AT THE END OF THIS AGREEMENT.
+
+1. LICENSE TO USE. Sun grants you a non-exclusive and non-transferable license for the internal use only of the accompanying software and documentation and any error corrections provided by Sun (collectively "Software"), by the number of users and the class of computer hardware for which the corresponding fee has been paid.
+
+2. RESTRICTIONS. Software is confidential and copyrighted. Title to Software and all associated intellectual property rights is retained by Sun and/or its licensors. Except as specifically authorized in any Supplemental License Terms, you may not make copies of Software, other than a single copy of Software for archival purposes. Unless enforcement is prohibited by applicable law, you may not modify, decompile, or reverse engineer Software. Licensee acknowledges that Licensed Software is not designed or intended for use in the design, construction, operation or maintenance of any nuclear facility. Sun Microsystems, Inc. disclaims any express or implied warranty of fitness for such uses. No right, title or interest in or to any trademark, service mark, logo or trade name of Sun or its licensors is granted under this Agreement.
+
+3. LIMITED WARRANTY. Sun warrants to you that for a period of ninety (90) days from the date of purchase, as evidenced by a copy of the receipt, the media on which Software is furnished (if any) will be free of defects in materials and workmanship under normal use. Except for the foregoing, Software is provided "AS IS". Your exclusive remedy and Sun's entire liability under this limited warranty will be at Sun's option to replace Software media or refund the fee paid for Software.
+
+4. DISCLAIMER OF WARRANTY. UNLESS SPECIFIED IN THIS AGREEMENT, ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT ARE DISCLAIMED, EXCEPT TO THE EXTENT THAT THESE DISCLAIMERS ARE HELD TO BE LEGALLY INVALID.
+
+5. LIMITATION OF LIABILITY. TO THE EXTENT NOT PROHIBITED BY LAW, IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR SPECIAL, INDIRECT, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF OR RELATED TO THE USE OF OR INABILITY TO USE SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. In no event will Sun's liability to you, whether in contract, tort (including negligence), or otherwise, exceed the amount paid by you for Software under this Agreement. The foregoing limitations will apply even if the above stated warranty fails of its essential purpose.
+
+6. Termination. This Agreement is effective until terminated. You may terminate this Agreement at any time by destroying all copies of Software. This Agreement will terminate immediately without notice from Sun if you fail to comply with any provision of this Agreement. Upon Termination, you must destroy all copies of Software.
+
+7. Export Regulations. All Software and technical data delivered under this Agreement are subject to US export control laws and may be subject to export or import regulations in other countries. You agree to comply strictly with all such laws and regulations and acknowledge that you have the responsibility to obtain such licenses to export, re-export, or import as may be required after delivery to you.
+
+8. U.S. Government Restricted Rights. If Software is being acquired by or on behalf of the U.S. Government or by a U.S. Government prime contractor or subcontractor (at any tier), then the Government's rights in Software and accompanying documentation will be only as set forth in this Agreement; this is in accordance with 48 CFR 227.7201 through 227.7202-4 (for Department of Defense (DOD) acquisitions) and with 48 CFR 2.101 and 12.212 (for non-DOD acquisitions).
+
+9. Governing Law. Any action related to this Agreement will be governed by California law and controlling U.S. federal law. No choice of law rules of any jurisdiction will apply.
+
+10. Severability. If any provision of this Agreement is held to be unenforceable, this Agreement will remain in effect with the provision omitted, unless omission would frustrate the intent of the parties, in which case this Agreement will immediately terminate.
+
+11. Integration. This Agreement is the entire agreement between you and Sun relating to its subject matter. It supersedes all prior or contemporaneous oral or written communications, proposals, representations and warranties and prevails over any conflicting or additional terms of any quote, order, acknowledgment, or other communication between the parties relating to its subject matter during the term of this Agreement. No modification of this Agreement will be binding, unless in writing and signed by an authorized representative of each party.
+
+JAVA(TM) INTERFACE CLASSES
+JAVA TRANSACTION API (JTA), VERSION 1.0.1B, MAINTENANCE RELEASE
+SUPPLEMENTAL LICENSE TERMS
+
+These supplemental license terms ("Supplemental Terms") add to or modify the terms of the Binary Code License Agreement (collectively, the "Agreement"). Capitalized terms not defined in these Supplemental Terms shall have the same meanings ascribed to them in the Agreement. These Supplemental Terms shall supersede any inconsistent or conflicting terms in the Agreement, or in any license contained within the Software.
+
+1. Software Internal Use and Development License Grant. Subject to the terms and conditions of this Agreement, including, but not limited to Section 3 (Java Technology Restrictions) of these Supplemental Terms, Sun grants you a non-exclusive, non-transferable, limited license to reproduce internally and use internally the binary form of the Software, complete and unmodified, for the sole purpose of designing, developing and testing your Java applets and applications ("Programs").
+
+2. License to Distribute Software. In addition to the license granted in Section 1 (Software Internal Use and Development License Grant) of these Supplemental Terms, subject to the terms and conditions of this Agreement, including but not limited to Section 3 (Java Technology Restrictions), Sun grants you a non-exclusive, non-transferable, limited license to reproduce and distribute the Software in binary form only, provided that you (i) distribute the Software complete and unmodified and only bundled as part of your Programs, (ii) do not distribute additional software intended to replace any component(s) of the Software, (iii) do not remove or alter any proprietary legends or notices contained in the Software, (iv) only distribute the Software subject to a license agreement that protects Sun's interests consistent with the terms contained in this Agreement, and (v) agree to defend and indemnify Sun and its licensors from and against any damages, costs, liabilities, settlement amounts and/or expenses (including attorneys' fees) incurred in connection with any claim, lawsuit or action by any third party that arises or results from the use or distribution of any and all Programs and/or Software.
+
+3. Java Technology Restrictions. You may not modify the Java Platform Interface ("JPI", identified as classes contained within the "java" package or any subpackages of the "java" package), by creating additional classes within the JPI or otherwise causing the addition to or modification of the classes in the JPI. In the event that you create an additional class and associated API(s) which (i) extends the functionality of the Java Platform, and (ii) is exposed to third party software developers for the purpose of developing additional software which invokes such additional API, you must promptly publish broadly an accurate specification for such API for free use by all developers. You may not create, or authorize your licensees to create additional classes, interfaces, or subpackages that are in any way identified as "java", "javax", "sun" or similar convention as specified by Sun in any naming convention designation.
+
+4. Trademarks and Logos. You acknowledge and agree as between you and Sun that Sun owns the SUN, SOLARIS, JAVA, JINI, FORTE, and iPLANET trademarks and all SUN, SOLARIS, JAVA, JINI, FORTE, and iPLANET-related trademarks, service marks, logos and other brand designations ("Sun Marks"), and you agree to comply with the Sun Trademark and Logo Usage Requirements currently located at http://www.sun.com/policies/trademarks. Any use you make of the Sun Marks inures to Sun's benefit.
+
+5. Source Code. Software may contain source code that is provided solely for reference purposes pursuant to the terms of this Agreement. Source code may not be redistributed unless expressly provided for in this Agreement.
+
+6. Termination for Infringement. Either party may terminate this Agreement immediately should any Software become, or in either party's opinion be likely to become, the subject of a claim of infringement of any intellectual property right.
+
+For inquiries please contact: Sun Microsystems, Inc. 4150 Network Circle, Santa Clara, California 95054.
+
+
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/Environment.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/Environment.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/Environment.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,17 @@
+//$Id: Environment.java 10742 2006-11-07 01:03:16Z epbernard $
+package org.hibernate.search;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public final class Environment {
+ /**
+ * Indexes base directory
+ */
+ public static final String INDEX_BASE_DIR = "hibernate.search.index_dir";
+
+ /**
+ * Lucene analyser
+ */
+ public static final String ANALYZER_CLASS = "hibernate.search.analyzer";
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,25 @@
+//$Id: $
+package org.hibernate.search;
+
+import org.hibernate.classic.Session;
+import org.hibernate.Query;
+
+/**
+ * Extends the Hibernate {@link Session} with Full text search and indexing capabilities
+ *
+ * @author Emmanuel Bernard
+ */
+public interface FullTextSession extends Session {
+ /**
+ * Create a Query on top of a native Lucene Query returning the matching objects
+ * of type <code>entities</code> and their respective subclasses.
+ * If no entity is provided, no type filtering is done.
+ */
+ Query createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class... entities);
+
+ /**
+ * Force the (re)indexing of a given <b>managed</b> object.
+ * Indexing is batched per transaction
+ */
+ void index(Object entity);
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/Search.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/Search.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/Search.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,19 @@
+//$Id: $
+package org.hibernate.search;
+
+import org.hibernate.Session;
+import org.hibernate.search.impl.FullTextSessionImpl;
+
+/**
+ * Helper class to get a FullTextSession out of a regular session
+ * @author Emmanuel Bernard
+ */
+public final class Search {
+
+ private Search() {
+ }
+
+ public static FullTextSession createFullTextSession(Session session) {
+ return new FullTextSessionImpl(session);
+ }
+}
\ No newline at end of file
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Boost.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Boost.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Boost.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,20 @@
+//$Id: $
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Apply a boost factor on a field or a whole entity
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.TYPE, ElementType.METHOD, ElementType.FIELD} )
+@Documented
+public @interface Boost {
+ float value();
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/DateBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/DateBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/DateBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,25 @@
+//$Id: $
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.hibernate.search.annotations.Resolution;
+
+/**
+ * Defines the temporal resolution of a given field.
+ * Dates are stored as Strings in GMT.
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.FIELD, ElementType.METHOD} )
+@Documented
+//TODO allow pattern like yyyyMMdd?
+//TODO allow base timezone?
+public @interface DateBridge {
+ Resolution resolution();
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/DocumentId.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/DocumentId.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/DocumentId.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,22 @@
+//$Id: $
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Declare a field as the document id. If set to a property, the property will be used.
+ * TODO: If set to a class, the class itself will be passed to the FieldBridge.
+ * Note that {@link org.hibernate.search.bridge.FieldBridge#get} must return the Entity id.
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.METHOD, ElementType.FIELD} )
+@Documented
+public @interface DocumentId {
+ String name() default "";
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Field.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Field.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Field.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,38 @@
+//$Id: $
+/**
+ * JavaDoc copy/paste from the Apache Lucene project
+ * Available under the ASL 2.0 http://www.apache.org/licenses/LICENSE-2.0
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Mark a property as indexable
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.METHOD, ElementType.FIELD} )
+@Documented
+public @interface Field {
+ /**
+ * Field name, defaults to the JavaBean property name
+ */
+ String name() default "";
+
+ /**
+ * Should the value be stored in the document
+ */
+ Store store() default Store.NO;
+
+ /**
+ * Defines how the Field should be indexed
+ */
+ Index index();
+
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/FieldBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/FieldBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/FieldBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,24 @@
+//$Id: $
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.hibernate.annotations.Parameter;
+
+/**
+ * Specifies a given field bridge implementation
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.FIELD, ElementType.METHOD} )
+@Documented
+public @interface FieldBridge {
+ public Class impl() default void.class;
+
+ public Parameter[] params() default {};
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Index.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Index.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Index.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,34 @@
+//$Id: $
+package org.hibernate.search.annotations;
+
+/**
+ * Defines how a Field should be indexed
+ */
+public enum Index {
+ /**
+ * Do not index the field value. This field can thus not be searched,
+ * but one can still access its contents provided it is
+ * {@link Store stored}.
+ */
+ NO,
+ /**
+ * Index the field's value so it can be searched. An Analyzer will be used
+ * to tokenize and possibly further normalize the text before its
+ * terms will be stored in the index. This is useful for common text.
+ */
+ TOKENIZED,
+ /**
+ * Index the field's value without using an Analyzer, so it can be searched.
+ * As no analyzer is used the value will be stored as a single term. This is
+ * useful for unique Ids like product numbers.
+ */
+ UN_TOKENIZED,
+ /**
+ * Index the field's value without an Analyzer, and disable
+ * the storing of norms. No norms means that index-time boosting
+ * and field length normalization will be disabled. The benefit is
+ * less memory usage as norms take up one byte per indexed field
+ * for every document in the index.
+ */
+ NO_NORMS
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Indexed.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Indexed.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Indexed.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,21 @@
+//$Id: Indexed.java 10742 2006-11-07 01:03:16Z epbernard $
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention( RetentionPolicy.RUNTIME )
+@Target( ElementType.TYPE )
+@Documented
+/**
+ * Specifies that an entity is to be indexed by Lucene
+ */
+public @interface Indexed {
+ /**
+ * The filename of the index
+ */
+ String index() default "";
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Keyword.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Keyword.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Keyword.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,29 @@
+//$Id: Keyword.java 10742 2006-11-07 01:03:16Z epbernard $
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.METHOD, ElementType.FIELD} )
+@Documented
+/**
+ * Specifies that a property of an entity is a Lucene
+ * keyword field
+ * @deprecated use @Field(index=Index.UN_TOKENIZED, store=Store.YES) or @DocumentId when id=true was used
+ */
+@Deprecated
+public @interface Keyword {
+ /**
+ * The field name
+ */
+ String name() default "";
+
+ /**
+ * Specifies that this is the "identifier" keyword
+ */
+ boolean id() default false;
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Resolution.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Resolution.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Resolution.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,17 @@
+//$Id: $
+package org.hibernate.search.annotations;
+
+/**
+ * Date indexing resolution
+ *
+ * @author Emmanuel Bernard
+ */
+public enum Resolution {
+ YEAR,
+ MONTH,
+ DAY,
+ HOUR,
+ MINUTE,
+ SECOND,
+ MILLISECOND
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Store.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Store.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Store.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,13 @@
+//$Id: $
+package org.hibernate.search.annotations;
+
+/**
+ * Whether or not the value is stored in the document
+ *
+ * @author Emmanuel Bernard
+ */
+public enum Store {
+ NO,
+ YES,
+ COMPRESS
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Text.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Text.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Text.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,24 @@
+//$Id: Text.java 10742 2006-11-07 01:03:16Z epbernard $
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.METHOD, ElementType.FIELD} )
+@Documented
+/**
+ * Specifies that a property of an entity is a Lucene
+ * text field
+ * @deprecated use @Field(index=Index.TOKENIZED, store=Store.YES)
+ */
+@Deprecated
+public @interface Text {
+ /**
+ * The field name
+ */
+ String name() default "";
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Unstored.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Unstored.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/annotations/Unstored.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,24 @@
+//$Id: Unstored.java 10742 2006-11-07 01:03:16Z epbernard $
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.METHOD, ElementType.FIELD} )
+@Documented
+/**
+ * Specifies that a property of an entity is a Lucene
+ * unstored field
+ * @deprecated use @Field(index=Index.TOKENIZED, store=Store.NO)
+ */
+@Deprecated
+public @interface Unstored {
+ /**
+ * The field name
+ */
+ String name() default "";
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/AddWork.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/AddWork.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/AddWork.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,15 @@
+//$Id: $
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class AddWork extends Work {
+ public AddWork(Serializable id, Class entity, Document document) {
+ super( id, entity, document );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/DeleteWork.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/DeleteWork.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/DeleteWork.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,13 @@
+//$Id: $
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class DeleteWork extends Work {
+ public DeleteWork(Serializable id, Class entity) {
+ super( id, entity );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/UpdateWork.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/UpdateWork.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/UpdateWork.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,15 @@
+//$Id: $
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class UpdateWork extends Work {
+ public UpdateWork(Serializable id, Class entity, Document document) {
+ super( id, entity, document );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Work.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Work.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Work.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,40 @@
+//$Id: $
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * Represents a unit of Lucene work
+ *
+ * @author Emmanuel Bernard
+ */
+public abstract class Work implements Serializable {
+ private Document document;
+ private Class entity;
+ private Serializable id;
+
+ public Work(Serializable id, Class entity) {
+ this( id, entity, null );
+ }
+
+ public Work(Serializable id, Class entity, Document document) {
+ this.id = id;
+ this.entity = entity;
+ this.document = document;
+ }
+
+
+ public Document getDocument() {
+ return document;
+ }
+
+ public Class getEntity() {
+ return entity;
+ }
+
+ public Serializable getId() {
+ return id;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkQueue.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkQueue.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkQueue.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,24 @@
+//$Id: $
+package org.hibernate.search.backend;
+
+/**
+ * Set of work operations
+ *
+ * @author Emmanuel Bernard
+ */
+public interface WorkQueue {
+ /**
+ * Add a work
+ */
+ void add(Work work);
+
+ /**
+ * Execute works
+ */
+ void performWork();
+
+ /**
+ * Rollback works
+ */
+ void cancelWork();
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,137 @@
+//$Id: $
+package org.hibernate.search.backend;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.hibernate.HibernateException;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.store.DirectoryProvider;
+
+/**
+ * Lucene workspace
+ * This is not intended to be used in a multithreaded environment
+ * <p/>
+ * One cannot execute modifications through an IndexReader when an IndexWriter has been acquired on the same underlying directory.
+ * One cannot get an IndexWriter when an IndexReader has been acquired and modified on the same underlying directory.
+ * The recommended approach is to execute all the modifications on the IndexReaders, {@link #clean()}, and then acquire the
+ * index writers.
+ *
+ * @author Emmanuel Bernard
+ */
+//TODO introduce the notion of read only IndexReader? We cannot enforce it because Lucene uses abstract classes, not interfaces
+public class Workspace {
+ private static Log log = LogFactory.getLog( Workspace.class );
+ private Map<Class, DocumentBuilder<Object>> documentBuilders;
+ private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
+ private Map<DirectoryProvider, IndexReader> readers = new HashMap<DirectoryProvider, IndexReader>();
+ private Map<DirectoryProvider, IndexWriter> writers = new HashMap<DirectoryProvider, IndexWriter>();
+ private List<DirectoryProvider> lockedProviders = new ArrayList<DirectoryProvider>();
+
+ public Workspace(Map<Class, DocumentBuilder<Object>> documentBuilders,
+ Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ this.documentBuilders = documentBuilders;
+ this.lockableDirectoryProviders = lockableDirectoryProviders;
+ }
+
+
+ public DocumentBuilder getDocumentBuilder(Class entity) {
+ return documentBuilders.get( entity );
+ }
+
+ public IndexReader getIndexReader(Class entity) {
+ //TODO NPEs
+ DirectoryProvider provider = documentBuilders.get( entity ).getDirectoryProvider();
+ IndexReader reader = readers.get( provider );
+ if ( reader != null ) return reader;
+ lockProvider( provider );
+ try {
+ reader = IndexReader.open( provider.getDirectory() );
+ readers.put( provider, reader );
+ }
+ catch (IOException e) {
+ cleanUp( new HibernateException( "Unable to open IndexReader for " + entity, e ) );
+ }
+ return reader;
+ }
+
+ public IndexWriter getIndexWriter(Class entity) {
+ DirectoryProvider provider = documentBuilders.get( entity ).getDirectoryProvider();
+ IndexWriter writer = writers.get( provider );
+ if ( writer != null ) return writer;
+ lockProvider( provider );
+ try {
+ writer = new IndexWriter(
+ provider.getDirectory(), documentBuilders.get( entity ).getAnalyzer(), false
+ ); //have been created at init time
+ writers.put( provider, writer );
+ }
+ catch (IOException e) {
+ cleanUp( new HibernateException( "Unable to open IndexWriter for " + entity, e ) );
+ }
+ return writer;
+ }
+
+ private void lockProvider(DirectoryProvider provider) {
+ //make sure to use a semaphore
+ ReentrantLock lock = lockableDirectoryProviders.get( provider );
+ //of course a given thread cannot have a race cond with itself
+ if ( !lock.isHeldByCurrentThread() ) {
+ lock.lock();
+ lockedProviders.add( provider );
+ }
+ }
+
+ private void cleanUp(HibernateException originalException) {
+ //release all readers and writers, then release locks
+ HibernateException raisedException = originalException;
+ for ( IndexReader reader : readers.values() ) {
+ try {
+ reader.close();
+ }
+ catch (IOException e) {
+ if ( raisedException != null ) {
+ log.error( "Subsequent Exception while closing IndexReader", e );
+ }
+ else {
+ raisedException = new HibernateException( "Exception while closing IndexReader", e );
+ }
+ }
+ }
+ for ( IndexWriter writer : writers.values() ) {
+ try {
+ writer.close();
+ }
+ catch (IOException e) {
+ if ( raisedException != null ) {
+ log.error( "Subsequent Exception while closing IndexWriter", e );
+ }
+ else {
+ raisedException = new HibernateException( "Exception while closing IndexWriter", e );
+ }
+ }
+ }
+ for ( DirectoryProvider provider : lockedProviders ) {
+ lockableDirectoryProviders.get( provider ).unlock();
+ }
+ readers.clear();
+ writers.clear();
+ lockedProviders.clear();
+ if ( raisedException != null ) throw raisedException;
+ }
+
+ /**
+ * release resources consumed in the workspace if any
+ */
+ public void clean() {
+ cleanUp( null );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchLuceneWorkQueue.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchLuceneWorkQueue.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchLuceneWorkQueue.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,65 @@
+//$Id: $
+package org.hibernate.search.backend.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.backend.impl.LuceneWorker;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.UpdateWork;
+import org.hibernate.search.backend.DeleteWork;
+import org.hibernate.search.backend.AddWork;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class BatchLuceneWorkQueue implements WorkQueue {
+ private Workspace workspace;
+ private LuceneWorker worker;
+ private List<Work> queue = new ArrayList<Work>();
+
+ public BatchLuceneWorkQueue(Map<Class, DocumentBuilder<Object>> documentBuilders,
+ Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ workspace = new Workspace( documentBuilders, lockableDirectoryProviders );
+ worker = new LuceneWorker( workspace );
+ }
+
+ public void add(Work work) {
+ //TODO optimize by getting rid of dupe works
+ if ( work instanceof UpdateWork ) {
+ //split in 2 to optimize the process (reader first, writer next)
+ queue.add( new DeleteWork( work.getId(), work.getEntity() ) );
+ queue.add( new AddWork( work.getId(), work.getEntity(), work.getDocument() ) );
+ }
+ else {
+ queue.add( work );
+ }
+ }
+
+ public void performWork() {
+ try {
+ //use of index reader
+ for ( Work work : queue ) {
+ if ( work instanceof DeleteWork ) worker.performWork( work );
+ }
+ workspace.clean(); //close readers
+ for ( Work work : queue ) {
+ if ( work instanceof AddWork ) worker.performWork( work );
+ }
+ }
+ finally {
+ workspace.clean();
+ queue.clear();
+ }
+ }
+
+ public void cancelWork() {
+ queue.clear();
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/LuceneWorker.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/LuceneWorker.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/LuceneWorker.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,116 @@
+//$Id: $
+package org.hibernate.search.backend.impl;
+
+import java.io.IOException;
+import java.io.Serializable;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.hibernate.AssertionFailure;
+import org.hibernate.HibernateException;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.backend.DeleteWork;
+import org.hibernate.search.backend.AddWork;
+import org.hibernate.search.backend.UpdateWork;
+import org.hibernate.search.backend.Work;
+
+/**
+ * Stateless implementation that performs a unit of work
+ *
+ * @author Emmanuel Bernard
+ */
+public class LuceneWorker {
+ private Workspace workspace;
+ private static Log log = LogFactory.getLog( LuceneWorker.class );
+
+ public LuceneWorker(Workspace workspace) {
+ this.workspace = workspace;
+ }
+
+ public void performWork(Work work) {
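+ //dispatch to the work-type-specific implementation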
+ if ( AddWork.class.isAssignableFrom( work.getClass() ) ) {
+ performWork( (AddWork) work );
+ }
+ else if ( UpdateWork.class.isAssignableFrom( work.getClass() ) ) {
+ performWork( (UpdateWork) work );
+ }
+ else if ( DeleteWork.class.isAssignableFrom( work.getClass() ) ) {
+ performWork( (DeleteWork) work );
+ }
+ else {
+ throw new AssertionFailure( "Unknown work type: " + work.getClass() );
+ }
+ }
+
+ public void performWork(AddWork work) {
+ Class entity = work.getEntity();
+ Serializable id = work.getId();
+ Document document = work.getDocument();
+ add( entity, id, document );
+ }
+
+ private void add(Class entity, Serializable id, Document document) {
+ if ( log.isTraceEnabled() )
+ log.trace( "add to Lucene index: " + entity + "#" + id + ": " + document );
+ IndexWriter writer = workspace.getIndexWriter( entity );
+ try {
+ writer.addDocument( document );
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Unable to add to Lucene index: " + entity + "#" + id, e );
+ }
+ }
+
+ public void performWork(UpdateWork work) {
+ Class entity = work.getEntity();
+ Serializable id = work.getId();
+ Document document = work.getDocument();
+ remove( entity, id );
+ add( entity, id, document );
+ }
+
+ public void performWork(DeleteWork work) {
+ Class entity = work.getEntity();
+ Serializable id = work.getId();
+ remove( entity, id );
+ }
+
+ private void remove(Class entity, Serializable id) {
+ log.trace( "remove from Lucene index: " + entity + "#" + id );
+ DocumentBuilder builder = workspace.getDocumentBuilder( entity );
+ Term term = builder.getTerm( id );
+ IndexReader reader = workspace.getIndexReader( entity );
+ TermDocs termDocs = null;
+ try {
+ //TODO is there a faster way?
+ //TODO include TermDocs into the workspace?
+ termDocs = reader.termDocs( term );
+ String entityName = entity.getName();
+ while ( termDocs.next() ) {
+ int docIndex = termDocs.doc();
+ if ( entityName.equals( reader.document( docIndex ).get( DocumentBuilder.CLASS_FIELDNAME ) ) ) {
+ //remove only the one of the right class
+ //loop all to remove all the matches (defensive code)
+ reader.deleteDocument( docIndex );
+ }
+ }
+ }
+ catch (Exception e) {
+ throw new HibernateException( "Unable to remove from Lucene index: " + entity + "#" + id, e );
+ }
+ finally {
+ if (termDocs != null) try {
+ termDocs.close();
+ }
+ catch (IOException e) {
+ log.warn( "Unable to close termDocs properly", e);
+ }
+ }
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,47 @@
+//$Id: $
+package org.hibernate.search.backend.impl;
+
+import javax.transaction.Status;
+import javax.transaction.Synchronization;
+
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.Work;
+
+/**
+ * Execute some work inside a transaction synchronization
+ *
+ * @author Emmanuel Bernard
+ */
+public class PostTransactionWorkQueueSynchronization implements Synchronization {
+ private WorkQueue workQueue;
+ private boolean consumed;
+
+ public PostTransactionWorkQueueSynchronization(WorkQueue workQueue) {
+ this.workQueue = workQueue;
+ }
+
+ public void add(Work work) {
+ workQueue.add( work );
+ }
+
+ public boolean isConsumed() {
+ return consumed;
+ }
+
+ public void beforeCompletion() {
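+ //nothing to do before completion: the work queue is processed (or cancelled) after completion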
+ }
+
+ public void afterCompletion(int i) {
+ try {
+ if ( Status.STATUS_COMMITTED == i ) {
+ workQueue.performWork();
+ }
+ else {
+ workQueue.cancelWork();
+ }
+ }
+ finally {
+ consumed = true;
+ }
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/BridgeFactory.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/BridgeFactory.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/BridgeFactory.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,141 @@
+//$Id: $
+package org.hibernate.search.bridge;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.hibernate.HibernateException;
+import org.hibernate.AssertionFailure;
+import org.hibernate.search.bridge.builtin.DateBridge;
+import org.hibernate.search.bridge.builtin.FloatBridge;
+import org.hibernate.search.bridge.builtin.LongBridge;
+import org.hibernate.search.bridge.builtin.BigIntegerBridge;
+import org.hibernate.search.bridge.builtin.StringBridge;
+import org.hibernate.search.bridge.builtin.IntegerBridge;
+import org.hibernate.search.bridge.builtin.BigDecimalBridge;
+import org.hibernate.search.bridge.builtin.DoubleBridge;
+import org.hibernate.search.bridge.builtin.ShortBridge;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.annotations.Parameter;
+import org.hibernate.reflection.XClass;
+import org.hibernate.reflection.XMember;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class BridgeFactory {
+ private static Map<String, FieldBridge> builtInBridges = new HashMap<String, FieldBridge>();
+
+ private BridgeFactory() {
+ }
+
+ public static final TwoWayFieldBridge DOUBLE = new TwoWayString2FieldBridgeAdaptor( new DoubleBridge() );
+
+ public static final TwoWayFieldBridge FLOAT = new TwoWayString2FieldBridgeAdaptor( new FloatBridge() );
+
+ public static final TwoWayFieldBridge SHORT = new TwoWayString2FieldBridgeAdaptor( new ShortBridge() );
+
+ public static final TwoWayFieldBridge INTEGER = new TwoWayString2FieldBridgeAdaptor( new IntegerBridge() );
+
+ public static final TwoWayFieldBridge LONG = new TwoWayString2FieldBridgeAdaptor( new LongBridge() );
+
+ public static final TwoWayFieldBridge BIG_INTEGER = new TwoWayString2FieldBridgeAdaptor( new BigIntegerBridge() );
+
+ public static final TwoWayFieldBridge BIG_DECIMAL = new TwoWayString2FieldBridgeAdaptor( new BigDecimalBridge() );
+
+ public static final TwoWayFieldBridge STRING = new TwoWayString2FieldBridgeAdaptor( new StringBridge() );
+
+ public static final FieldBridge DATE_YEAR = new String2FieldBridgeAdaptor( DateBridge.DATE_YEAR );
+ public static final FieldBridge DATE_MONTH = new String2FieldBridgeAdaptor( DateBridge.DATE_MONTH );
+ public static final FieldBridge DATE_DAY = new String2FieldBridgeAdaptor( DateBridge.DATE_DAY );
+ public static final FieldBridge DATE_HOUR = new String2FieldBridgeAdaptor( DateBridge.DATE_HOUR );
+ public static final FieldBridge DATE_MINUTE = new String2FieldBridgeAdaptor( DateBridge.DATE_MINUTE );
+ public static final FieldBridge DATE_SECOND = new String2FieldBridgeAdaptor( DateBridge.DATE_SECOND );
+ public static final TwoWayFieldBridge DATE_MILLISECOND = new TwoWayString2FieldBridgeAdaptor( DateBridge.DATE_MILLISECOND );
+
+ static {
+ builtInBridges.put( Double.class.getName(), DOUBLE );
+ builtInBridges.put( double.class.getName(), DOUBLE );
+ builtInBridges.put( Float.class.getName(), FLOAT );
+ builtInBridges.put( float.class.getName(), FLOAT );
+ builtInBridges.put( Short.class.getName(), SHORT );
+ builtInBridges.put( short.class.getName(), SHORT );
+ builtInBridges.put( Integer.class.getName(), INTEGER );
+ builtInBridges.put( int.class.getName(), INTEGER );
+ builtInBridges.put( Long.class.getName(), LONG );
+ builtInBridges.put( long.class.getName(), LONG );
+ builtInBridges.put( BigInteger.class.getName(), BIG_INTEGER );
+ builtInBridges.put( BigDecimal.class.getName(), BIG_DECIMAL );
+ builtInBridges.put( String.class.getName(), STRING );
+
+ builtInBridges.put( Date.class.getName(), DATE_MILLISECOND );
+ }
+
+ public static FieldBridge guessType(XMember member) {
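+ //resolution order: explicit @FieldBridge, then @DateBridge, then the built-in type mapping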
+ FieldBridge bridge = null;
+ org.hibernate.search.annotations.FieldBridge bridgeAnn =
+ member.getAnnotation( org.hibernate.search.annotations.FieldBridge.class );
+ if ( bridgeAnn != null ) {
+ Class impl = bridgeAnn.impl();
+ try {
+ Object instance = impl.newInstance();
+ if ( FieldBridge.class.isAssignableFrom( impl ) ) {
+ bridge = (FieldBridge) instance;
+ }
+ else if ( org.hibernate.search.bridge.TwoWayStringBridge.class.isAssignableFrom( impl ) ) {
+ bridge = new TwoWayString2FieldBridgeAdaptor( (org.hibernate.search.bridge.TwoWayStringBridge) instance );
+ }
+ else if ( org.hibernate.search.bridge.StringBridge.class.isAssignableFrom( impl ) ) {
+ bridge = new String2FieldBridgeAdaptor( (org.hibernate.search.bridge.StringBridge) instance );
+ }
+ if ( bridgeAnn.params().length > 0 && ParameterizedBridge.class.isAssignableFrom( impl ) ) {
+ Map params = new HashMap( bridgeAnn.params().length );
+ for ( Parameter param : bridgeAnn.params() ) {
+ params.put( param.name(), param.value() );
+ }
+ ( (ParameterizedBridge) instance ).setParameterValues( params );
+ }
+ }
+ catch (Exception e) {
+ //TODO add classname
+ throw new HibernateException( "Unable to instantiate FieldBridge for " + member.getName(), e );
+ }
+ }
+ else if ( member.isAnnotationPresent( org.hibernate.search.annotations.DateBridge.class ) ) {
+ Resolution resolution = member.getAnnotation( org.hibernate.search.annotations.DateBridge.class ).resolution();
+ bridge = getDateField( resolution );
+ }
+ else {
+ //find in built-ins
+ XClass returnType = member.getType();
+ bridge = builtInBridges.get( returnType.getName() );
+ }
+ //TODO add classname
+ if ( bridge == null ) throw new HibernateException( "Unable to guess FieldBridge for " + member.getName() );
+ return bridge;
+ }
+
+ public static FieldBridge getDateField(Resolution resolution) {
+ switch (resolution) {
+ case YEAR:
+ return DATE_YEAR;
+ case MONTH:
+ return DATE_MONTH;
+ case DAY:
+ return DATE_DAY;
+ case HOUR:
+ return DATE_HOUR;
+ case MINUTE:
+ return DATE_MINUTE;
+ case SECOND:
+ return DATE_SECOND;
+ case MILLISECOND:
+ return DATE_MILLISECOND;
+ default:
+ throw new AssertionFailure( "Unknown Resolution: " + resolution );
+ }
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,23 @@
+//$Id: $
+package org.hibernate.search.bridge;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+/**
+ * Link between a java property and a Lucene Document
+ * Usually a Java property will be linked to a Document Field
+ *
+ * @author Emmanuel Bernard
+ */
+//TODO should show Field or document?
+//document is nice since I can save an object into several fields
+public interface FieldBridge {
+ /**
+ * Manipulate the document to index the given value.
+ * A common implementation is to add a Field <code>name</code> to the given document following
+ * the parameters (<code>store</code>, <code>index</code>, <code>boost</code>) if the
+ * <code>value</code> is not null
+ */
+ void set(String name, Object value, Document document, Field.Store store, Field.Index index, Float boost);
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/ParameterizedBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/ParameterizedBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/ParameterizedBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,14 @@
+//$Id: $
+package org.hibernate.search.bridge;
+
+import java.util.Map;
+
+/**
+ * Allow parameter injection to a given bridge
+ *
+ * @author Emmanuel Bernard
+ */
+public interface ParameterizedBridge {
+ //TODO inject Properties? since the annotations cannot support Object attribute?
+ void setParameterValues(Map parameters);
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/String2FieldBridgeAdaptor.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/String2FieldBridgeAdaptor.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/String2FieldBridgeAdaptor.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,30 @@
+//$Id: $
+package org.hibernate.search.bridge;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Bridge to use a StringBridge as a FieldBridge
+ *
+ * @author Emmanuel Bernard
+ */
+public class String2FieldBridgeAdaptor implements FieldBridge {
+ private StringBridge stringBridge;
+
+ public String2FieldBridgeAdaptor(StringBridge stringBridge) {
+ this.stringBridge = stringBridge;
+ }
+
+ public void set(String name, Object value, Document document, Field.Store store, Field.Index index, Float boost) {
+ String indexedString = stringBridge.objectToString( value );
+ //Do not add fields on empty strings, seems a sensible default in most situations
+ if ( StringHelper.isNotEmpty( indexedString ) ) {
+ Field field = new Field( name, indexedString, store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ }
+ }
+
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/StringBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/StringBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/StringBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,16 @@
+//$Id: $
+package org.hibernate.search.bridge;
+
+/**
+ * Transform an object into a string representation
+ *
+ * @author Emmanuel Bernard
+ */
+public interface StringBridge {
+
+ /**
+ * Convert the object representation to a String.
+ * The returned String must not be null; it can be empty though.
+ */
+ String objectToString(Object object);
+}
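
Illustrative note (not part of this patch): the simplest useful StringBridge, mapping any enum constant to its declared name; the class name is hypothetical.

import org.hibernate.search.bridge.StringBridge;

/** Hypothetical example: index any enum constant by its name(). */
public class EnumNameBridge implements StringBridge {
	public String objectToString(Object object) {
		//return an empty string rather than null, as the contract requires
		return object == null ? "" : ( (Enum) object ).name();
	}
}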
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayFieldBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayFieldBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayFieldBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,32 @@
+//$Id: $
+package org.hibernate.search.bridge;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * A FieldBridge able to convert the index representation back into an object without losing information.
+ *
+ * Any bridge expected to process a document id should implement this interface.
+ * EXPERIMENTAL: Consider this interface private.
+ *
+ * @author Emmanuel Bernard
+ */
+//FIXME rework the interface inheritance; there are some common concepts with StringBridge
+public interface TwoWayFieldBridge extends FieldBridge {
+ /**
+ * Build the element object from the Document.
+ *
+ * The return value is the entity id.
+ *
+ * @param name field name
+ * @param document document
+ */
+ Object get(String name, Document document);
+
+ /**
+ * Convert the object representation to a String.
+ * The returned String must not be null; it can be empty though.
+ * EXPERIMENTAL: API subject to change in the future
+ */
+ String objectToString(Object object);
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayString2FieldBridgeAdaptor.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayString2FieldBridgeAdaptor.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayString2FieldBridgeAdaptor.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,30 @@
+//$Id: $
+package org.hibernate.search.bridge;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+/**
+ * Bridge to use a TwoWayStringBridge as a TwoWayFieldBridge
+ *
+ * @author Emmanuel Bernard
+ */
+//TODO use Generics to avoid double declaration of stringBridge
+public class TwoWayString2FieldBridgeAdaptor extends String2FieldBridgeAdaptor implements TwoWayFieldBridge {
+
+ private TwoWayStringBridge stringBridge;
+
+ public TwoWayString2FieldBridgeAdaptor(TwoWayStringBridge stringBridge) {
+ super( stringBridge );
+ this.stringBridge = stringBridge;
+ }
+
+ public String objectToString(Object object) {
+ return stringBridge.objectToString( object );
+ }
+
+ public Object get(String name, Document document) {
+ Field field = document.getField( name );
+ return stringBridge.stringToObject( field.stringValue() );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayStringBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayStringBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/TwoWayStringBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,18 @@
+//$Id: $
+package org.hibernate.search.bridge;
+
+/**
+ * StringBridge allowing a translation from the String back to the Object.
+ * objectToString( stringToObject( string ) ) and stringToObject( objectToString( object ) )
+ * should behave as identity round trips (no information may be lost). More precisely,
+ *
+ * objectToString( stringToObject( string ) ).equals( string ) for string not null
+ * stringToObject( objectToString( object ) ).equals( object ) for object not null
+ * @author Emmanuel Bernard
+ */
+public interface TwoWayStringBridge extends StringBridge {
+ /**
+ * Convert the string representation to an object
+ */
+ Object stringToObject(String stringValue);
+}
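
Illustrative note (not part of this patch): the round-trip contract can be checked with any of the builtin bridges added below in this commit, e.g. IntegerBridge (run with -ea to enable asserts).

import org.hibernate.search.bridge.TwoWayStringBridge;
import org.hibernate.search.bridge.builtin.IntegerBridge;

public class RoundTripCheck {
	public static void main(String[] args) {
		TwoWayStringBridge bridge = new IntegerBridge();
		Integer object = Integer.valueOf( 42 );
		String string = bridge.objectToString( object );
		//both compositions must round-trip without loss
		assert bridge.stringToObject( string ).equals( object );
		assert bridge.objectToString( bridge.stringToObject( string ) ).equals( string );
	}
}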
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/BigDecimalBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/BigDecimalBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/BigDecimalBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,18 @@
+//$Id: $
+package org.hibernate.search.bridge.builtin;
+
+import java.math.BigDecimal;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a BigDecimal element
+ *
+ * @author Emmanuel Bernard
+ */
+public class BigDecimalBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new BigDecimal( stringValue );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/BigIntegerBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/BigIntegerBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/BigIntegerBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,18 @@
+//$Id: $
+package org.hibernate.search.bridge.builtin;
+
+import java.math.BigInteger;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a BigInteger element
+ *
+ * @author Emmanuel Bernard
+ */
+public class BigIntegerBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new BigInteger( stringValue );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/DateBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/DateBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/DateBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,111 @@
+//$Id: $
+package org.hibernate.search.bridge.builtin;
+
+import java.text.ParseException;
+import java.util.Date;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.lucene.document.DateTools;
+import org.hibernate.AssertionFailure;
+import org.hibernate.HibernateException;
+import org.hibernate.search.bridge.StringBridge;
+import org.hibernate.search.bridge.ParameterizedBridge;
+import org.hibernate.search.bridge.String2FieldBridgeAdaptor;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.bridge.TwoWayString2FieldBridgeAdaptor;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Bridge a java.util.Date to a String, truncated to the given resolution.
+ * Dates are stored GMT based.
+ * <p/>
+ * i.e.
+ * Resolution.YEAR: yyyy
+ * Resolution.MONTH: yyyyMM
+ * Resolution.DAY: yyyyMMdd
+ * Resolution.HOUR: yyyyMMddHH
+ * Resolution.MINUTE: yyyyMMddHHmm
+ * Resolution.SECOND: yyyyMMddHHmmss
+ * Resolution.MILLISECOND: yyyyMMddHHmmssSSS
+ *
+ * @author Emmanuel Bernard
+ */
+//TODO split into StringBridge and TwoWayStringBridge?
+public class DateBridge implements TwoWayStringBridge, ParameterizedBridge {
+
+ public static final TwoWayStringBridge DATE_YEAR = new DateBridge( Resolution.YEAR );
+ public static final TwoWayStringBridge DATE_MONTH = new DateBridge( Resolution.MONTH );
+ public static final TwoWayStringBridge DATE_DAY = new DateBridge( Resolution.DAY );
+ public static final TwoWayStringBridge DATE_HOUR = new DateBridge( Resolution.HOUR );
+ public static final TwoWayStringBridge DATE_MINUTE = new DateBridge( Resolution.MINUTE );
+ public static final TwoWayStringBridge DATE_SECOND = new DateBridge( Resolution.SECOND );
+ public static final TwoWayStringBridge DATE_MILLISECOND = new DateBridge( Resolution.MILLISECOND );
+
+ DateTools.Resolution resolution;
+
+ public DateBridge() {
+ }
+
+ public DateBridge(Resolution resolution) {
+ setResolution( resolution );
+ }
+
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ try {
+ return DateTools.stringToDate( stringValue );
+ }
+ catch (ParseException e) {
+ throw new HibernateException( "Unable to parse into date: " + stringValue, e );
+ }
+ }
+
+ public String objectToString(Object object) {
+ return object != null ?
+ DateTools.dateToString( (Date) object, resolution ) :
+ null;
+ }
+
+ public void setParameterValues(Map parameters) {
+ Object resolution = parameters.get( "resolution" );
+ Resolution hibResolution;
+ if ( resolution instanceof String ) {
+ hibResolution = Resolution.valueOf( ( (String) resolution ).toUpperCase( Locale.ENGLISH ) );
+ }
+ else {
+ hibResolution = (Resolution) resolution;
+ }
+ setResolution( hibResolution );
+ }
+
+ private void setResolution(Resolution hibResolution) {
+ switch (hibResolution) {
+ case YEAR:
+ this.resolution = DateTools.Resolution.YEAR;
+ break;
+ case MONTH:
+ this.resolution = DateTools.Resolution.MONTH;
+ break;
+ case DAY:
+ this.resolution = DateTools.Resolution.DAY;
+ break;
+ case HOUR:
+ this.resolution = DateTools.Resolution.HOUR;
+ break;
+ case MINUTE:
+ this.resolution = DateTools.Resolution.MINUTE;
+ break;
+ case SECOND:
+ this.resolution = DateTools.Resolution.SECOND;
+ break;
+ case MILLISECOND:
+ this.resolution = DateTools.Resolution.MILLISECOND;
+ break;
+ default:
+ throw new AssertionFailure( "Unknown Resolution: " + hibResolution );
+
+ }
+ }
+}
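
Illustrative note (not part of this patch): a usage sketch showing parameter injection into DateBridge; the raw Map and the "day" value mirror setParameterValues above.

import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import org.hibernate.search.bridge.builtin.DateBridge;

public class DateBridgeExample {
	public static void main(String[] args) {
		DateBridge bridge = new DateBridge();
		Map parameters = new HashMap();
		parameters.put( "resolution", "day" ); //uppercased and resolved via Resolution.valueOf
		bridge.setParameterValues( parameters );
		String indexed = bridge.objectToString( new Date() );
		System.out.println( indexed ); //yyyyMMdd, e.g. 20061124
		Date truncated = (Date) bridge.stringToObject( indexed );
		System.out.println( truncated ); //time-of-day below Resolution.DAY is lost
	}
}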
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/DoubleBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/DoubleBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/DoubleBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,16 @@
+//$Id: $
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a double element
+ *
+ * @author Emmanuel Bernard
+ */
+public class DoubleBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new Double( stringValue );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/FloatBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/FloatBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/FloatBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,16 @@
+//$Id: $
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a float element
+ *
+ * @author Emmanuel Bernard
+ */
+public class FloatBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new Float( stringValue );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/IntegerBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/IntegerBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/IntegerBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,16 @@
+//$Id: $
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map an integer element
+ *
+ * @author Emmanuel Bernard
+ */
+public class IntegerBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new Integer( stringValue );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/LongBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/LongBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/LongBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,16 @@
+//$Id: $
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a long element
+ *
+ * @author Emmanuel Bernard
+ */
+public class LongBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new Long( stringValue );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/NumberBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/NumberBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/NumberBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,16 @@
+//$Id: $
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.search.bridge.StringBridge;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class NumberBridge implements TwoWayStringBridge {
+ public String objectToString(Object object) {
+ return object != null ?
+ object.toString() :
+ null;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/ShortBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/ShortBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/ShortBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,16 @@
+//$Id: $
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a short element
+ *
+ * @author Emmanuel Bernard
+ */
+public class ShortBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new Short( stringValue );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/StringBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/StringBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/bridge/builtin/StringBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,18 @@
+//$Id: $
+package org.hibernate.search.bridge.builtin;
+
+
+/**
+ * Map a string element
+ *
+ * @author Emmanuel Bernard
+ */
+public class StringBridge implements org.hibernate.search.bridge.TwoWayStringBridge {
+ public Object stringToObject(String stringValue) {
+ return stringValue;
+ }
+
+ public String objectToString(Object object) {
+ return (String) object;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,329 @@
+//$Id: DocumentBuilder.java 10865 2006-11-23 23:30:01 +0100 (jeu., 23 nov. 2006) epbernard $
+package org.hibernate.search.engine;
+
+import java.io.Serializable;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.Term;
+import org.hibernate.AssertionFailure;
+import org.hibernate.HibernateException;
+import org.hibernate.cfg.annotations.Version;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Keyword;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.Text;
+import org.hibernate.search.annotations.Unstored;
+import org.hibernate.search.bridge.BridgeFactory;
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.TwoWayFieldBridge;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.util.BinderHelper;
+import org.hibernate.reflection.ReflectionManager;
+import org.hibernate.reflection.XAnnotatedElement;
+import org.hibernate.reflection.XClass;
+import org.hibernate.reflection.XMember;
+import org.hibernate.reflection.XProperty;
+import org.hibernate.util.ReflectHelper;
+
+/**
+ * Set up and provide a manager for indexed classes.
+ *
+ * @author Gavin King
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ * @author Richard Hallier
+ */
+public class DocumentBuilder<T> {
+
+ static {
+ Version.touch(); //touch version
+ }
+
+ private final List<XMember> keywordGetters = new ArrayList<XMember>();
+ private final List<String> keywordNames = new ArrayList<String>();
+ private final List<FieldBridge> keywordBridges = new ArrayList<FieldBridge>();
+ private final List<XMember> unstoredGetters = new ArrayList<XMember>();
+ private final List<String> unstoredNames = new ArrayList<String>();
+ private final List<FieldBridge> unstoredBridges = new ArrayList<FieldBridge>();
+ private final List<XMember> textGetters = new ArrayList<XMember>();
+ private final List<String> textNames = new ArrayList<String>();
+ private final List<FieldBridge> textBridges = new ArrayList<FieldBridge>();
+ private final List<String> fieldNames = new ArrayList<String>();
+ private final List<XMember> fieldGetters = new ArrayList<XMember>();
+ private final List<FieldBridge> fieldBridges = new ArrayList<FieldBridge>();
+ private final List<Field.Store> fieldStore = new ArrayList<Field.Store>();
+ private final List<Field.Index> fieldIndex = new ArrayList<Field.Index>();
+
+ private final XClass beanClass;
+ private final DirectoryProvider directoryProvider;
+ private String idKeywordName;
+ private final Analyzer analyzer;
+ private Float idBoost;
+ public static final String CLASS_FIELDNAME = "_hibernate_class";
+ private TwoWayFieldBridge idBridge;
+ private Set<Class> mappedSubclasses = new HashSet<Class>();
+ private ReflectionManager reflectionManager;
+
+
+ public DocumentBuilder(XClass clazz, Analyzer analyzer, DirectoryProvider directory,
+ ReflectionManager reflectionManager) {
+ this.beanClass = clazz;
+ this.analyzer = analyzer;
+ this.directoryProvider = directory;
+ //FIXME get rid of it when boost is stored?
+ this.reflectionManager = reflectionManager;
+
+ if ( clazz == null ) throw new AssertionFailure( "Unable to build a DocumentBuilder with a null class" );
+
+ for ( XClass currClass = beanClass; currClass != null; currClass = currClass.getSuperclass() ) {
+ //reject non-properties: the object is loaded from Hibernate, so indexing a non-property does not make sense
+ List<XProperty> methods = currClass.getDeclaredProperties( XClass.ACCESS_PROPERTY );
+ for ( XProperty method : methods ) {
+ initializeMember( method );
+ }
+
+ List<XProperty> fields = currClass.getDeclaredProperties( XClass.ACCESS_FIELD );
+ for ( XProperty field : fields ) {
+ initializeMember( field );
+ }
+ }
+
+ if ( idKeywordName == null ) {
+ throw new HibernateException( "No document id for: " + clazz.getName() );
+ }
+ }
+
+ private void initializeMember(XProperty member) {
+ Keyword keywordAnn = member.getAnnotation( Keyword.class );
+ if ( keywordAnn != null ) {
+ String name = BinderHelper.getAttributeName( member, keywordAnn.name() );
+ if ( keywordAnn.id() ) {
+ idKeywordName = name;
+ idBoost = getBoost( member );
+ FieldBridge fieldBridge = BridgeFactory.guessType( member );
+ if ( fieldBridge instanceof TwoWayFieldBridge ) {
+ idBridge = (TwoWayFieldBridge) fieldBridge;
+ }
+ else {
+ throw new HibernateException(
+ "Bridge for document id does not implement IdFieldBridge: " + member.getName() );
+ }
+ }
+ else {
+ setAccessible( member );
+ keywordGetters.add( member );
+ keywordNames.add( name );
+ keywordBridges.add( BridgeFactory.guessType( member ) );
+ }
+ }
+
+ Unstored unstoredAnn = member.getAnnotation( Unstored.class );
+ if ( unstoredAnn != null ) {
+ setAccessible( member );
+ unstoredGetters.add( member );
+ unstoredNames.add( BinderHelper.getAttributeName( member, unstoredAnn.name() ) );
+ unstoredBridges.add( BridgeFactory.guessType( member ) );
+ }
+
+ Text textAnn = member.getAnnotation( Text.class );
+ if ( textAnn != null ) {
+ setAccessible( member );
+ textGetters.add( member );
+ textNames.add( BinderHelper.getAttributeName( member, textAnn.name() ) );
+ textBridges.add( BridgeFactory.guessType( member ) );
+ }
+
+ DocumentId documentIdAnn = member.getAnnotation( DocumentId.class );
+ if ( documentIdAnn != null ) {
+ if ( idKeywordName != null ) {
+ throw new AssertionFailure( "Two document id assigned: "
+ + idKeywordName + " and " + BinderHelper.getAttributeName( member, documentIdAnn.name() ) );
+ }
+ idKeywordName = BinderHelper.getAttributeName( member, documentIdAnn.name() );
+ FieldBridge fieldBridge = BridgeFactory.guessType( member );
+ if ( fieldBridge instanceof TwoWayFieldBridge ) {
+ idBridge = (TwoWayFieldBridge) fieldBridge;
+ }
+ else {
+ throw new HibernateException(
+ "Bridge for document id does not implement IdFieldBridge: " + member.getName() );
+ }
+ idBoost = getBoost( member );
+ }
+
+ org.hibernate.search.annotations.Field fieldAnn =
+ member.getAnnotation( org.hibernate.search.annotations.Field.class );
+ if ( fieldAnn != null ) {
+ setAccessible( member );
+ fieldGetters.add( member );
+ fieldNames.add( BinderHelper.getAttributeName( member, fieldAnn.name() ) );
+ fieldStore.add( getStore( fieldAnn.store() ) );
+ fieldIndex.add( getIndex( fieldAnn.index() ) );
+ fieldBridges.add( BridgeFactory.guessType( member ) );
+ }
+ }
+
+ private Field.Store getStore(Store store) {
+ switch (store) {
+ case NO:
+ return Field.Store.NO;
+ case YES:
+ return Field.Store.YES;
+ case COMPRESS:
+ return Field.Store.COMPRESS;
+ default:
+ throw new AssertionFailure( "Unexpected Store: " + store );
+ }
+ }
+
+ private Field.Index getIndex(Index index) {
+ switch (index) {
+ case NO:
+ return Field.Index.NO;
+ case NO_NORMS:
+ return Field.Index.NO_NORMS;
+ case TOKENIZED:
+ return Field.Index.TOKENIZED;
+ case UN_TOKENISED:
+ return Field.Index.UN_TOKENIZED;
+ default:
+ throw new AssertionFailure( "Unexpected Index: " + index );
+ }
+ }
+
+ private Float getBoost(XAnnotatedElement element) {
+ if ( element == null ) return null;
+ Boost boost = element.getAnnotation( Boost.class );
+ return boost != null ?
+ boost.value() :
+ null;
+ }
+
+ private Object getMemberValue(T bean, XMember getter) {
+ Object value;
+ try {
+ value = getter.invoke( bean );
+ }
+ catch (Exception e) {
+ throw new IllegalStateException( "Could not get property value", e );
+ }
+ return value;
+ }
+
+ public Document getDocument(T instance, Serializable id) {
+ Document doc = new Document();
+ XClass instanceClass = reflectionManager.toXClass( instance.getClass() );
+ Float boost = getBoost( instanceClass );
+ if ( boost != null ) {
+ doc.setBoost( boost );
+ }
+ {
+ Field classField =
+ new Field( CLASS_FIELDNAME, instanceClass.getName(), Field.Store.YES, Field.Index.UN_TOKENIZED );
+ doc.add( classField );
+ idBridge.set( idKeywordName, id, doc, Field.Store.YES, Field.Index.UN_TOKENIZED, idBoost );
+ }
+ for ( int i = 0; i < keywordNames.size(); i++ ) {
+ XMember member = keywordGetters.get( i );
+ Object value = getMemberValue( instance, member );
+ keywordBridges.get( i ).set(
+ keywordNames.get( i ), value, doc, Field.Store.YES,
+ Field.Index.UN_TOKENIZED, getBoost( member )
+ );
+ }
+ for ( int i = 0; i < textNames.size(); i++ ) {
+ XMember member = textGetters.get( i );
+ Object value = getMemberValue( instance, member );
+ textBridges.get( i ).set(
+ textNames.get( i ), value, doc, Field.Store.YES,
+ Field.Index.TOKENIZED, getBoost( member )
+ );
+ }
+ for ( int i = 0; i < unstoredNames.size(); i++ ) {
+ XMember member = unstoredGetters.get( i );
+ Object value = getMemberValue( instance, member );
+ unstoredBridges.get( i ).set(
+ unstoredNames.get( i ), value, doc, Field.Store.NO,
+ Field.Index.TOKENIZED, getBoost( member )
+ );
+ }
+ for ( int i = 0; i < fieldNames.size(); i++ ) {
+ XMember member = fieldGetters.get( i );
+ Object value = getMemberValue( instance, member );
+ fieldBridges.get( i ).set(
+ fieldNames.get( i ), value, doc, fieldStore.get( i ),
+ fieldIndex.get( i ), getBoost( member )
+ );
+ }
+ return doc;
+ }
+
+ public Term getTerm(Serializable id) {
+ return new Term( idKeywordName, idBridge.objectToString( id ) );
+ }
+
+ public DirectoryProvider getDirectoryProvider() {
+ return directoryProvider;
+ }
+
+ public Analyzer getAnalyzer() {
+ return analyzer;
+ }
+
+ private static void setAccessible(XMember member) {
+ if ( !Modifier.isPublic( member.getModifiers() ) ) {
+ member.setAccessible( true );
+ }
+ }
+
+ public TwoWayFieldBridge getIdBridge() {
+ return idBridge;
+ }
+
+ public String getIdKeywordName() {
+ return idKeywordName;
+ }
+
+ public static Class getDocumentClass(Document document) {
+ String className = document.get( DocumentBuilder.CLASS_FIELDNAME );
+ try {
+ return ReflectHelper.classForName( className );
+ }
+ catch (ClassNotFoundException e) {
+ throw new HibernateException( "Unable to load indexed class: " + className, e );
+ }
+ }
+
+ public static Serializable getDocumentId(FullTextIndexEventListener listener, Class clazz, Document document) {
+ DocumentBuilder builder = listener.getDocumentBuilders().get( clazz );
+ if ( builder == null ) throw new HibernateException( "No Lucene configuration set up for: " + clazz.getName() );
+ return (Serializable) builder.getIdBridge().get( builder.getIdKeywordName(), document );
+ }
+
+ public void postInitialize(Set<Class> indexedClasses) {
+ //this method does not require synchronization
+ Class plainClass = reflectionManager.toClass( beanClass );
+ Set<Class> tempMappedSubclasses = new HashSet<Class>();
+ //together with the caller this is O(n^2), but I think it's still faster than building the class hierarchy upward for each class
+ for ( Class currentClass : indexedClasses ) {
+ if ( plainClass.isAssignableFrom( currentClass ) ) tempMappedSubclasses.add( currentClass );
+ }
+ mappedSubclasses = Collections.unmodifiableSet( tempMappedSubclasses );
+ }
+
+
+ public Set<Class> getMappedSubclasses() {
+ return mappedSubclasses;
+ }
+}
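
Illustrative note (not part of this patch): a hypothetical entity showing the annotations DocumentBuilder scans for. Per getDocument() above, @Text properties are tokenized and stored, @Unstored ones tokenized but not stored, and @Keyword ones stored untokenized; annotation name() defaults are assumed to fall back to the property name via BinderHelper.

import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Keyword;
import org.hibernate.search.annotations.Text;
import org.hibernate.search.annotations.Unstored;

@Indexed
public class Essay {
	private Long id;
	private String author;
	private String title;
	private String body;

	@DocumentId
	public Long getId() { return id; }

	@Keyword //stored, untokenized: good for exact matches
	public String getAuthor() { return author; }

	@Text //tokenized and stored
	public String getTitle() { return title; }

	@Unstored //tokenized but not stored in the index
	public String getBody() { return body; }

	//setters omitted for brevity
}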
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,180 @@
+//$Id: FullTextIndexEventListener.java 10865 2006-11-23 23:30:01 +0100 (jeu., 23 nov. 2006) epbernard $
+package org.hibernate.search.event;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
+import javax.transaction.Status;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.hibernate.HibernateException;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.event.AbstractEvent;
+import org.hibernate.event.Initializable;
+import org.hibernate.event.PostDeleteEvent;
+import org.hibernate.event.PostDeleteEventListener;
+import org.hibernate.event.PostInsertEvent;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.event.PostUpdateEvent;
+import org.hibernate.event.PostUpdateEventListener;
+import org.hibernate.search.Environment;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.backend.AddWork;
+import org.hibernate.search.backend.DeleteWork;
+import org.hibernate.search.backend.UpdateWork;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
+import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.DirectoryProviderFactory;
+import org.hibernate.mapping.PersistentClass;
+import org.hibernate.reflection.ReflectionManager;
+import org.hibernate.reflection.XClass;
+import org.hibernate.util.ReflectHelper;
+
+/**
+ * This listener supports setting a parent directory for all generated index files.
+ * It also supports setting the analyzer class to be used.
+ *
+ * @author Gavin King
+ * @author Emmanuel Bernard
+ * @author Mattias Arbin
+ */
+//TODO work on sharing the same indexWriters and readers across a single post operation...
+//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
+public class FullTextIndexEventListener implements PostDeleteEventListener, PostInsertEventListener,
+ PostUpdateEventListener, Initializable {
+ ReflectionManager reflectionManager;
+
+ //FIXME keeping this here is a bad decision since you might want to search indexes without maintaining them
+ @Deprecated
+ public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
+ return documentBuilders;
+ }
+
+
+ private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
+ //keep track of the index modifiers per DirectoryProvider since multiple entities can use the same directory provider
+ private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
+ new HashMap<DirectoryProvider, ReentrantLock>();
+ private boolean initialized;
+
+ private static final Log log = LogFactory.getLog( FullTextIndexEventListener.class );
+
+ public void initialize(Configuration cfg) {
+ if ( initialized ) return;
+ //yuk
+ reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
+
+ Class analyzerClass;
+ String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
+ if ( analyzerClassName != null ) {
+ try {
+ analyzerClass = ReflectHelper.classForName( analyzerClassName );
+ }
+ catch (Exception e) {
+ throw new HibernateException(
+ "Lucene analyzer class '" + analyzerClassName + "' defined in property '" + Environment.ANALYZER_CLASS + "' could not be found.",
+ e
+ );
+ }
+ }
+ else {
+ analyzerClass = StandardAnalyzer.class;
+ }
+ // Initialize analyzer
+ Analyzer analyzer;
+ try {
+ analyzer = (Analyzer) analyzerClass.newInstance();
+ }
+ catch (ClassCastException e) {
+ throw new HibernateException(
+ "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClassName
+ );
+ }
+ catch (Exception e) {
+ throw new HibernateException( "Failed to instantiate lucene analyzer with type " + analyzerClassName );
+ }
+
+ Iterator iter = cfg.getClassMappings();
+ DirectoryProviderFactory factory = new DirectoryProviderFactory();
+ while ( iter.hasNext() ) {
+ PersistentClass clazz = (PersistentClass) iter.next();
+ Class<?> mappedClass = clazz.getMappedClass();
+ if ( mappedClass != null ) {
+ XClass mappedXClass = reflectionManager.toXClass( mappedClass );
+ if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
+ DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg );
+ if ( !lockableDirectoryProviders.containsKey( provider ) ) {
+ lockableDirectoryProviders.put( provider, new ReentrantLock() );
+ }
+ final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
+ mappedXClass, analyzer, provider, reflectionManager
+ );
+
+ documentBuilders.put( mappedClass, documentBuilder );
+ }
+ }
+ }
+ Set<Class> indexedClasses = documentBuilders.keySet();
+ for ( DocumentBuilder builder : documentBuilders.values() ) {
+ builder.postInitialize( indexedClasses );
+ }
+ initialized = true;
+ }
+
+ public void onPostDelete(PostDeleteEvent event) {
+ if ( documentBuilders.containsKey( event.getEntity().getClass() ) ) {
+ DeleteWork work = new DeleteWork( event.getId(), event.getEntity().getClass() );
+ processWork( work, event );
+ }
+ }
+
+ public void onPostInsert(PostInsertEvent event) {
+ final Object entity = event.getEntity();
+ DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+ if ( builder != null ) {
+ Serializable id = event.getId();
+ Document doc = builder.getDocument( entity, id );
+ AddWork work = new AddWork( id, entity.getClass(), doc );
+ processWork( work, event );
+ }
+ }
+
+ public void onPostUpdate(PostUpdateEvent event) {
+ final Object entity = event.getEntity();
+ DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+ if ( builder != null ) {
+ Serializable id = event.getId();
+ Document doc = builder.getDocument( entity, id );
+ UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
+ processWork( work, event );
+ }
+ }
+
+ private void processWork(Work work, AbstractEvent event) {
+ WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
+ workQueue.add( work );
+ PostTransactionWorkQueueSynchronization sync = new PostTransactionWorkQueueSynchronization( workQueue );
+ if ( event.getSession().isTransactionInProgress() ) {
+ event.getSession().getTransaction().registerSynchronization( sync );
+ }
+ else {
+ sync.afterCompletion( Status.STATUS_COMMITTED );
+ }
+ }
+
+ public Map<DirectoryProvider, ReentrantLock> getLockableDirectoryProviders() {
+ return lockableDirectoryProviders;
+ }
+}
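
Illustrative note (not part of this patch): the listener is not auto-registered here, so bootstrap code must wire one shared instance into all three event types (documentBuilders and lockableDirectoryProviders are per-instance state, and initialize(Configuration) is invoked on Initializable listeners when the factory is built). A hedged sketch, assuming Configuration.setListener wiring; XML event mappings would be an equivalent route.

import org.hibernate.SessionFactory;
import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.search.event.FullTextIndexEventListener;

public class SearchBootstrap {
	public static void main(String[] args) {
		AnnotationConfiguration cfg = new AnnotationConfiguration();
		//one shared instance: the builders and directory locks live on the listener
		FullTextIndexEventListener listener = new FullTextIndexEventListener();
		cfg.setListener( "post-insert", listener );
		cfg.setListener( "post-update", listener );
		cfg.setListener( "post-delete", listener );
		cfg.configure(); //hibernate.cfg.xml supplies mappings and properties
		SessionFactory factory = cfg.buildSessionFactory();
	}
}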
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,466 @@
+//$Id: $
+package org.hibernate.search.impl;
+
+import java.io.Serializable;
+import java.sql.Connection;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.transaction.Status;
+
+import org.hibernate.CacheMode;
+import org.hibernate.Criteria;
+import org.hibernate.EntityMode;
+import org.hibernate.Filter;
+import org.hibernate.FlushMode;
+import org.hibernate.HibernateException;
+import org.hibernate.LockMode;
+import org.hibernate.Query;
+import org.hibernate.ReplicationMode;
+import org.hibernate.SQLQuery;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.engine.query.ParameterMetadata;
+import org.hibernate.impl.SessionImpl;
+import org.hibernate.search.query.FullTextQueryImpl;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.util.ContextHelper;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.backend.UpdateWork;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
+import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.stat.SessionStatistics;
+import org.hibernate.type.Type;
+import org.apache.lucene.document.Document;
+
+/**
+ * Lucene Full text search aware session
+ *
+ * @author Emmanuel Bernard
+ */
+public class FullTextSessionImpl implements FullTextSession {
+ private final SessionImpl session;
+ private PostTransactionWorkQueueSynchronization postTransactionWorkQueueSynch;
+
+ public FullTextSessionImpl(Session session) {
+ this.session = (SessionImpl) session;
+ }
+
+ /**
+ * Execute a Lucene query and retrieve managed objects of the given entity types (or their indexed subclasses).
+ * If entities is empty, all indexed entities are included.
+ *
+ * @param entities must be immutable for the lifetime of the query object
+ */
+ public Query createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class... entities) {
+ return new FullTextQueryImpl( luceneQuery, entities, session, new ParameterMetadata(null, null) );
+ }
+
+ /**
+ * (Re)index an entity.
+ * Non-indexable entities are ignored.
+ * The entity must be associated with the session.
+ *
+ * @param entity must not be null
+ */
+ public void index(Object entity) {
+ if (entity == null) return;
+ Class clazz = entity.getClass();
+ FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
+ DocumentBuilder<Object> builder = listener.getDocumentBuilders().get( clazz );
+ if ( builder != null ) {
+ Serializable id = session.getIdentifier( entity );
+ Document doc = builder.getDocument( entity, id );
+ UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
+ processWork( work, listener.getDocumentBuilders(), listener.getLockableDirectoryProviders() );
+ }
+ //TODO
+ //need to add elements in a queue kept at the Session level
+ //the queue will be processed by a Lucene(Auto)FlushEventListener
+ //note that we could keep this queue somewhere in the event listener in the meantime, but that requires
+ // a synchronized hashmap holding this queue on a per session basis plus some session housekeeping (yuk)
+ //another solution would be to subclass SessionImpl instead of having this LuceneSession delegation model
+ // this is an open discussion
+ }
+
+ private void processWork(Work work, Map<Class, DocumentBuilder<Object>> documentBuilders,
+ Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ if ( session.isTransactionInProgress() ) {
+ if ( postTransactionWorkQueueSynch == null || postTransactionWorkQueueSynch.isConsumed() ) {
+ postTransactionWorkQueueSynch = createWorkQueueSync( documentBuilders, lockableDirectoryProviders);
+ session.getTransaction().registerSynchronization( postTransactionWorkQueueSynch );
+ }
+ postTransactionWorkQueueSynch.add( work );
+ }
+ else {
+ //no transaction work right away
+ PostTransactionWorkQueueSynchronization sync =
+ createWorkQueueSync( documentBuilders, lockableDirectoryProviders );
+ sync.add( work );
+ sync.afterCompletion( Status.STATUS_COMMITTED );
+ }
+ }
+
+ private PostTransactionWorkQueueSynchronization createWorkQueueSync(
+ Map<Class, DocumentBuilder<Object>> documentBuilders,
+ Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
+ return new PostTransactionWorkQueueSynchronization( workQueue );
+ }
+
+ public Query createSQLQuery(String sql, String returnAlias, Class returnClass) {
+ return session.createSQLQuery( sql, returnAlias, returnClass );
+ }
+
+ public Query createSQLQuery(String sql, String[] returnAliases, Class[] returnClasses) {
+ return session.createSQLQuery( sql, returnAliases, returnClasses );
+ }
+
+ public int delete(String query) throws HibernateException {
+ return session.delete( query );
+ }
+
+ public int delete(String query, Object value, Type type) throws HibernateException {
+ return session.delete( query, value, type );
+ }
+
+ public int delete(String query, Object[] values, Type[] types) throws HibernateException {
+ return session.delete( query, values, types );
+ }
+
+ public Collection filter(Object collection, String filter) throws HibernateException {
+ return session.filter( collection, filter );
+ }
+
+ public Collection filter(Object collection, String filter, Object value, Type type) throws HibernateException {
+ return session.filter( collection, filter, value, type );
+ }
+
+ public Collection filter(Object collection, String filter, Object[] values, Type[] types) throws HibernateException {
+ return session.filter( collection, filter, values, types );
+ }
+
+ public List find(String query) throws HibernateException {
+ return session.find( query );
+ }
+
+ public List find(String query, Object value, Type type) throws HibernateException {
+ return session.find( query, value, type );
+ }
+
+ public List find(String query, Object[] values, Type[] types) throws HibernateException {
+ return session.find( query, values, types );
+ }
+
+ public Iterator iterate(String query) throws HibernateException {
+ return session.iterate( query );
+ }
+
+ public Iterator iterate(String query, Object value, Type type) throws HibernateException {
+ return session.iterate( query, value, type );
+ }
+
+ public Iterator iterate(String query, Object[] values, Type[] types) throws HibernateException {
+ return session.iterate( query, values, types );
+ }
+
+ public void save(String entityName, Object object, Serializable id) throws HibernateException {
+ session.save( entityName, object, id );
+ }
+
+ public void save(Object object, Serializable id) throws HibernateException {
+ session.save( object, id );
+ }
+
+ public Object saveOrUpdateCopy(String entityName, Object object) throws HibernateException {
+ return session.saveOrUpdateCopy( entityName, object );
+ }
+
+ public Object saveOrUpdateCopy(String entityName, Object object, Serializable id) throws HibernateException {
+ return session.saveOrUpdateCopy( entityName, object, id );
+ }
+
+ public Object saveOrUpdateCopy(Object object) throws HibernateException {
+ return session.saveOrUpdateCopy( object );
+ }
+
+ public Object saveOrUpdateCopy(Object object, Serializable id) throws HibernateException {
+ return session.saveOrUpdateCopy( object, id );
+ }
+
+ public void update(String entityName, Object object, Serializable id) throws HibernateException {
+ session.update( entityName, object, id );
+ }
+
+ public void update(Object object, Serializable id) throws HibernateException {
+ session.update( object, id );
+ }
+
+ public Transaction beginTransaction() throws HibernateException {
+ return session.beginTransaction();
+ }
+
+ public void cancelQuery() throws HibernateException {
+ session.cancelQuery();
+ }
+
+ public void clear() {
+ session.clear();
+ }
+
+ public Connection close() throws HibernateException {
+ return session.close();
+ }
+
+ public Connection connection() throws HibernateException {
+ return session.connection();
+ }
+
+ public boolean contains(Object object) {
+ return session.contains( object );
+ }
+
+ public Criteria createCriteria(String entityName) {
+ return session.createCriteria( entityName );
+ }
+
+ public Criteria createCriteria(String entityName, String alias) {
+ return session.createCriteria( entityName, alias );
+ }
+
+ public Criteria createCriteria(Class persistentClass) {
+ return session.createCriteria( persistentClass );
+ }
+
+ public Criteria createCriteria(Class persistentClass, String alias) {
+ return session.createCriteria( persistentClass, alias );
+ }
+
+ public Query createFilter(Object collection, String queryString) throws HibernateException {
+ return session.createFilter( collection, queryString );
+ }
+
+ public Query createQuery(String queryString) throws HibernateException {
+ return session.createQuery( queryString );
+ }
+
+ public SQLQuery createSQLQuery(String queryString) throws HibernateException {
+ return session.createSQLQuery( queryString );
+ }
+
+ public void delete(String entityName, Object object) throws HibernateException {
+ session.delete( entityName, object );
+ }
+
+ public void delete(Object object) throws HibernateException {
+ session.delete( object );
+ }
+
+ public void disableFilter(String filterName) {
+ session.disableFilter( filterName );
+ }
+
+ public Connection disconnect() throws HibernateException {
+ return session.disconnect();
+ }
+
+ public Filter enableFilter(String filterName) {
+ return session.enableFilter( filterName );
+ }
+
+ public void evict(Object object) throws HibernateException {
+ session.evict( object );
+ }
+
+ public void flush() throws HibernateException {
+ session.flush();
+ }
+
+ public Object get(Class clazz, Serializable id) throws HibernateException {
+ return session.get( clazz, id );
+ }
+
+ public Object get(Class clazz, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.get( clazz, id, lockMode );
+ }
+
+ public Object get(String entityName, Serializable id) throws HibernateException {
+ return session.get( entityName, id );
+ }
+
+ public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.get( entityName, id, lockMode );
+ }
+
+ public CacheMode getCacheMode() {
+ return session.getCacheMode();
+ }
+
+ public LockMode getCurrentLockMode(Object object) throws HibernateException {
+ return session.getCurrentLockMode( object );
+ }
+
+ public Filter getEnabledFilter(String filterName) {
+ return session.getEnabledFilter( filterName );
+ }
+
+ public EntityMode getEntityMode() {
+ return session.getEntityMode();
+ }
+
+ public String getEntityName(Object object) throws HibernateException {
+ return session.getEntityName( object );
+ }
+
+ public FlushMode getFlushMode() {
+ return session.getFlushMode();
+ }
+
+ public Serializable getIdentifier(Object object) throws HibernateException {
+ return session.getIdentifier( object );
+ }
+
+ public Query getNamedQuery(String queryName) throws HibernateException {
+ return session.getNamedQuery( queryName );
+ }
+
+ public org.hibernate.Session getSession(EntityMode entityMode) {
+ return session.getSession( entityMode );
+ }
+
+ public SessionFactory getSessionFactory() {
+ return session.getSessionFactory();
+ }
+
+ public SessionStatistics getStatistics() {
+ return session.getStatistics();
+ }
+
+ public Transaction getTransaction() {
+ return session.getTransaction();
+ }
+
+ public boolean isConnected() {
+ return session.isConnected();
+ }
+
+ public boolean isDirty() throws HibernateException {
+ return session.isDirty();
+ }
+
+ public boolean isOpen() {
+ return session.isOpen();
+ }
+
+ public Object load(String entityName, Serializable id) throws HibernateException {
+ return session.load( entityName, id );
+ }
+
+ public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.load( entityName, id, lockMode );
+ }
+
+ public void load(Object object, Serializable id) throws HibernateException {
+ session.load( object, id );
+ }
+
+ public Object load(Class theClass, Serializable id) throws HibernateException {
+ return session.load( theClass, id );
+ }
+
+ public Object load(Class theClass, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.load( theClass, id, lockMode );
+ }
+
+ public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException {
+ session.lock( entityName, object, lockMode );
+ }
+
+ public void lock(Object object, LockMode lockMode) throws HibernateException {
+ session.lock( object, lockMode );
+ }
+
+ public Object merge(String entityName, Object object) throws HibernateException {
+ return session.merge( entityName, object );
+ }
+
+ public Object merge(Object object) throws HibernateException {
+ return session.merge( object );
+ }
+
+ public void persist(String entityName, Object object) throws HibernateException {
+ session.persist( entityName, object );
+ }
+
+ public void persist(Object object) throws HibernateException {
+ session.persist( object );
+ }
+
+ public void reconnect() throws HibernateException {
+ session.reconnect();
+ }
+
+ public void reconnect(Connection connection) throws HibernateException {
+ session.reconnect( connection );
+ }
+
+ public void refresh(Object object) throws HibernateException {
+ session.refresh( object );
+ }
+
+ public void refresh(Object object, LockMode lockMode) throws HibernateException {
+ session.refresh( object, lockMode );
+ }
+
+ public void replicate(String entityName, Object object, ReplicationMode replicationMode) throws HibernateException {
+ session.replicate( entityName, object, replicationMode );
+ }
+
+ public void replicate(Object object, ReplicationMode replicationMode) throws HibernateException {
+ session.replicate( object, replicationMode );
+ }
+
+ public Serializable save(String entityName, Object object) throws HibernateException {
+ return session.save( entityName, object );
+ }
+
+ public Serializable save(Object object) throws HibernateException {
+ return session.save( object );
+ }
+
+ public void saveOrUpdate(String entityName, Object object) throws HibernateException {
+ session.saveOrUpdate( entityName, object );
+ }
+
+ public void saveOrUpdate(Object object) throws HibernateException {
+ session.saveOrUpdate( object );
+ }
+
+ public void setCacheMode(CacheMode cacheMode) {
+ session.setCacheMode( cacheMode );
+ }
+
+ public void setFlushMode(FlushMode flushMode) {
+ session.setFlushMode( flushMode );
+ }
+
+ public void setReadOnly(Object entity, boolean readOnly) {
+ session.setReadOnly( entity, readOnly );
+ }
+
+ public void update(String entityName, Object object) throws HibernateException {
+ session.update( entityName, object );
+ }
+
+ public void update(Object object) throws HibernateException {
+ session.update( object );
+ }
+}
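
Illustrative note (not part of this patch): an end-to-end usage sketch wrapping a plain Session (the constructor casts to SessionImpl internally, so a standard Hibernate session is expected). The "body" field name and the Essay entity reuse the hypothetical example sketched earlier.

import java.util.List;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.hibernate.Session;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.impl.FullTextSessionImpl;

public class FullTextSearchExample {
	public static List search(Session session, String terms) throws Exception {
		FullTextSession fullTextSession = new FullTextSessionImpl( session );
		QueryParser parser = new QueryParser( "body", new StandardAnalyzer() );
		org.apache.lucene.search.Query luceneQuery = parser.parse( terms );
		//restrict results to Essay and its indexed subclasses
		return fullTextSession.createFullTextQuery( luceneQuery, Essay.class ).list();
	}
}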
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/EntityInfo.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/EntityInfo.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/EntityInfo.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,12 @@
+//$Id: $
+package org.hibernate.search.query;
+
+import java.io.Serializable;
+
+/**
+ * @author Emmanuel Bernard
+ */
+class EntityInfo {
+ public Class clazz;
+ public Serializable id;
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,296 @@
+//$Id: $
+package org.hibernate.search.query;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Hits;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MultiSearcher;
+import org.apache.lucene.search.Searcher;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.Directory;
+import org.hibernate.Hibernate;
+import org.hibernate.HibernateException;
+import org.hibernate.LockMode;
+import org.hibernate.Query;
+import org.hibernate.ScrollMode;
+import org.hibernate.ScrollableResults;
+import org.hibernate.Session;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.engine.query.ParameterMetadata;
+import org.hibernate.impl.AbstractQueryImpl;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.util.ContextHelper;
+
+/**
+ * @author Emmanuel Bernard
+ */
+//TODO implements setParameter()
+public class FullTextQueryImpl extends AbstractQueryImpl {
+ private static final Log log = LogFactory.getLog( FullTextQueryImpl.class );
+ private org.apache.lucene.search.Query luceneQuery;
+ private Class[] classes;
+ private Set<Class> classesAndSubclasses;
+ private Integer firstResult;
+ private Integer maxResults;
+ private int resultSize;
+
+ /**
+ * classes must be immutable
+ */
+ public FullTextQueryImpl(org.apache.lucene.search.Query query, Class[] classes, SessionImplementor session,
+ ParameterMetadata parameterMetadata) {
+ //TODO handle flushMode
+ super( query.toString(), null, session, parameterMetadata );
+ this.luceneQuery = query;
+ this.classes = classes;
+ }
+
+ /**
+ * Return an iterator over the results.
+ * Retrieve the objects one by one (each is initialized during the next() operation)
+ */
+ public Iterator iterate() throws HibernateException {
+ //implement an iterator which keeps the id/class for each hit and gets the object on demand
+ //because I can't keep the searcher, and hence the hits, open: I don't have any hook to know when the
+ //user stops using it
+ //scrollable is better in this area
+
+ FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
+ //find the directories
+ Searcher searcher = buildSearcher( listener );
+ try {
+ org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
+ Hits hits = searcher.search( query );
+ setResultSize( hits );
+ int first = first();
+ int max = max( first, hits );
+ List<EntityInfo> entityInfos = new ArrayList<EntityInfo>( max - first + 1 );
+ for ( int index = first; index <= max; index++ ) {
+ Document document = hits.doc( index );
+ EntityInfo entityInfo = new EntityInfo();
+ entityInfo.clazz = DocumentBuilder.getDocumentClass( document );
+ entityInfo.id = DocumentBuilder.getDocumentId( listener, entityInfo.clazz, document );
+ entityInfos.add( entityInfo );
+ }
+ return new IteratorImpl( entityInfos, (Session) this.session );
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Unable to query Lucene index", e );
+ }
+ finally {
+ if ( searcher != null ) try {
+ searcher.close();
+ }
+ catch (IOException e) {
+ log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
+ }
+ }
+ }
+
+ public ScrollableResults scroll() throws HibernateException {
+ //keep the searcher open until the resultset is closed
+ FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
+ //find the directories
+ Searcher searcher = buildSearcher( listener );
+ Hits hits;
+ try {
+ org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
+ hits = searcher.search( query );
+ setResultSize( hits );
+ int first = first();
+ int max = max( first, hits );
+ return new ScrollableResultsImpl( searcher, hits, first, max, (Session) this.session, listener );
+ }
+ catch (IOException e) {
+ try {
+ if ( searcher != null ) searcher.close();
+ }
+ catch (IOException ee) {
+ //we have the initial issue already
+ }
+ throw new HibernateException( "Unable to query Lucene index", e );
+ }
+ }
+
+ public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException {
+ //TODO think about this scrollmode
+ return scroll();
+ }
+
+ public List list() throws HibernateException {
+ FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
+ //find the directories
+ Searcher searcher = buildSearcher( listener );
+ Hits hits;
+ try {
+ org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
+ hits = searcher.search( query );
+ setResultSize( hits );
+ int first = first();
+ int max = max( first, hits );
+ List result = new ArrayList( max - first + 1 );
+ Session sess = (Session) this.session;
+ for ( int index = first; index <= max; index++ ) {
+ Document document = hits.doc( index );
+ Class clazz = DocumentBuilder.getDocumentClass( document );
+ Serializable id = DocumentBuilder.getDocumentId( listener, clazz, document );
+ result.add( sess.load( clazz, id ) );
+ //use load to benefit from the batch-size
+ //we don't face proxy casting issues since the exact class is extracted from the index
+ }
+ //then initialize the objects
+ for ( Object element : result ) {
+ Hibernate.initialize( element );
+ }
+ return result;
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Unable to query Lucene index", e );
+ }
+ finally {
+ if ( searcher != null ) try {
+ searcher.close();
+ }
+ catch (IOException e) {
+ log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
+ }
+ }
+ }
+
+ private org.apache.lucene.search.Query filterQueryByClasses(org.apache.lucene.search.Query luceneQuery) {
+ //A query filter is more practical than manual class filtering after the query (especially on scrollable result sets);
+ //it also probably minimises the memory footprint
+ if ( classesAndSubclasses == null ) {
+ return luceneQuery;
+ }
+ else {
+ BooleanQuery classFilter = new BooleanQuery();
+ //annihilate the scoring impact of DocumentBuilder.CLASS_FIELDNAME
+ classFilter.setBoost( 0 );
+ for ( Class clazz : classesAndSubclasses ) {
+ Term t = new Term( DocumentBuilder.CLASS_FIELDNAME, clazz.getName() );
+ TermQuery termQuery = new TermQuery( t );
+ classFilter.add( termQuery, BooleanClause.Occur.SHOULD );
+ }
+ BooleanQuery filteredQuery = new BooleanQuery();
+ filteredQuery.add( luceneQuery, BooleanClause.Occur.MUST );
+ filteredQuery.add( classFilter, BooleanClause.Occur.MUST );
+ return filteredQuery;
+ }
+ }
+
+ private int max(int first, Hits hits) {
+ return maxResults == null ?
+ hits.length() - 1 :
+ maxResults + first < hits.length() ?
+ first + maxResults - 1 :
+ hits.length() - 1;
+ }
+
+ private int first() {
+ return firstResult != null ?
+ firstResult :
+ 0;
+ }
+
+ //TODO classesAndSubclasses is set by side effect here, which is a mismatch with the Searcher being returned; fix that.
+ private Searcher buildSearcher(FullTextIndexEventListener listener) {
+ Map<Class, DocumentBuilder<Object>> builders = listener.getDocumentBuilders();
+ Set<Directory> directories = new HashSet<Directory>();
+ if ( classes == null || classes.length == 0 ) {
+ //no class means all classes
+ for ( DocumentBuilder builder : builders.values() ) {
+ directories.add( builder.getDirectoryProvider().getDirectory() );
+ }
+ classesAndSubclasses = null;
+ }
+ else {
+ Set<Class> involvedClasses = new HashSet<Class>( classes.length );
+ Collections.addAll( involvedClasses, classes );
+ for ( Class clazz : classes ) {
+ DocumentBuilder builder = builders.get( clazz );
+ if ( builder != null ) involvedClasses.addAll( builder.getMappedSubclasses() );
+ }
+ for ( Class clazz : involvedClasses ) {
+ DocumentBuilder builder = builders.get( clazz );
+ //TODO should we rather choose a polymorphic path and allow non-mapped entities?
+ if ( builder == null ) throw new HibernateException( "Not a mapped entity: " + clazz );
+ directories.add( builder.getDirectoryProvider().getDirectory() );
+ }
+ classesAndSubclasses = involvedClasses;
+ }
+
+ //set up the searcher
+ Searcher searcher;
+ int dirNbr = directories.size();
+ if ( dirNbr > 1 ) {
+ try {
+ IndexSearcher[] searchers = new IndexSearcher[dirNbr];
+ Iterator<Directory> it = directories.iterator();
+ for ( int index = 0; index < dirNbr; index++ ) {
+ searchers[index] = new IndexSearcher( it.next() );
+ }
+ searcher = new MultiSearcher( searchers );
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Unable to read Lucene directory", e );
+ }
+ }
+ else {
+ try {
+ searcher = new IndexSearcher( directories.iterator().next() );
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Unable to read Lucene directory", e );
+ }
+ }
+ return searcher;
+ }
+
+ private void setResultSize(Hits hits) {
+ resultSize = hits.length();
+ }
+
+ //FIXME does it make sense
+ public int resultSize() {
+ return this.resultSize;
+ }
+
+ public Query setFirstResult(int firstResult) {
+ this.firstResult = firstResult;
+ return this;
+ }
+
+ public Query setMaxResults(int maxResults) {
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public int executeUpdate() throws HibernateException {
+ throw new HibernateException( "Not supported operation" );
+ }
+
+ public Query setLockMode(String alias, LockMode lockMode) {
+ return null;
+ }
+
+ protected Map getLockModes() {
+ return null;
+ }
+}
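
For orientation, a minimal usage sketch of the query class above, restricted to calls that appear elsewhere in this patch (Search.createFullTextSession, createFullTextQuery and the pagination setters); the Lucene query and field name are illustrative:

    org.apache.lucene.search.Query luceneQuery =
            new TermQuery( new Term( "title", "action" ) );
    FullTextSession ftSession = Search.createFullTextSession( session );
    org.hibernate.Query query = ftSession.createFullTextQuery( luceneQuery );
    query.setFirstResult( 0 ).setMaxResults( 10 );
    List results = query.list(); //entities loaded via session.load(), then initialized
    //query.iterate() and query.scroll() are the lazier alternatives shown above
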
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,40 @@
+//$Id: $
+package org.hibernate.search.query;
+
+import java.util.Iterator;
+import java.util.List;
+
+import org.hibernate.Session;
+
+/**
+ * @author Emmanuel Bernard
+ */
+//TODO load the next batch-size elements to benefit from batch-size
+public class IteratorImpl implements Iterator {
+
+ private final List<EntityInfo> entityInfos;
+ private final Session session;
+ private int index = 0;
+ private final int size;
+
+ public IteratorImpl(List<EntityInfo> entityInfos, Session session) {
+ this.entityInfos = entityInfos;
+ this.session = session;
+ this.size = entityInfos.size();
+ }
+
+ public boolean hasNext() {
+ return index < size;
+ }
+
+ public Object next() {
+ Object object = session.get( entityInfos.get( index ).clazz, entityInfos.get( index ).id );
+ index++;
+ return object;
+ }
+
+ public void remove() {
+ //TODO this is theoretically doable
+ throw new UnsupportedOperationException( "Cannot remove from a lucene query iterator" );
+ }
+}
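
A sketch of how this iterator is consumed through FullTextQueryImpl.iterate() above, assuming the ftSession and luceneQuery from the earlier sketch; each element triggers one session.get() on demand:

    Iterator it = ftSession.createFullTextQuery( luceneQuery ).iterate();
    while ( it.hasNext() ) {
        Object entity = it.next(); //loaded lazily, one session.get() per hit
    }
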
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,218 @@
+//$Id: $
+package org.hibernate.search.query;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.sql.Blob;
+import java.sql.Clob;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.Locale;
+import java.util.TimeZone;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.search.Hits;
+import org.apache.lucene.search.Searcher;
+import org.hibernate.HibernateException;
+import org.hibernate.ScrollableResults;
+import org.hibernate.Session;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.type.Type;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ScrollableResultsImpl implements ScrollableResults {
+ private final Searcher searcher;
+ private final Hits hits;
+ private final int first;
+ private final int max;
+ private int current;
+ private final Session session;
+ private final FullTextIndexEventListener listener;
+ private EntityInfo[] entityInfos;
+
+ public ScrollableResultsImpl(
+ Searcher searcher, Hits hits, int first, int max, Session session, FullTextIndexEventListener listener
+ ) {
+ this.searcher = searcher;
+ this.hits = hits;
+ this.first = first;
+ this.max = max;
+ this.current = first;
+ this.session = session;
+ this.listener = listener;
+ entityInfos = new EntityInfo[max - first + 1];
+ }
+
+ public boolean next() throws HibernateException {
+ return ++current <= max;
+ }
+
+ public boolean previous() throws HibernateException {
+ return --current >= first;
+ }
+
+ public boolean scroll(int i) throws HibernateException {
+ current = current + i;
+ return current >= first && current <= max;
+ }
+
+ public boolean last() throws HibernateException {
+ current = max;
+ return max >= first;
+ }
+
+ public boolean first() throws HibernateException {
+ current = first;
+ return max >= first;
+ }
+
+ public void beforeFirst() throws HibernateException {
+ current = first - 1;
+ }
+
+ public void afterLast() throws HibernateException {
+ current = max + 1;
+ }
+
+ public boolean isFirst() throws HibernateException {
+ return current == first;
+ }
+
+ public boolean isLast() throws HibernateException {
+ return current == max;
+ }
+
+ public void close() throws HibernateException {
+ try {
+ searcher.close();
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Unable to close Lucene searcher", e );
+ }
+ }
+
+ public Object[] get() throws HibernateException {
+ if ( current < first || current > max ) return null; //or exception?
+ EntityInfo info = entityInfos[current - first];
+ if ( info == null ) {
+ info = new EntityInfo();
+ Document document = null;
+ try {
+ document = hits.doc( current );
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Unable to read Lucene hits[" + current + "]", e );
+ }
+ info.clazz = DocumentBuilder.getDocumentClass( document );
+ //FIXME should check that clazz matches classes, but this complicates the firstResult/maxResult handling a lot
+ info.id = DocumentBuilder.getDocumentId( listener, info.clazz, document );
+ entityInfos[current - first] = info;
+ }
+ return new Object[]{
+ session.get( info.clazz, info.id )
+ };
+ }
+
+ public Object get(int i) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Type getType(int i) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Integer getInteger(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Long getLong(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Float getFloat(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Boolean getBoolean(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Double getDouble(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Short getShort(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Byte getByte(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Character getCharacter(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public byte[] getBinary(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public String getText(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Blob getBlob(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Clob getClob(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public String getString(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public BigDecimal getBigDecimal(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public BigInteger getBigInteger(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Date getDate(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Locale getLocale(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public Calendar getCalendar(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public TimeZone getTimeZone(int col) throws HibernateException {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public int getRowNumber() throws HibernateException {
+ if ( max < first ) return -1;
+ return current - first;
+ }
+
+ public boolean setRowNumber(int rowNumber) throws HibernateException {
+ if ( rowNumber >= 0 ) {
+ current = first + rowNumber;
+ }
+ else {
+ current = max + rowNumber + 1; //negative row numbers count from the end, -1 being the last row
+ }
+ return current >= first && current <= max;
+ }
+}
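
A short sketch of the cursor semantics above, for a hypothetical window of 5 hits (rows 0..4 relative to firstResult). Note that the constructor positions the cursor on the first row, so a next() loop should rewind first, and setRowNumber() accepts negative indexes counted from the end:

    ScrollableResults sr = query.scroll();
    sr.beforeFirst();
    while ( sr.next() ) {
        Object entity = sr.get()[0]; //one entity per row, fetched via session.get()
    }
    sr.setRowNumber( -1 );           //last row of the window
    int row = sr.getRowNumber();     //4 in this 5-hit example
    sr.close();                      //mandatory: releases the underlying Searcher
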
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProvider.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProvider.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProvider.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,30 @@
+//$Id: $
+package org.hibernate.search.store;
+
+import java.util.Properties;
+
+import org.apache.lucene.store.Directory;
+
+
+/**
+ * Set up and provide a Lucene <code>Directory</code>.
+ * <code>equals()</code> and <code>hashCode()</code> must guarantee equality
+ * between two providers pointing to the same underlying Lucene store.
+ * Implementations must be thread safe regarding <code>getDirectory()</code>
+ * calls.
+ *
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ */
+public interface DirectoryProvider<TDirectory extends Directory> {
+ /**
+ * Get the information needed to initialize the directory and build its hashCode.
+ */
+ void initialize(String directoryProviderName, Properties properties);
+
+ /**
+ * Returns an initialized Lucene Directory. This method call <b>must</b> be thread-safe.
+ */
+ TDirectory getDirectory();
+}
+
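
The equals()/hashCode() contract above matters because DirectoryProviderFactory (next file) deduplicates providers via providers.indexOf(provider): two entities indexed into the same store must resolve to one shared provider. A sketch of the effect, assuming a factory, a Configuration cfg, and XClass handles for the two test entities later in this patch that both declare @Indexed(index = "Documents"):

    DirectoryProviderFactory factory = new DirectoryProviderFactory();
    DirectoryProvider<?> p1 = factory.createDirectoryProvider( documentXClass, cfg );
    DirectoryProvider<?> p2 = factory.createDirectoryProvider( alternateDocumentXClass, cfg );
    //p1 == p2: equal providers are collapsed to a single shared instance
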
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,128 @@
+//$Id: $
+package org.hibernate.search.store;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.hibernate.HibernateException;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.mapping.PersistentClass;
+import org.hibernate.reflection.ReflectionManager;
+import org.hibernate.reflection.XClass;
+import org.hibernate.util.ReflectHelper;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Create a Lucene directory provider
+ * <p/>
+ * Lucene directory providers are configured through properties
+ * - hibernate.search.default.* and
+ * - hibernate.search.<indexname>.*
+ * <p/>
+ * <indexname> properties take precedence over the defaults
+ * <p/>
+ * The implementation is described by
+ * hibernate.search.[default|indexname].directory_provider
+ * <p/>
+ * If none is defined, FSDirectoryProvider is used as the default
+ *
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ */
+public class DirectoryProviderFactory {
+ public List<DirectoryProvider<?>> providers = new ArrayList<DirectoryProvider<?>>();
+ private static String LUCENE_PREFIX = "hibernate.search.";
+ private static String LUCENE_DEFAULT = LUCENE_PREFIX + "default.";
+ private static String DEFAULT_DIRECTORY_PROVIDER = FSDirectoryProvider.class.getName();
+
+ //TODO for the public?
+ //public DirectoryProvider<?> createDirectoryProvider(XClass entity, Configuration cfg) {
+
+ public DirectoryProvider<?> createDirectoryProvider(XClass entity, Configuration cfg) {
+ //get properties
+ String directoryProviderName = getDirectoryProviderName( entity, cfg );
+ Properties indexProps = getDirectoryProperties( cfg, directoryProviderName );
+
+ //set up the directory
+ String className = indexProps.getProperty( "directory_provider" );
+ if ( StringHelper.isEmpty( className ) ) {
+ className = DEFAULT_DIRECTORY_PROVIDER;
+ }
+ DirectoryProvider<?> provider = null;
+ try {
+ @SuppressWarnings( "unchecked" )
+ Class<DirectoryProvider> directoryClass = ReflectHelper.classForName(
+ className, DirectoryProviderFactory.class
+ );
+ provider = directoryClass.newInstance();
+ }
+ catch (Exception e) {
+ throw new HibernateException( "Unable to instantiate directory provider: " + className, e );
+ }
+ provider.initialize( directoryProviderName, indexProps );
+ int index = providers.indexOf( provider );
+ if ( index != -1 ) {
+ //share the same Directory provider for the same underlying store
+ return providers.get( index );
+ }
+ else {
+ providers.add( provider );
+ return provider;
+ }
+ }
+
+ private static Properties getDirectoryProperties(Configuration cfg, String directoryProviderName) {
+ Properties props = cfg.getProperties();
+ //the trailing dot ensures per-index keys are stripped to the same form as the default keys
+ String indexName = LUCENE_PREFIX + directoryProviderName + ".";
+ Properties indexProps = new Properties();
+ Properties indexSpecificProps = new Properties();
+ for ( Map.Entry entry : props.entrySet() ) {
+ String key = (String) entry.getKey();
+ if ( key.startsWith( LUCENE_DEFAULT ) ) {
+ indexProps.setProperty( key.substring( LUCENE_DEFAULT.length() ), (String) entry.getValue() );
+ }
+ else if ( key.startsWith( indexName ) ) {
+ indexSpecificProps.setProperty( key.substring( indexName.length() ), (String) entry.getValue() );
+ }
+ }
+ indexProps.putAll( indexSpecificProps );
+ return indexProps;
+ }
+
+ public static String getDirectoryProviderName(XClass clazz, Configuration cfg) {
+ //yuk
+ ReflectionManager reflectionManager =
+ ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
+ //get the most specialized (i.e. subclass > superclass) non-default index name;
+ //if none is found, extract the name from the most generic (superclass > subclass) @Indexed class in the hierarchy
+ //FIXME I'm inclined to get rid of the default value
+ PersistentClass pc = cfg.getClassMapping( clazz.getName() );
+ XClass rootIndex = null;
+ do {
+ XClass currentClazz = reflectionManager.toXClass( pc.getMappedClass() );
+ Indexed indexAnn = currentClazz.getAnnotation( Indexed.class );
+ if ( indexAnn != null ) {
+ if ( indexAnn.index().length() != 0 ) {
+ return indexAnn.index();
+ }
+ else {
+ rootIndex = currentClazz;
+ }
+ }
+ pc = pc.getSuperclass();
+ }
+ while ( pc != null );
+ //nobody out there declares a non-default @Indexed.index
+ if ( rootIndex != null ) {
+ return rootIndex.getName();
+ }
+ else {
+ throw new HibernateException(
+ "Trying to extract the index name from a non @Indexed class: " + clazz.getName() );
+ }
+ }
+}
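
A configuration sketch of the precedence rules implemented above, reusing the property keys from the tests later in this patch (values illustrative); the per-index key overrides the matching hibernate.search.default.* entry:

    //defaults applied to every index
    cfg.setProperty( "hibernate.search.default.directory_provider",
            RAMDirectoryProvider.class.getName() );
    cfg.setProperty( "hibernate.search.default.indexBase", "./indextemp" );
    //per-index override: the Clock index goes to the filesystem instead
    cfg.setProperty( "hibernate.search.Clock.directory_provider",
            FSDirectoryProvider.class.getName() );
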
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/FSDirectoryProvider.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/FSDirectoryProvider.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/FSDirectoryProvider.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,80 @@
+//$Id: $
+package org.hibernate.search.store;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.FSDirectory;
+import org.hibernate.HibernateException;
+
+/**
+ * Use a Lucene FSDirectory.
+ * The base directory is given by hibernate.search.<index>.indexBase;
+ * the index is created in <base directory>/<index name>.
+ *
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ */
+public class FSDirectoryProvider implements DirectoryProvider<FSDirectory> {
+ private FSDirectory directory;
+ private static Log log = LogFactory.getLog( FSDirectoryProvider.class );
+ private String indexName;
+
+ public void initialize(String directoryProviderName, Properties properties) {
+ String indexBase = properties.getProperty( "indexBase", "." );
+ File indexDir = new File( indexBase );
+
+ if ( !( indexDir.exists() && indexDir.isDirectory() ) ) {
+ //TODO create the directory?
+ throw new HibernateException( MessageFormat.format( "Index directory does not exist: {0}", indexBase ) );
+ }
+ if ( !indexDir.canWrite() ) {
+ throw new HibernateException( "Cannot write into index directory: " + indexBase );
+ }
+ log.info( "Setting index dir to " + indexDir );
+
+ File file = new File( indexDir, directoryProviderName );
+
+ try {
+ boolean create = !file.exists();
+ indexName = file.getCanonicalPath();
+ directory = FSDirectory.getDirectory( indexName, create );
+ if ( create ) {
+ IndexWriter iw = new IndexWriter( directory, new StandardAnalyzer(), create );
+ iw.close();
+ }
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Unable to initialize index: " + directoryProviderName, e );
+ }
+
+ }
+
+ public FSDirectory getDirectory() {
+ return directory;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ // this is technically broken since the value changes after the initialize() call,
+ // but from a practical POV this is fine since we only call this method
+ // after initialize()
+ if ( obj == this ) return true;
+ if ( obj == null || !( obj instanceof FSDirectoryProvider ) ) return false;
+ return indexName.equals( ( (FSDirectoryProvider) obj ).indexName );
+ }
+
+ @Override
+ public int hashCode() {
+ // this is technically broken since the value changes after the initialize() call,
+ // but from a practical POV this is fine since we only call this method
+ // after initialize()
+ return indexName.hashCode();
+ }
+}
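
Concretely, the provider above resolves the index location as <indexBase>/<directoryProviderName>: the per-index subdirectory is created on first use, but indexBase itself must already exist. A sketch using the indexBase wired by FSDirectoryTest below (path illustrative):

    //with that configuration, @Indexed(index = "Documents") resolves to
    //./indextemp/Documents; the index can then be opened directly:
    IndexReader reader = IndexReader.open( new File( "./indextemp", "Documents" ) );
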
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/RAMDirectoryProvider.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/RAMDirectoryProvider.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/RAMDirectoryProvider.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,57 @@
+//$Id: $
+package org.hibernate.search.store;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.RAMDirectory;
+import org.hibernate.HibernateException;
+
+/**
+ * Use a Lucene RAMDirectory
+ *
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ */
+public class RAMDirectoryProvider implements DirectoryProvider<RAMDirectory> {
+
+ private RAMDirectory directory;
+ private String indexName;
+
+ public void initialize(String directoryProviderName, Properties properties) {
+ indexName = directoryProviderName;
+ directory = new RAMDirectory();
+ try {
+ IndexWriter iw = new IndexWriter( directory, new StandardAnalyzer(), true );
+ iw.close();
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Unable to initialize index: " + indexName, e );
+ }
+ }
+
+ public RAMDirectory getDirectory() {
+ return directory;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ // this is technically broken since the value changes after the initialize() call,
+ // but from a practical POV this is fine since we only call this method
+ // after initialize()
+ if ( obj == this ) return true;
+ if ( obj == null || !( obj instanceof RAMDirectoryProvider ) ) return false;
+ return indexName.equals( ( (RAMDirectoryProvider) obj ).indexName );
+ }
+
+ @Override
+ public int hashCode() {
+ // this is technically broken since the value changes after the initialize() call,
+ // but from a practical POV this is fine since we only call this method
+ // after initialize()
+ return indexName.hashCode();
+ }
+
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/BinderHelper.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/BinderHelper.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/BinderHelper.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,23 @@
+//$Id: $
+package org.hibernate.search.util;
+
+import org.hibernate.reflection.XMember;
+import org.hibernate.util.StringHelper;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class BinderHelper {
+
+ private BinderHelper() {
+ }
+
+ /**
+ * Get the attribute name out of the member, unless overridden by <code>name</code>.
+ */
+ public static String getAttributeName(XMember member, String name) {
+ return StringHelper.isNotEmpty( name ) ?
+ name :
+ member.getName(); //explicit field name
+ }
+}
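
For illustration, applying the helper above to the Document mappings later in this patch (the XMember handles are assumed to be in scope):

    //@Field(name = "Abstract") on getSummary(): the explicit name wins
    String a = BinderHelper.getAttributeName( summaryGetter, "Abstract" ); //"Abstract"
    //plain @Field on getTitle(): falls back to the member (property) name
    String b = BinderHelper.getAttributeName( titleGetter, "" );           //"title"
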
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,27 @@
+//$Id: $
+package org.hibernate.search.util;
+
+import org.hibernate.HibernateException;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class ContextHelper {
+
+ public static FullTextIndexEventListener getLuceneEventListener(SessionImplementor session) {
+ PostInsertEventListener[] listeners = session.getListeners().getPostInsertEventListeners();
+ FullTextIndexEventListener listener = null;
+ //FIXME this sucks since we mandate the event listener's use
+ for ( PostInsertEventListener candidate : listeners ) {
+ if ( candidate instanceof FullTextIndexEventListener ) {
+ listener = (FullTextIndexEventListener) candidate;
+ break;
+ }
+ }
+ if ( listener == null ) throw new HibernateException( "Lucene event listener not initialized" );
+ return listener;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/AlternateDocument.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/AlternateDocument.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/AlternateDocument.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,81 @@
+//$Id: $
+package org.hibernate.search.test;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Lob;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Boost;
+
+/**
+ * Example of 2 entities mapped in the same index
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index = "Documents")
+public class AlternateDocument {
+ private Long id;
+ private String title;
+ private String summary;
+ private String text;
+
+ AlternateDocument() {
+ }
+
+ public AlternateDocument(Long id, String title, String summary, String text) {
+ super();
+ this.id = id;
+ this.summary = summary;
+ this.text = text;
+ this.title = title;
+ }
+
+ @Id
+ //@Keyword(id = true)
+ @DocumentId()
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ //@Text
+ @Field( name = "alt_title", store = Store.YES, index = Index.TOKENIZED )
+ @Boost(2)
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ //@Unstored(name = "Abstract")
+ @Field( name="Abstract", store = Store.NO, index = Index.TOKENIZED )
+ public String getSummary() {
+ return summary;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+
+ @Lob
+ //@Unstored
+ @Field( store = Store.NO, index = Index.TOKENIZED )
+ public String getText() {
+ return text;
+ }
+
+ public void setText(String text) {
+ this.text = text;
+ }
+}
+
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/Clock.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/Clock.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/Clock.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,41 @@
+//$Id: $
+package org.hibernate.search.test;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Text;
+import org.hibernate.search.annotations.Keyword;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Clock {
+ private Integer id;
+ private String brand;
+
+ //Hibernate needs a no-arg constructor to instantiate the entity
+ Clock() {
+ }
+
+ public Clock(Integer id, String brand) {
+ this.id = id;
+ this.brand = brand;
+ }
+
+ @Text public String getBrand() {
+ return brand;
+ }
+
+ public void setBrand(String brand) {
+ this.brand = brand;
+ }
+
+ @Id @Keyword
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/Document.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/Document.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/Document.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,77 @@
+//$Id: Document.java 10742 2006-11-07 01:03:16Z epbernard $
+package org.hibernate.search.test;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.Lob;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Store;
+
+@Entity
+@Indexed(index = "Documents")
+public class Document {
+ private Long id;
+ private String title;
+ private String summary;
+ private String text;
+
+ Document() {
+ }
+
+ public Document(String title, String summary, String text) {
+ super();
+ this.summary = summary;
+ this.text = text;
+ this.title = title;
+ }
+
+ @Id
+ @GeneratedValue
+ //@Keyword(id = true)
+ @DocumentId
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ //@Text
+ @Field( store = Store.YES, index = Index.TOKENIZED )
+ @Boost(2)
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ //@Unstored(name = "Abstract")
+ @Field( name="Abstract", store = Store.NO, index = Index.TOKENIZED )
+ public String getSummary() {
+ return summary;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+
+ @Lob
+ //@Unstored
+ @Field( store = Store.NO, index = Index.TOKENIZED )
+ public String getText() {
+ return text;
+ }
+
+ public void setText(String text) {
+ this.text = text;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,193 @@
+//$Id: LuceneTest.java 10014 2006-06-12 09:56:27 -0700 (lun., 12 juin 2006) epbernard $
+package org.hibernate.search.test;
+
+import java.io.File;
+import java.util.List;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Hits;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.hibernate.Session;
+import org.hibernate.event.PostDeleteEventListener;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.event.PostUpdateEventListener;
+import org.hibernate.search.Environment;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.event.FullTextIndexEventListener;
+
+/**
+ * @author Gavin King
+ */
+public class FSDirectoryTest extends TestCase {
+
+
+ protected void setUp() throws Exception {
+ File sub = getBaseIndexDir();
+ sub.mkdir();
+ File[] files = sub.listFiles();
+ for (File file : files) {
+ if ( file.isDirectory() ) {
+ delete( file );
+ }
+ }
+ //super.setUp(); //we need a fresh session factory each time for index set up
+ buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ }
+
+ private File getBaseIndexDir() {
+ File current = new File( "." );
+ File sub = new File( current, "indextemp" );
+ return sub;
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ File sub = getBaseIndexDir();
+ delete( sub );
+ }
+
+ private void delete(File sub) {
+ if ( sub.isDirectory() ) {
+ for ( File file : sub.listFiles() ) {
+ delete( file );
+ }
+ sub.delete();
+ }
+ else {
+ sub.delete();
+ }
+ }
+
+ public void testEventIntegration() throws Exception {
+
+
+ Session s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.persist(
+ new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" )
+ );
+ s.getTransaction().commit();
+ s.close();
+ IndexReader reader = IndexReader.open( new File( getBaseIndexDir(), "Documents" ) );
+ try {
+ int num = reader.numDocs();
+ assertEquals( 1, num );
+ TermDocs docs = reader.termDocs( new Term( "Abstract", "Hibernate" ) );
+ org.apache.lucene.document.Document doc = reader.document( docs.doc() );
+ assertFalse( docs.next() );
+ docs = reader.termDocs( new Term( "Title", "Action" ) );
+ doc = reader.document( docs.doc() );
+ assertFalse( docs.next() );
+ assertEquals( "1", doc.getField( "id" ).stringValue() );
+ }
+ finally {
+ reader.close();
+ }
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ Document entity = (Document) s.get( Document.class, new Long( 1 ) );
+ entity.setSummary( "Object/relational mapping with EJB3" );
+ s.persist( new Document( "Seam in Action", "", "blah blah blah blah" ) );
+ s.getTransaction().commit();
+ s.close();
+
+ reader = IndexReader.open( new File( getBaseIndexDir(), "Documents" ) );
+ try {
+ int num = reader.numDocs();
+ assertEquals( 2, num );
+ TermDocs docs = reader.termDocs( new Term( "Abstract", "EJB3" ) );
+ org.apache.lucene.document.Document doc = reader.document( docs.doc() );
+ assertFalse( docs.next() );
+ }
+ finally {
+ reader.close();
+ }
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.delete( entity );
+ s.getTransaction().commit();
+ s.close();
+
+ reader = IndexReader.open( new File( getBaseIndexDir(), "Documents" ) );
+ try {
+ int num = reader.numDocs();
+ assertEquals( 1, num );
+ TermDocs docs = reader.termDocs( new Term( "Title", "Seam" ) );
+ org.apache.lucene.document.Document doc = reader.document( docs.doc() );
+ assertFalse( docs.next() );
+ assertEquals( "2", doc.getField( "id" ).stringValue() );
+ }
+ finally {
+ reader.close();
+ }
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.delete( s.createCriteria( Document.class ).uniqueResult() );
+ s.getTransaction().commit();
+ s.close();
+ }
+
+ public void testBoost() throws Exception {
+ Session s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.persist(
+ new Document( "Hibernate in Action", "Object and Relational", "blah blah blah" )
+ );
+ s.persist(
+ new Document( "Object and Relational", "Hibernate in Action", "blah blah blah" )
+ );
+ s.getTransaction().commit();
+ s.close();
+
+ IndexSearcher searcher = new IndexSearcher( new File( getBaseIndexDir(), "Documents" ).getCanonicalPath() );
+ try {
+ QueryParser qp = new QueryParser( "id", new StandardAnalyzer() );
+ Query query = qp.parse( "title:Action OR Abstract:Action" );
+ Hits hits = searcher.search( query );
+ assertEquals( 2, hits.length() );
+ assertTrue( hits.score( 0 ) == 2 * hits.score( 1 ) );
+ assertEquals( "Hibernate in Action", hits.doc( 0 ).get( "title" ) );
+ }
+ finally {
+ if ( searcher != null ) searcher.close();
+ }
+
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ List list = s.createQuery( "from Document" ).list();
+ for ( Document document : (List<Document>) list ) {
+ s.delete( document );
+ }
+ s.getTransaction().commit();
+ s.close();
+ }
+
+ protected Class[] getMappings() {
+ return new Class[]{
+ Document.class
+ };
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ File sub = getBaseIndexDir();
+ cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
+ cfg.setProperty( "hibernate.search.Clock.directory_provider", FSDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ FullTextIndexEventListener del = new FullTextIndexEventListener();
+ cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
+ cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
+ cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );
+ }
+
+}
+
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/RamDirectoryTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/RamDirectoryTest.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/RamDirectoryTest.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,71 @@
+//$Id: $
+package org.hibernate.search.test;
+
+import java.io.File;
+import java.util.List;
+
+import org.hibernate.Session;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Hits;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class RamDirectoryTest extends TestCase {
+
+ public void testMultipleEntitiesPerIndex() throws Exception {
+
+
+ Session s = getSessions().openSession();
+ s.getTransaction().begin();
+ Document document =
+ new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" );
+ s.persist(document);
+ s.flush();
+ s.persist(
+ new AlternateDocument( document.getId(), "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" )
+ );
+ s.getTransaction().commit();
+ s.close();
+
+ assertEquals( 2, getDocumentNbr() );
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.delete( s.get( AlternateDocument.class, document.getId() ) );
+ s.getTransaction().commit();
+ s.close();
+
+ assertEquals( 1, getDocumentNbr() );
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.delete( s.createCriteria( Document.class ).uniqueResult() );
+ s.getTransaction().commit();
+ s.close();
+ }
+
+ private int getDocumentNbr() throws Exception {
+ IndexReader reader = IndexReader.open( getDirectory( Document.class ) );
+ try {
+ return reader.numDocs();
+ }
+ finally {
+ reader.close();
+ }
+ }
+
+ protected Class[] getMappings() {
+ return new Class[]{
+ Document.class,
+ AlternateDocument.class
+ };
+ }
+
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,50 @@
+//$Id: $
+package org.hibernate.search.test;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.store.Directory;
+import org.hibernate.event.PostDeleteEventListener;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.event.PostUpdateEventListener;
+import org.hibernate.search.Environment;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.HibernateException;
+import org.hibernate.impl.SessionFactoryImpl;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class TestCase extends org.hibernate.test.annotations.TestCase {
+ protected void setUp() throws Exception {
+ //super.setUp(); //we need a fresh session factory each time for index set up
+ buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ }
+
+ protected Directory getDirectory(Class clazz) {
+ return getLuceneEventListener().getDocumentBuilders().get( clazz ).getDirectoryProvider().getDirectory();
+ }
+
+ private FullTextIndexEventListener getLuceneEventListener() {
+ PostInsertEventListener[] listeners = ( (SessionFactoryImpl) getSessions() ).getEventListeners().getPostInsertEventListeners();
+ FullTextIndexEventListener listener = null;
+ //FIXME this sucks since we mandate the event listener's use
+ for (PostInsertEventListener candidate : listeners) {
+ if (candidate instanceof FullTextIndexEventListener ) {
+ listener = (FullTextIndexEventListener) candidate;
+ break;
+ }
+ }
+ if (listener == null) throw new HibernateException("Lucene event listener not initialized");
+ return listener;
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ FullTextIndexEventListener del = new FullTextIndexEventListener();
+ cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
+ cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
+ cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TransactionTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TransactionTest.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TransactionTest.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,61 @@
+//$Id: $
+package org.hibernate.search.test;
+
+import java.io.IOException;
+
+import org.hibernate.Session;
+import org.apache.lucene.index.IndexReader;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class TransactionTest extends TestCase {
+
+ public void testTransactionCommit() throws Exception {
+ Session s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.persist(
+ new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" )
+ );
+ s.getTransaction().commit();
+ s.close();
+
+ assertEquals( "transaction.commit() should no index", 1, getDocumentNumber() );
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.persist(
+ new Document( "Java Persistence with Hibernate", "Object/relational mapping with Hibernate", "blah blah blah" )
+ );
+ s.flush();
+ s.getTransaction().rollback();
+ s.close();
+
+ assertEquals( "rollback() should not index", 1, getDocumentNumber() );
+
+ s = getSessions().openSession();
+ s.persist(
+ new Document( "Java Persistence with Hibernate", "Object/relational mapping with Hibernate", "blah blah blah" )
+ );
+ s.flush();
+ s.close();
+
+ assertEquals( "no transaction should index", 2, getDocumentNumber() );
+
+ }
+
+ private int getDocumentNumber() throws IOException {
+ IndexReader reader = IndexReader.open( getDirectory( Document.class ) );
+ try {
+ return reader.numDocs();
+ }
+ finally {
+ reader.close();
+ }
+ }
+
+
+ protected Class[] getMappings() {
+ return new Class[]{Document.class};
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,150 @@
+//$Id: $
+package org.hibernate.search.test.bridge;
+
+import java.util.Date;
+import java.util.List;
+import java.util.GregorianCalendar;
+import java.util.Calendar;
+import java.util.TimeZone;
+
+import org.hibernate.Transaction;
+import org.hibernate.cfg.Configuration;
+
+import org.hibernate.search.test.TestCase;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.analysis.SimpleAnalyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class BridgeTest extends TestCase {
+ public void testDefaultAndNullBridges() throws Exception {
+ Cloud cloud = new Cloud();
+ cloud.setDate( null );
+ cloud.setDouble1( null );
+ cloud.setDouble2( 2.1d );
+ cloud.setInt1( null );
+ cloud.setInt2( 2 );
+ cloud.setFloat1( null );
+ cloud.setFloat2( 2.1f );
+ cloud.setLong1( null );
+ cloud.setLong2( 2L );
+ cloud.setString(null);
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist(cloud);
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.createFullTextSession(s);
+ QueryParser parser = new QueryParser("id", new StandardAnalyzer() );
+ Query query;
+ List result;
+
+ query = parser.parse("double2:[2.1 TO 2.1] AND float2:[2.1 TO 2.1] AND int2:[2 TO 2.1] AND long2:[2 TO 2.1]");
+ result = session.createFullTextQuery(query).list();
+ assertEquals( "find primitives and do not fail on null", 1, result.size() );
+
+ query = parser.parse("double1:[2.1 TO 2.1] OR float1:[2.1 TO 2.1] OR int1:[2 TO 2.1] OR long1:[2 TO 2.1]");
+ result = session.createFullTextQuery(query).list();
+ assertEquals( "null elements should not be stored", 0, result.size() ); //the query is dumb because restrictive
+
+ s.delete( s.get( Cloud.class, cloud.getId() ) );
+ tx.commit();
+ s.close();
+
+ }
+
+ public void testCustomBridges() throws Exception {
+ Cloud cloud = new Cloud();
+ cloud.setCustomFieldBridge( "This is divided by 2");
+ cloud.setCustomStringBridge( "This is div by 4");
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist(cloud);
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.createFullTextSession(s);
+ QueryParser parser = new QueryParser("id", new SimpleAnalyzer() );
+ Query query;
+ List result;
+
+ query = parser.parse("customFieldBridge:This AND customStringBridge:This");
+ result = session.createFullTextQuery(query).list();
+ assertEquals( "Properties not mapped", 1, result.size() );
+
+ query = parser.parse("customFieldBridge:by AND customStringBridge:is");
+ result = session.createFullTextQuery(query).list();
+ assertEquals( "Custom types not taken into account", 0, result.size() );
+
+ s.delete( s.get( Cloud.class, cloud.getId() ) );
+ tx.commit();
+ s.close();
+
+ }
+
+ public void testDateBridge() throws Exception {
+ Cloud cloud = new Cloud();
+ Calendar c = GregorianCalendar.getInstance();
+ c.setTimeZone( TimeZone.getTimeZone( "GMT" ) ); //for the sake of tests
+ c.set(2000, 11, 15, 3, 43, 2);
+ c.set( Calendar.MILLISECOND, 5 );
+
+ Date date = new Date( c.getTimeInMillis() );
+ cloud.setDate( date ); //5 millisecond
+ cloud.setDateDay( date );
+ cloud.setDateHour( date );
+ cloud.setDateMillisecond( date );
+ cloud.setDateMinute( date );
+ cloud.setDateMonth( date );
+ cloud.setDateSecond( date );
+ cloud.setDateYear( date );
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist(cloud);
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.createFullTextSession(s);
+ QueryParser parser = new QueryParser("id", new StandardAnalyzer() );
+ Query query;
+ List result;
+
+ query = parser.parse("date:[19900101 TO 20060101]"
+ + " AND dateDay:[20001214 TO 2000121501]"
+ + " AND dateMonth:[200012 TO 20001201]"
+ + " AND dateYear:[2000 TO 200001]"
+ + " AND dateHour:[20001214 TO 2000121503]"
+ + " AND dateMinute:[20001214 TO 200012150343]"
+ + " AND dateSecond:[20001214 TO 20001215034302]"
+ + " AND dateMillisecond:[20001214 TO 20001215034302005]"
+ );
+ result = session.createFullTextQuery(query).list();
+ assertEquals( "Date not found or not property truncated", 1, result.size() );
+
+ s.delete( s.get( Cloud.class, cloud.getId() ) );
+ tx.commit();
+ s.close();
+
+ }
+ protected Class[] getMappings() {
+ return new Class[] {
+ Cloud.class
+ };
+ }
+
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.ANALYZER_CLASS, SimpleAnalyzer.class.getName() );
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/Cloud.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/Cloud.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/Cloud.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,233 @@
+//$Id: $
+package org.hibernate.search.test.bridge;
+
+import java.util.Date;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Keyword;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Text;
+import org.hibernate.search.annotations.DateBridge;
+import org.hibernate.search.annotations.FieldBridge;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.annotations.Parameter;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Cloud {
+ private int id;
+ private Long long1;
+ private long long2;
+ private Integer int1;
+ private int int2;
+ private Double double1;
+ private double double2;
+ private Float float1;
+ private float float2;
+ private String string;
+ private Date date;
+ private Date dateYear;
+ private Date dateMonth;
+ private Date dateDay;
+ private Date dateHour;
+ private Date dateMinute;
+ private Date dateSecond;
+ private Date dateMillisecond;
+ private String customFieldBridge;
+ private String customStringBridge;
+
+ @Text
+ @FieldBridge(impl = TruncateFieldBridge.class)
+ public String getCustomFieldBridge() {
+ return customFieldBridge;
+ }
+
+ public void setCustomFieldBridge(String customFieldBridge) {
+ this.customFieldBridge = customFieldBridge;
+ }
+
+ @Text
+ @FieldBridge(impl = TruncateStringBridge.class, params = @Parameter( name="dividedBy", value="4" ) )
+ public String getCustomStringBridge() {
+ return customStringBridge;
+ }
+
+ public void setCustomStringBridge(String customStringBridge) {
+ this.customStringBridge = customStringBridge;
+ }
+
+ @Id @GeneratedValue @Keyword(id=true)
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ @Keyword
+ public Long getLong1() {
+ return long1;
+ }
+
+ public void setLong1(Long long1) {
+ this.long1 = long1;
+ }
+
+ @Keyword
+ public long getLong2() {
+ return long2;
+ }
+
+ public void setLong2(long long2) {
+ this.long2 = long2;
+ }
+
+ @Keyword
+ public Integer getInt1() {
+ return int1;
+ }
+
+ public void setInt1(Integer int1) {
+ this.int1 = int1;
+ }
+
+ @Keyword
+ public int getInt2() {
+ return int2;
+ }
+
+ public void setInt2(int int2) {
+ this.int2 = int2;
+ }
+
+ @Keyword
+ public Double getDouble1() {
+ return double1;
+ }
+
+ public void setDouble1(Double double1) {
+ this.double1 = double1;
+ }
+
+ @Keyword
+ public double getDouble2() {
+ return double2;
+ }
+
+ public void setDouble2(double double2) {
+ this.double2 = double2;
+ }
+
+ @Keyword
+ public Float getFloat1() {
+ return float1;
+ }
+
+ public void setFloat1(Float float1) {
+ this.float1 = float1;
+ }
+
+ @Keyword
+ public float getFloat2() {
+ return float2;
+ }
+
+ public void setFloat2(float float2) {
+ this.float2 = float2;
+ }
+
+ @Text
+ public String getString() {
+ return string;
+ }
+
+ public void setString(String string) {
+ this.string = string;
+ }
+
+ @Keyword
+ public Date getDate() {
+ return date;
+ }
+
+ public void setDate(Date date) {
+ this.date = date;
+ }
+
+ @Keyword
+ @DateBridge( resolution = Resolution.YEAR )
+ public Date getDateYear() {
+ return dateYear;
+ }
+
+ public void setDateYear(Date dateYear) {
+ this.dateYear = dateYear;
+ }
+
+ @Keyword
+ @DateBridge( resolution = Resolution.MONTH )
+ public Date getDateMonth() {
+ return dateMonth;
+ }
+
+ public void setDateMonth(Date dateMonth) {
+ this.dateMonth = dateMonth;
+ }
+
+ @Keyword
+ @DateBridge( resolution = Resolution.DAY )
+ public Date getDateDay() {
+ return dateDay;
+ }
+
+ public void setDateDay(Date dateDay) {
+ this.dateDay = dateDay;
+ }
+
+ @Keyword
+ @DateBridge( resolution = Resolution.HOUR )
+ public Date getDateHour() {
+ return dateHour;
+ }
+
+ public void setDateHour(Date dateHour) {
+ this.dateHour = dateHour;
+ }
+
+
+ @Keyword
+ @DateBridge( resolution = Resolution.MINUTE )
+ public Date getDateMinute() {
+ return dateMinute;
+ }
+
+ public void setDateMinute(Date dateMinute) {
+ this.dateMinute = dateMinute;
+ }
+
+ @Keyword
+ @DateBridge( resolution = Resolution.SECOND )
+ public Date getDateSecond() {
+ return dateSecond;
+ }
+
+ public void setDateSecond(Date dateSecond) {
+ this.dateSecond = dateSecond;
+ }
+
+ @Keyword
+ @DateBridge( resolution = Resolution.MILLISECOND )
+ public Date getDateMillisecond() {
+ return dateMillisecond;
+ }
+
+ public void setDateMillisecond(Date dateMillisecond) {
+ this.dateMillisecond = dateMillisecond;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,43 @@
+//$Id: $
+package org.hibernate.search.test.bridge;
+
+import java.util.Date;
+import java.util.Calendar;
+import java.util.GregorianCalendar;
+import java.util.TimeZone;
+
+import org.hibernate.search.bridge.FieldBridge;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+/**
+ * Store the date in 3 different fields (year, month and day)
+ * to ease Range Query per year, month or day
+ * (eg get all the elements of december for the last 5 years)
+ *
+ * @author Emmanuel Bernard
+ */
+public class DateSplitBridge implements FieldBridge {
+ private final static TimeZone GMT = TimeZone.getTimeZone("GMT");
+
+ public void set(String name, Object value, Document document, Field.Store store, Field.Index index, Float boost) {
+ Date date = (Date) value;
+ Calendar cal = GregorianCalendar.getInstance( GMT );
+ cal.setTime( date );
+ int year = cal.get( Calendar.YEAR );
+ int month = cal.get( Calendar.MONTH ) + 1;
+ int day = cal.get( Calendar.DAY_OF_MONTH );
+ //set year
+ Field field = new Field( name + ".year", String.valueOf(year), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ //set month and pad it if needed
+ field = new Field( name + ".month", month < 10 ? "0" : "" + String.valueOf(month), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ //set day and pad it if needed
+ field = new Field( name + ".day", day < 10 ? "0" : "" + String.valueOf(day), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ }
+}
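+
+/* Hedged usage sketch (illustrative only, not part of the committed test):
+   a Date property can be mapped through this bridge with the @FieldBridge
+   annotation, producing the name.year / name.month / name.day fields
+   built above:
+
+   @FieldBridge(impl = DateSplitBridge.class)
+   public Date getDate() { ... }
+*/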
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,44 @@
+//$Id: $
+package org.hibernate.search.test.bridge;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.HashMap;
+
+import org.hibernate.search.bridge.StringBridge;
+import org.hibernate.search.bridge.ParameterizedBridge;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.search.annotations.FieldBridge;
+import org.hibernate.annotations.Parameter;
+
+/**
+ * Padding Integer bridge.
+ * All numbers will be padded with 0 to match 5 digits
+ *
+ * @author Emmanuel Bernard
+ */
+public class PaddedIntegerBridge implements TwoWayStringBridge, ParameterizedBridge {
+
+ public static String PADDING_PROPERTY = "padding";
+
+ private int padding = 5; //default
+
+ public void setParameterValues(Map parameters) {
+ Object padding = parameters.get( PADDING_PROPERTY );
+ if (padding != null) this.padding = (Integer) padding;
+ }
+
+ public String objectToString(Object object) {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > padding) throw new IllegalArgumentException( "Try to pad on a number too big" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+
+ public Object stringToObject(String stringValue) {
+ return new Integer(stringValue);
+ }
+}
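+
+/* Hedged usage sketch (illustrative only): as a TwoWayStringBridge this
+   bridge can back an id property, and its "padding" parameter can be set
+   through @FieldBridge:
+
+   @DocumentId
+   @FieldBridge(impl = PaddedIntegerBridge.class,
+                params = @Parameter(name = "padding", value = "10"))
+   private Integer id;
+*/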
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/TruncateFieldBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/TruncateFieldBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/TruncateFieldBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,27 @@
+//$Id: $
+package org.hibernate.search.test.bridge;
+
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.util.StringHelper;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class TruncateFieldBridge implements FieldBridge {
+ public Object get(String name, Document document) {
+ Field field = document.getField( name );
+ return field.stringValue();
+ }
+
+ public void set(String name, Object value, Document document, Field.Store store, Field.Index index, Float boost) {
+ String indexedString = (String) value;
+ //Do not add fields on empty strings, seems a sensible default in most situations
+ if ( StringHelper.isNotEmpty( indexedString ) ) {
+ Field field = new Field(name, indexedString.substring(0, indexedString.length() / 2), store, index);
+ if (boost != null) field.setBoost( boost );
+ document.add( field );
+ }
+ }
+}
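+
+/* Hedged usage sketch: mirrors the annotations already applied to the
+   test entity earlier in this commit:
+
+   @Text
+   @FieldBridge(impl = TruncateFieldBridge.class)
+   public String getCustomFieldBridge() { ... }
+*/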
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/TruncateStringBridge.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/TruncateStringBridge.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/TruncateStringBridge.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,26 @@
+//$Id: $
+package org.hibernate.search.test.bridge;
+
+import java.util.Map;
+
+import org.hibernate.search.bridge.StringBridge;
+import org.hibernate.search.bridge.ParameterizedBridge;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class TruncateStringBridge implements StringBridge, ParameterizedBridge {
+ private int div;
+ public Object stringToObject(String stringValue) {
+ return stringValue;
+ }
+
+ public String objectToString(Object object) {
+ String string = (String) object;
+ return object != null ? string.substring( 0, string.length() / div ) : null;
+ }
+
+ public void setParameterValues(Map parameters) {
+ div = Integer.valueOf( (String) parameters.get( "dividedBy" ) );
+ }
+}
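+
+/* Hedged usage sketch: mirrors the test entity earlier in this commit,
+   passing the "dividedBy" parameter read in setParameterValues():
+
+   @Text
+   @FieldBridge(impl = TruncateStringBridge.class,
+                params = @Parameter(name = "dividedBy", value = "4"))
+   public String getCustomStringBridge() { ... }
+*/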
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/Document.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/Document.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/Document.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,79 @@
+//$Id: $
+package org.hibernate.search.test.fieldAccess;
+
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Entity;
+import javax.persistence.Lob;
+
+import org.hibernate.search.annotations.Unstored;
+import org.hibernate.search.annotations.Text;
+import org.hibernate.search.annotations.Keyword;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.Indexed;
+
+/**
+ * @author Richard Hallier
+ */
+@Entity
+@Indexed(index = "DocumentField")
+public class Document {
+ @Id
+ @GeneratedValue
+ @Keyword(id = true)
+ private Long id;
+
+ @Text
+ @Boost(2)
+ private String title;
+
+ @Unstored(name = "Abstract")
+ private String summary;
+
+ @Lob
+ @Unstored
+ private String text;
+
+ Document() {
+ }
+
+ public Document(String title, String summary, String text) {
+ super();
+ this.summary = summary;
+ this.text = text;
+ this.title = title;
+ }
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ public String getSummary() {
+ return summary;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+
+ public String getText() {
+ return text;
+ }
+
+ public void setText(String text) {
+ this.text = text;
+ }
+}
+
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,69 @@
+//$Id: $
+package org.hibernate.search.test.fieldAccess;
+
+import java.util.List;
+
+import org.hibernate.search.test.TestCase;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class FieldAccessTest extends TestCase {
+
+ public void testFields() throws Exception {
+ Document doc = new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" );
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( doc );
+ tx.commit();
+
+ s.clear();
+
+ FullTextSession session = Search.createFullTextSession(s);
+ tx = session.beginTransaction();
+ QueryParser p = new QueryParser("id", new StandardAnalyzer( ) );
+ List result = session.createFullTextQuery( p.parse( "Abstract:Hibernate" ) ).list();
+ assertEquals( "Query by field", 1, result.size() );
+ s.delete( result.get( 0 ) );
+ tx.commit();
+ s.close();
+
+ }
+
+ public void testFieldBoost() throws Exception {
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist(
+ new Document( "Hibernate in Action", "Object and Relational", "blah blah blah" )
+ );
+ s.persist(
+ new Document( "Object and Relational", "Hibernate in Action", "blah blah blah" )
+ );
+ tx.commit();
+
+ s.clear();
+
+ FullTextSession session = Search.createFullTextSession(s);
+ tx = session.beginTransaction();
+ QueryParser p = new QueryParser("id", new StandardAnalyzer( ) );
+ List result = session.createFullTextQuery( p.parse( "title:Action OR Abstract:Action" ) ).list();
+ assertEquals( "Query by field", 2, result.size() );
+ assertEquals( "@Boost fails", "Hibernate in Action", ( (Document) result.get( 0 ) ).getTitle() );
+ s.delete( result.get( 0 ) );
+ tx.commit();
+ s.close();
+
+ }
+
+ protected Class[] getMappings() {
+ return new Class[] {
+ Document.class
+ };
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/Animal.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/Animal.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/Animal.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,38 @@
+//$Id: $
+package org.hibernate.search.test.inheritance;
+
+import org.hibernate.search.annotations.Keyword;
+import org.hibernate.search.annotations.Text;
+import org.hibernate.search.annotations.Indexed;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Animal {
+ private Long id;
+ private String name;
+
+ @Id @GeneratedValue @Keyword(id=true)
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ @Text
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,67 @@
+//$Id: $
+package org.hibernate.search.test.inheritance;
+
+import org.hibernate.search.test.TestCase;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.Transaction;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.search.Query;
+
+import java.util.List;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class InheritanceTest extends TestCase {
+
+ public void testInheritance() throws Exception {
+ FullTextSession s = Search.createFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Animal a = new Animal();
+ a.setName("Shark Jr");
+ s.save( a );
+ Mammal m = new Mammal();
+ m.setMammalNbr(2);
+ m.setName("Elephant Jr");
+ s.save(m);
+ tx.commit();//post commit events for lucene
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser("name", new StopAnalyzer() );
+
+ Query query;
+ org.hibernate.Query hibQuery;
+
+ query = parser.parse( "Elephant" );
+ hibQuery = s.createFullTextQuery( query, Mammal.class );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query subclass by superclass attribute", 1, result.size() );
+
+ query = parser.parse( "mammalNbr:[2 TO 2]" );
+ hibQuery = s.createFullTextQuery( query, Animal.class, Mammal.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query subclass by subclass attribute", 1, result.size() );
+
+ query = parser.parse( "Jr" );
+ hibQuery = s.createFullTextQuery( query, Animal.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query filtering on superclass return mapped subclasses", 2, result.size() );
+ for (Object managedEntity : result) {
+ s.delete(managedEntity);
+ }
+ tx.commit();
+ s.close();
+ }
+
+ protected Class[] getMappings() {
+ return new Class[] {
+ Animal.class,
+ Mammal.class
+ };
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/Mammal.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/Mammal.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/Mammal.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,25 @@
+//$Id: $
+package org.hibernate.search.test.inheritance;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Keyword;
+
+import javax.persistence.Entity;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Mammal extends Animal {
+ private int mammalNbr;
+
+ @Keyword
+ public int getMammalNbr() {
+ return mammalNbr;
+ }
+
+ public void setMammalNbr(int mammalNbr) {
+ this.mammalNbr = mammalNbr;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/AlternateBook.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/AlternateBook.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/AlternateBook.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,47 @@
+//$Id: $
+package org.hibernate.search.test.query;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index="Book")
+public class AlternateBook {
+ @Id @DocumentId
+ private Integer id;
+ @Field(index = Index.TOKENIZED)
+ private String summary;
+
+
+ public AlternateBook() {
+ }
+
+ public AlternateBook(Integer id, String summary) {
+ this.id = id;
+ this.summary = summary;
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getSummary() {
+ return summary;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/Book.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/Book.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/Book.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,58 @@
+//$Id: $
+package org.hibernate.search.test.query;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Text;
+import org.hibernate.search.annotations.Keyword;
+import org.hibernate.search.annotations.Unstored;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index = "Book" )
+public class Book {
+
+ private Integer id;
+ private String body;
+ private String summary;
+
+ public Book() {
+ }
+
+ public Book(Integer id, String summary, String body) {
+ this.id = id;
+ this.summary = summary;
+ this.body = body;
+ }
+
+ @Unstored
+ public String getBody() {
+ return body;
+ }
+
+ public void setBody(String body) {
+ this.body = body;
+ }
+
+ @Id @Keyword(id=true)
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ @Text
+ public String getSummary() {
+ return summary;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/Clock.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/Clock.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/Clock.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,44 @@
+//$Id: $
+package org.hibernate.search.test.query;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Text;
+import org.hibernate.search.annotations.Keyword;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Clock {
+ private Integer id;
+ private String brand;
+
+ public Clock() {
+ }
+
+ public Clock(Integer id, String brand) {
+ this.id = id;
+ this.brand = brand;
+ }
+
+ @Text public String getBrand() {
+ return brand;
+ }
+
+ public void setBrand(String brand) {
+ this.brand = brand;
+ }
+
+ @Id @Keyword(id=true)
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,255 @@
+//$Id: $
+package org.hibernate.search.test.query;
+
+import java.util.List;
+import java.util.Iterator;
+
+import org.hibernate.search.test.TestCase;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.Transaction;
+import org.hibernate.Hibernate;
+import org.hibernate.ScrollableResults;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.analysis.StopAnalyzer;
+
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class LuceneQueryTest extends TestCase {
+
+ public void testList() throws Exception {
+ FullTextSession s = Search.createFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock(1, "Seiko");
+ s.save( clock );
+ clock = new Clock( 2, "Festina");
+ s.save( clock );
+ Book book = new Book(1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah");
+ s.save(book);
+ book = new Book(2, "La gloire de mon p�re", "Les deboires de mon p�re en v�lo");
+ s.save(book);
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser("title", new StopAnalyzer() );
+
+ Query query = parser.parse( "summary:noword" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( 0, result.size() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with explicit class filter", 2, result.size() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with no class filter", 2, result.size() );
+ for (Object element : result) {
+ assertTrue( Hibernate.isInitialized( element ) );
+ s.delete( element );
+ }
+ for (Object element : s.createQuery( "from java.lang.Object" ).list() ) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testFirstMax() throws Exception {
+ FullTextSession s = Search.createFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock(1, "Seiko");
+ s.save( clock );
+ clock = new Clock( 2, "Festina");
+ s.save( clock );
+ Book book = new Book(1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah");
+ s.save(book);
+ book = new Book(2, "La gloire de mon p�re", "Les deboires de mon p�re en v�lo");
+ s.save(book);
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser("title", new StopAnalyzer() );
+
+ Query query = parser.parse( "summary:Festina Or brand:Seiko" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ hibQuery.setFirstResult( 1 );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "first result no max result", 1, result.size() );
+
+ hibQuery.setFirstResult( 0 );
+ hibQuery.setMaxResults( 1 );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "max result set", 1, result.size() );
+
+ hibQuery.setFirstResult( 0 );
+ hibQuery.setMaxResults( 3 );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "max result out of limit", 2, result.size() );
+
+ hibQuery.setFirstResult( 2 );
+ hibQuery.setMaxResults( 3 );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "first result out of limit", 0, result.size() );
+
+ for (Object element : s.createQuery( "from java.lang.Object" ).list() ) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testIterator() throws Exception {
+ FullTextSession s = Search.createFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock(1, "Seiko");
+ s.save( clock );
+ clock = new Clock( 2, "Festina");
+ s.save( clock );
+ Book book = new Book(1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah");
+ s.save(book);
+ book = new Book(2, "La gloire de mon p�re", "Les deboires de mon p�re en v�lo");
+ s.save(book);
+ tx.commit();//post commit events for lucene
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser("title", new StopAnalyzer() );
+
+ Query query = parser.parse( "summary:noword" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ Iterator result = hibQuery.iterate();
+ assertNotNull( result );
+ assertFalse( result.hasNext() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ result = hibQuery.iterate();
+ assertNotNull( result );
+ int index = 0;
+ while ( result.hasNext() ) {
+ index++;
+ s.delete( result.next() );
+ }
+ assertEquals( 2, index );
+ for (Object element : s.createQuery( "from java.lang.Object" ).list() ) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testScrollableResultSet() throws Exception {
+ FullTextSession s = Search.createFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock(1, "Seiko");
+ s.save( clock );
+ clock = new Clock( 2, "Festina");
+ s.save( clock );
+ Book book = new Book(1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah");
+ s.save(book);
+ book = new Book(2, "La gloire de mon p�re", "Les deboires de mon p�re en v�lo");
+ s.save(book);
+ tx.commit();//post commit events for lucene
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser("title", new StopAnalyzer() );
+
+ Query query = parser.parse( "summary:noword" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ ScrollableResults result = hibQuery.scroll();
+ assertNotNull( result );
+ assertEquals(-1, result.getRowNumber() );
+ assertEquals(false, result.next() );
+ result.close();
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ result = hibQuery.scroll();
+ assertEquals(0, result.getRowNumber() );
+ result.beforeFirst();
+ assertEquals( true, result.next() );
+ assertTrue( result.isFirst() );
+ assertTrue( result.scroll( 1 ) );
+ assertTrue( result.isLast() );
+ assertFalse( result.scroll( 1 ) );
+ result.beforeFirst();
+ while ( result.next() ) {
+ s.delete( result.get()[0] );
+ }
+ for (Object element : s.createQuery( "from java.lang.Object" ).list() ) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testMultipleEntityPerIndex() throws Exception {
+ FullTextSession s = Search.createFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock(1, "Seiko");
+ s.save( clock );
+ Book book = new Book(1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah");
+ s.save(book);
+ AlternateBook alternateBook = new AlternateBook(1, "La chute de la petite reine a travers les yeux de Festina");
+ s.save(alternateBook);
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser("title", new StopAnalyzer() );
+
+ Query query = parser.parse( "summary:Festina" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with explicit class filter", 1, result.size() );
+
+ query = parser.parse( "summary:Festina" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ Iterator it = hibQuery.iterate();
+ assertTrue( it.hasNext() );
+ assertNotNull( it.next() );
+ assertFalse( it.hasNext() );
+
+ query = parser.parse( "summary:Festina" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ ScrollableResults sr = hibQuery.scroll();
+ assertTrue( sr.first() );
+ assertNotNull( sr.get() );
+ assertFalse( sr.next() );
+ sr.close();
+
+ query = parser.parse( "summary:Festina OR brand:seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ hibQuery.setMaxResults( 2 );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with explicit class filter and limit", 2, result.size() );
+
+ query = parser.parse( "summary:Festina" );
+ hibQuery = s.createFullTextQuery( query );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with no class filter", 2, result.size() );
+ for (Object element : result) {
+ assertTrue( Hibernate.isInitialized( element ) );
+ s.delete( element );
+ }
+ for (Object element : s.createQuery( "from java.lang.Object" ).list() ) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+
+ protected Class[] getMappings() {
+ return new Class[] {
+ Book.class,
+ AlternateBook.class,
+ Clock.class
+ };
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/session/Email.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/session/Email.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/session/Email.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,64 @@
+//$Id: $
+package org.hibernate.search.test.session;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Email {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Long id;
+
+ @Field(index = Index.TOKENIZED)
+ private String title;
+
+ @Field(index = Index.TOKENIZED)
+ private String body;
+
+ private String header;
+
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ public String getBody() {
+ return body;
+ }
+
+ public void setBody(String body) {
+ this.body = body;
+ }
+
+ public String getHeader() {
+ return header;
+ }
+
+ public void setHeader(String header) {
+ this.header = header;
+ }
+}
Added: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java 2006-11-23 22:41:27 UTC (rev 10866)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java 2006-11-24 00:37:46 UTC (rev 10867)
@@ -0,0 +1,68 @@
+//$Id: $
+package org.hibernate.search.test.session;
+
+import java.util.List;
+
+import org.hibernate.search.test.TestCase;
+import org.hibernate.search.impl.FullTextSessionImpl;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.Transaction;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.analysis.StopAnalyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class MassIndexTest extends TestCase {
+
+ public void testTransactional() throws Exception {
+ FullTextSession s = Search.createFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ int loop = 4;
+ for (int i = 0 ; i < loop; i++) {
+ Email email = new Email();
+ email.setTitle( "JBoss World Berlin" );
+ email.setBody( "Meet the guys who wrote the software");
+ s.persist( email );
+ }
+ tx.commit();
+ s.close();
+
+ s = new FullTextSessionImpl( openSession() );
+ s.getTransaction().begin();
+ s.connection().createStatement().executeUpdate( "update Email set body='Meet the guys who write the software'");
+ s.getTransaction().commit();
+ s.close();
+
+ s = new FullTextSessionImpl( openSession() );
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser("id", new StopAnalyzer() );
+ List result = s.createFullTextQuery( parser.parse( "body:write" ) ).list();
+ assertEquals( 0, result.size() );
+ result = s.createCriteria( Email.class ).list();
+ for (int i = 0 ; i < loop/2 ; i++)
+ s.index( result.get( i ) );
+ tx.commit(); //do the process
+ s.index( result.get(loop/2) ); //do the process out of tx
+ tx = s.beginTransaction();
+ for (int i = loop/2+1 ; i < loop; i++)
+ s.index( result.get( i ) );
+ tx.commit(); //do the process
+ s.close();
+
+ s = new FullTextSessionImpl( openSession() );
+ tx = s.beginTransaction();
+ result = s.createFullTextQuery( parser.parse( "body:write" ) ).list();
+ assertEquals( loop, result.size() );
+ for (Object o : result) s.delete( o );
+ tx.commit();
+ s.close();
+ }
+
+ protected Class[] getMappings() {
+ return new Class[] {
+ Email.class
+ };
+ }
+}
Hibernate SVN: r10866 - branches/Lucene_Integration/HibernateExt/metadata/doc/reference/en/modules
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2006-11-23 17:41:27 -0500 (Thu, 23 Nov 2006)
New Revision: 10866
Modified:
branches/Lucene_Integration/HibernateExt/metadata/doc/reference/en/modules/lucene.xml
Log:
Hibernate Search documentation
Modified: branches/Lucene_Integration/HibernateExt/metadata/doc/reference/en/modules/lucene.xml
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/doc/reference/en/modules/lucene.xml 2006-11-23 22:30:01 UTC (rev 10865)
+++ branches/Lucene_Integration/HibernateExt/metadata/doc/reference/en/modules/lucene.xml 2006-11-23 22:41:27 UTC (rev 10866)
@@ -1,91 +1,63 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
-<chapter id="lucene" revision="1">
- <title>Hibernate Lucene Integration</title>
+<chapter id="lucene" revision="2">
+ <title>Hibernate Search: Apache <trademark>Lucene</trademark>
+ Integration</title>
- <para>Lucene is a high-performance Java search engine library available from
- the Apache Software Foundation. Hibernate Annotations includes a package of
- annotations that allows you to mark any domain model object as indexable and
- have Hibernate maintain a Lucene index of any instances persisted via
- Hibernate.</para>
+ <para><ulink url="http://lucene.apache.org">Apache Lucene</ulink> is a
+ high-performance Java search engine library available at the Apache Software
+ Foundation. Hibernate Annotations includes a package of annotations that
+ allows you to mark any domain model object as indexable and have Hibernate
+ maintain a Lucene index of any instances persisted via Hibernate. Apache
+ Lucene is also integrated with the Hibernate query facility.</para>
- <para>Hibernate Lucene is a work in progress and new features are cooking in
+ <para>Hibernate Search is a work in progress and new features are cooking in
this area. So expect some compatibility changes in subsequent
versions.</para>
- <section id="lucene-mapping">
- <title>Mapping the entities to the index</title>
+ <section id="lucene-architecture">
+ <title>Architecture</title>
- <para>First, we must declare a persistent class as indexable. This is done
- by annotating the class with <literal>@Indexed</literal>:</para>
+ <para>Hibernate Search is made of an indexing engine and an index search
+ engine. Both are backed by Apache Lucene.</para>
- <programlisting>@Entity
-@Indexed(index="indexes/essays")
-public class Essay {
- ...
-}</programlisting>
+ <para>When an entity is inserted in, updated in or removed from the
+ database, <productname>Hibernate Search</productname> will keep track of
+ this event (through the Hibernate event system) and schedule an index
+ update. When out of transaction, the update is executed right after the
+ actual database operation. It is however recommended, for both your
+ database and Hibernate Search, to execute your operation in a transaction
+ (whether JDBC or JTA). When in a transaction, the index update is
+ scheduled for the transaction commit (and discarded in case of
+ transaction rollback). You can think of this as the regular (infamous)
+ autocommit vs transactional behavior. From a performance perspective, the
+ <emphasis>in transaction</emphasis> mode is recommended. All the index
+ updates are handled for you without you having to use the Apache Lucene
+ APIs.</para>
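+
+ <para>As a minimal sketch of the transactional mode (entity and variable
+ names are illustrative):</para>
+
+ <programlisting>Session s = sessionFactory.openSession();
+Transaction tx = s.beginTransaction();
+s.persist( essay ); //no index update yet
+tx.commit(); //the scheduled index update is executed here</programlisting>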
- <para>The <literal>index</literal> attribute tells Hibernate what the
- lucene directory name is (usually a directory on your file system). If you
- wish to define a base directory for all lucene indexes, you can use the
- <literal>hibernate.lucene.default.indexDir</literal> property in your
- configuration file.</para>
+ <para>To interact with Apache Lucene indexes, Hibernate Search has the
+ notion of <classname>DirectoryProvider</classname>. A directory provider
+ will manage a given Lucene <classname>Directory</classname> type. You can
+ configure directory providers to adjust the directory target.</para>
- <para>Lucene indexes contain four kinds of fields:
- <emphasis>keyword</emphasis> fields, <emphasis>text</emphasis> fields,
- <emphasis>unstored</emphasis> fields and <emphasis>unindexed</emphasis>
- fields. Hibernate Annotations provides annotations to mark a property of
- an entity as one of the first three kinds of indexed fields.</para>
-
- <programlisting>@Entity
-@Indexed(index="indexes/essays")
-public class Essay {
- ...
-
- @Id
- @Keyword(id=true)
- public Long getId() { return id; }
-
- @Text(name="Abstract")
- public String getSummary() { return summary; }
-
- @Lob
- @Unstored
- public String getText() { return text; }
-
-}</programlisting>
-
- <para>These annotations define an index with three fields:
- <literal>id</literal>, <literal>Abstract</literal> and
- <literal>text</literal>. Note that by default the field name is
- decapitalized, following the JavaBean specification.</para>
-
- <para>Note: you <emphasis>must</emphasis> specify
- <literal>@Keyword(id=true)</literal> on the identifier property of your
- entity class.</para>
-
- <para>Lucene has the notion of <emphasis>boost factor</emphasis>. It's a
- way to give more weigth to a field or to an indexed element over an other
- during the indexation process. You can use <literal>@Boost</literal> at
- the field or the class level.</para>
-
- <para>The analyzer class used to index the elements is configurable
- through the <literal>hibernate.lucene.analyzer</literal> property. If none
- defined,
- <classname>org.apache.lucene.analysis.standard.StandardAnalyzer</classname>
- is used as the default.</para>
+ <para><productname>Hibernate Search</productname> can also use a Lucene
+ index to search an entity and return a list of managed entities, saving
+ you from the tedious Object / Lucene Document mapping and low level
+ Lucene APIs. The application code uses the unified
+ <classname>org.hibernate.Query</classname> API exactly the way an HQL or
+ native query would be used.</para>
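+
+ <para>As a short sketch (assuming an open
+ <classname>FullTextSession</classname> named <literal>session</literal>
+ and a parsed Lucene <literal>query</literal>):</para>
+
+ <programlisting>org.hibernate.Query hibQuery = session.createFullTextQuery( query, Essay.class );
+List results = hibQuery.list(); //returns managed Essay instances</programlisting>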
</section>
<section id="lucene-configuration">
<title>Configuration</title>
<section id="lucene-configuration-directory">
- <title>directory configuration</title>
+ <title>Directory configuration</title>
- <para>Lucene has a notion of Directory where the index is stored. The
- Directory implementation can be customized but Lucene comes bundled with
- a file system and a full memory implementation. Hibernate Lucene has the
- notion of <literal>DirectoryProvider</literal> that handle the
- configuration and the initialization of the Lucene Directory.</para>
+ <para>Apache Lucene has a notion of Directory where the index is stored.
+ The Directory implementation can be customized but Lucene comes bundled
+ with a file system and a full memory implementation.
+ <productname>Hibernate Search</productname> has the notion of
+ <literal>DirectoryProvider</literal> that handles the configuration and
+ the initialization of the Lucene Directory.</para>
<table>
<title>List of built-in Directory Providers</title>
@@ -103,19 +75,19 @@
<tbody>
<row>
- <entry>org.hibernate.lucene.store.FSDirectoryProvider</entry>
+ <entry>org.hibernate.search.store.FSDirectoryProvider</entry>
<entry>File system based directory. The directory used will be
- <indexBase>/<<literal>@Index.name</literal>></entry>
+ <indexBase>/<<literal>@Indexed.name</literal>></entry>
<entry><literal>indexBase</literal>: Base directory</entry>
</row>
<row>
- <entry>org.hibernate.lucene.store.RAMDirectoryProvider</entry>
+ <entry>org.hibernate.search.store.RAMDirectoryProvider</entry>
<entry>Memory based directory, the directory will be uniquely
+ identified by the <literal>@Indexed.name</literal>
+ indentified by the <literal>@Indexed.name</literal>
element</entry>
<entry>none</entry>
@@ -132,17 +104,17 @@
<para>Each indexed entity is associated to a Lucene index (an index can
be shared by several entities but this is not usually the case). You can
configure the index through properties prefixed by
- <literal><literal>hibernate.lucene.<indexname></literal></literal>.
+ <constant>hibernate.search.</constant><replaceable>indexname</replaceable>.
Default properties inherited to all indexes can be defined using the
- prefix hibernate.lucene.default.</para>
+ prefix <constant>hibernate.search.default.</constant></para>
<para>To define the directory provider of a given index, you use the
- <literal>hibernate.lucene.<indexname>.directory_provider</literal></para>
+ <constant>hibernate.search.<replaceable>indexname</replaceable>.directory_provider</constant></para>
- <programlisting>hibernate.lucene.default.directory_provider org.hibernate.lucene.store.FSDirectoryProvider
-hibernate.lucene.default.indexDir=/usr/lucene/indexes
+ <programlisting>hibernate.search.default.directory_provider org.hibernate.search.store.FSDirectoryProvider
+hibernate.search.default.indexDir=/usr/lucene/indexes
-hibernate.lucene.Rules.directory_provider org.hibernate.lucene.store.RAMDirectoryProvider
+hibernate.search.Rules.directory_provider org.hibernate.search.store.RAMDirectoryProvider
</programlisting>
<para>applied on</para>
@@ -162,32 +134,537 @@
and base directory, and overide those default later on on a per index
basis.</para>
- <para>Writing your own DirectoryProvider, you can benefit this
- configuration mechanism too.</para>
+ <para>Writing your own <classname>DirectoryProvider</classname>, you can
+ benefit from this configuration mechanism too.</para>
</section>
- <section id="lucene-configuration-event">
+ <section id="lucene-configuration-event" revision="1">
<title>Enabling automatic indexing</title>
- <para>Finally, we enable the <literal>LuceneEventListener</literal> for
- the three Hibernate events that occur after changes are committed to the
+ <para>Finally, we enable the <literal>FullTextIndexEventListener</literal> for
+ the three Hibernate events that occur after changes are executed against the
database.</para>
<programlisting><hibernate-configuration>
...
- <event type="post-commit-update"
- <listener
- class="org.hibernate.lucene.event.LuceneEventListener"/>
+ <event type="post-update"
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
</event>
- <event type="post-commit-insert"
- <listener
- class="org.hibernate.lucene.event.LuceneEventListener"/>
+ <event type="post-insert"
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
</event>
- <event type="post-commit-delete"
- <listener
- class="org.hibernate.lucene.event.LuceneEventListener"/>
+ <event type="post-delete"
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
</event>
</hibernate-configuration></programlisting>
</section>
</section>
+
+ <section id="lucene-mapping" revision="1">
+ <title>Mapping entities to the index structure</title>
+
+ <para>All the metadata information related to indexed entities is
+ described through Java annotations. There is no need for XML mapping
+ files nor a list of indexed entities. The list is discovered at startup
+ time by scanning the Hibernate mapped entities.</para>
+
+ <para>First, we must declare a persistent class as indexable. This is done
+ by annotating the class with <literal>@Indexed</literal> (all entities not
+ annotated with <literal>@Indexed</literal> will be ignored by the indexing
+ process):</para>
+
+ <programlisting>@Entity
+<emphasis role="bold">@Indexed(index="indexes/essays")</emphasis>
+public class Essay {
+ ...
+}</programlisting>
+
+ <para>The <literal>index</literal> attribute tells Hibernate what the
+ Lucene directory name is (usually a directory on your file system). If you
+ wish to define a base directory for all Lucene indexes, you can use the
+ <literal>hibernate.search.default.indexDir</literal> property in your
+ configuration file. Each entity instance will be represented by a Lucene
+ <classname>Document</classname> inside the given index (aka
+ Directory).</para>
+
+ <para>For each property (or attribute) of your entity, you have the
+ ability to describe how it will be indexed. The default (ie no annotation)
+ means that the property is completely ignored by the indexing process.
+ <literal>@Field</literal> declares a property as indexed. When
+ indexing an element to a Lucene document you can specify how it is
+ indexed:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para><literal>name</literal>: describes under which name the property
+ should be stored in the Lucene Document. The default value is the
+ property name (following the JavaBeans convention)</para>
+ </listitem>
+
+ <listitem>
+ <para><literal>store</literal>: describes whether or not the property
+ is stored in the Lucene index. You can store the value
+ <literal>Store.YES</literal> (consuming more space in the index),
+ store it in a compressed way <literal>Store.COMPRESS</literal> (this
+ does consume more CPU), or avoid any storage
+ <literal>Store.NO</literal> (this is the default value). When a
+ property is stored, you can retrieve it from the Lucene Document (note
+ that this is not related to whether the element is indexed or
+ not).</para>
+ </listitem>
+
+ <listitem>
+ <para><literal>index</literal>: describes how the element is indexed
+ (ie the process used to index the property and the type of information
+ stored). The different values are <literal>Index.NO</literal> (no
+ indexing, ie it cannot be found by a query),
+ <literal>Index.TOKENIZED</literal> (use an analyzer to process the
+ property), <literal>Index.UN_TOKENIZED</literal> (no analyzer pre
+ processing),
+ <literal>Index.NO_NORM</literal> (do not store the normalization
+ data).</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>These attributes are part of the <literal>@Field</literal>
+ annotation.</para>
+
+ <para>Whether or not you want to store the data depends on how you wish to
+ use the index query result. As of today, for a pure <productname>Hibernate
+ Search</productname> usage, storing is not necessary. Whether or not you
+ want to tokenize a property depends on whether you wish to search the
+ element as is, or only on normalized parts of it. It makes sense to
+ tokenize a text field, but not to do it for a date field (or an id
+ field).</para>
+
+ <para>Finally, the id property of an entity is a special property used by
+ <productname>Hibernate Search</productname> to ensure index uniqueness of
+ a given entity. By design, an id has to be stored and must not be
+ tokenized. To mark a property as the index id, use the <literal>@DocumentId</literal>
+ annotation.</para>
+
+ <programlisting>@Entity
+@Indexed(index="indexes/essays")
+public class Essay {
+ ...
+
+ @Id
+ <emphasis role="bold">@DocumentId</emphasis>
+ public Long getId() { return id; }
+
+ <emphasis role="bold">@Field(name="Abstract", index=Index.TOKENIZED, store=Store.YES)</emphasis>
+ public String getSummary() { return summary; }
+
+ @Lob
+ <emphasis role="bold">@Field(index=Index.TOKENIZED)</emphasis>
+ public String getText() { return text; }
+
+}</programlisting>
+
+ <para>These annotations define an index with three fields:
+ <literal>id</literal>, <literal>Abstract</literal> and
+ <literal>text</literal>. Note that by default the field name is
+ decapitalized, following the JavaBean specification.</para>
+
+ <para>Note: you <emphasis>must</emphasis> specify
+ <literal>@DocumentId</literal> on the identifier property of your entity
+ class.</para>
+
+ <para>Lucene has the notion of <emphasis>boost factor</emphasis>. It's a
+ way to give more weight to a field or to an indexed element over another
+ during the indexing process. You can use <literal>@Boost</literal> at
+ the field or the class level.</para>
+
+ <programlisting>@Entity
+@Indexed(index="indexes/essays")
+<emphasis role="bold">@Boost(2)</emphasis>
+public class Essay {
+ ...
+
+ @Id
+ @DocumentId
+ public Long getId() { return id; }
+
+ @Field(name="Abstract", index=Index.TOKENIZED, store=Store.YES)
+ <emphasis role="bold">@Boost(2.5f)</emphasis>
+ public String getSummary() { return summary; }
+
+ @Lob
+ @Field(index=Index.TOKENIZED)
+ public String getText() { return text; }
+
+}</programlisting>
+
+ <para>In our example, Essay's probability to reach the top of the search
+ list will be multiplied by 2 and the summary field will be 2.5 times more
+ important than the text field. Note that this explanation is actually
+ wrong, but it is simple and close enough to the reality. Please check the
+ Lucene documentation or the excellent <citetitle>Lucene In
+ Action</citetitle> from Otis Gospodnetic and Erik Hatcher.</para>
+
+ <para>The analyzer class used to index the elements is configurable
+ through the <literal>hibernate.search.analyzer</literal> property. If none
+ is defined,
+ <classname>org.apache.lucene.analysis.standard.StandardAnalyzer</classname>
+ is used as the default.</para>
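+
+ <para>For example, to switch to Lucene's
+ <classname>StopAnalyzer</classname> (any analyzer class name can be used
+ here):</para>
+
+ <programlisting>hibernate.search.analyzer org.apache.lucene.analysis.StopAnalyzer</programlisting>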
+ </section>
+
+ <section id="lucene-bridge">
+ <title>Property/Field Bridge</title>
+
+ <para>All fields of a full text index in Lucene have to be represented as
+ Strings: your Java properties thus have to be indexed in a String form.
+ For most of your properties, <productname>Hibernate Search</productname>
+ does the translation job for you thanks to a built-in set of bridges. In
+ some cases, though, you need fine-grained control over the translation
+ process.</para>
+
+ <section>
+ <title>Built-in bridges</title>
+
+ <para><literal>Hibernate Search</literal> comes bundled with a set of
+ built-in bridges between a Java property type and its full text
+ representation.</para>
+
+ <variablelist>
+ <varlistentry>
+ <term>null</term>
+
+ <listitem>
+ <para>null elements are not indexed. Lucene does not support null
+ elements and this does not make much sense either.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>java.lang.String</term>
+
+ <listitem>
+ <para>Strings are indexed as is</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>short, Short, int, Integer, long, Long, float, Float,
+ double, Double, BigInteger, BigDecimal</term>
+
+ <listitem>
+ <para>Numbers are converted into their String representation. Note
+ that numbers cannot be compared by Lucene (ie used in ranged
+ queries) out of the box: they have to be padded <footnote>
+ <para>Using a Range query is debattable and has drawbacks, an
+ alternative approach is to use a Filter query which will
+ filter the result query to the appropriate range.</para>
+
+ <para><productname>Hibernate Search</productname> will support
+ a padding mechanism</para>
+ </footnote></para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>java.util.Date</term>
+
+ <listitem>
+ <para>Dates are stored as yyyyMMddHHmmssSSS in GMT time
+ (200611072203012 for Nov 7th of 2006 4:03PM and 12ms EST). You
+ shouldn't really bother with the internal format. What is
+ important is that when using a DateRange Query, you should know
+ that the dates have to be expressed in GMT time.</para>
+
+ <para>Usually, storing the date up to the millisecond is not
+ necessary. <literal>@DateBridge</literal> defines the appropriate
+ resolution you are willing to store in the index
+ (<literal>@DateBridge(resolution=Resolution.DAY)</literal>).
+ The date pattern will then be truncated accordingly.</para>
+
+ <programlisting>@Entity @Indexed
+public class Meeting {
+ @Field(index=Index.UN_TOKENIZED)
+ <emphasis role="bold">@DateBridge(resolution=Resolution.MINUTE)</emphasis>
+ private Date date;
+ ...
+}</programlisting>
+
+ <warning>
+ <para>A Date whose resolution is lower than
+ <literal>MILLISECOND</literal> cannot be a
+ <literal>@DocumentId</literal></para>
+ </warning>
+ </listitem>
+ </varlistentry>
+ </variablelist>
+
+ </section>
+
+ <section>
+ <title>Custom Bridge</title>
+
+ <para>It can happen that the built-in bridges of Hibernate Search do
+ not cover some of your property types, or that the String representation
+ used is not what you expect.</para>
+
+ <section>
+ <title>StringBridge</title>
+
+ <para>The simplest custom solution is to give <productname>Hibernate
+ Search</productname> an implementation of your expected
+ <emphasis>object to String</emphasis> bridge. To do so you need to
+ implement the
+ <literal>org.hibernate.search.bridge.StringBridge</literal>
+ interface.</para>
+
+ <programlisting>/**
+ * Padding Integer bridge.
+ * All numbers will be padded with 0 to match 5 digits
+ *
+ * @author Emmanuel Bernard
+ */
+public class PaddedIntegerBridge implements <emphasis role="bold">StringBridge</emphasis> {
+
+ private int PADDING = 5;
+
+ <emphasis role="bold">public String objectToString(Object object)</emphasis> {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > PADDING) throw new IllegalArgumentException( "Try to pad on a number too big" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < PADDING ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+}</programlisting>
+
+ <para>Then any property or field can use this bridge thanks to the
+ <literal>@FieldBridge</literal> annotation</para>
+
+ <programlisting><emphasis role="bold">@FieldBridge(impl = PaddedIntegerBridge.class)</emphasis>
+private Integer length;</programlisting>
+
+ <para>Parameters can be passed to the bridge implementation, making it
+ more flexible. In that case the bridge implements the
+ <classname>ParameterizedBridge</classname> interface, and the
+ parameters are passed through the <literal>@FieldBridge</literal>
+ annotation.</para>
+
+ <programlisting>public class PaddedIntegerBridge implements StringBridge, <emphasis
+ role="bold">ParameterizedBridge</emphasis> {
+
+ public static String PADDING_PROPERTY = "padding";
+ private int padding = 5; //default
+
+ <emphasis role="bold">public void setParameterValues(Map parameters)</emphasis> {
+ Object padding = parameters.get( PADDING_PROPERTY );
+ if (padding != null) this.padding = (Integer) padding;
+ }
+
+ public String objectToString(Object object) {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > padding) throw new IllegalArgumentException( "Try to pad on a number too big" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+}
+
+
+//property
+@FieldBridge(impl = PaddedIntegerBridge.class,
+ <emphasis role="bold">params = @Parameter(name="padding", value="10")</emphasis> )
+private Integer length;</programlisting>
+
+          <para>The <classname>ParameterizedBridge</classname> interface can be
+          implemented by <classname>StringBridge</classname>,
+          <classname>TwoWayStringBridge</classname> and
+          <classname>FieldBridge</classname> implementations (see
+          below).</para>
+
+          <para>If you expect to use your bridge implementation on an id
+          property (i.e. annotated with <literal>@DocumentId</literal>), you
+          need to use a slightly extended version of
+          <literal>StringBridge</literal> named
+          <classname>TwoWayStringBridge</classname>. <literal>Hibernate
+          Search</literal> needs to read the string representation of the
+          identifier and generate the object out of it. There is no difference
+          in the way the <literal>@FieldBridge</literal> annotation is
+          used.</para>
+
+ <programlisting>public class PaddedIntegerBridge implements TwoWayStringBridge, ParameterizedBridge {
+
+    public static final String PADDING_PROPERTY = "padding";
+ private int padding = 5; //default
+
+    public void setParameterValues(Map parameters) {
+        Object padding = parameters.get( PADDING_PROPERTY );
+        //@Parameter values arrive as Strings, so parse rather than cast
+        if (padding != null) this.padding = Integer.parseInt( (String) padding );
+    }
+
+ public String objectToString(Object object) {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > padding) throw new IllegalArgumentException( "Try to pad on a number too big" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+
+ <emphasis role="bold">public Object stringToObject(String stringValue)</emphasis> {
+ return new Integer(stringValue);
+ }
+}
+
+
+//id property
+@DocumentId
+@FieldBridge(impl = PaddedIntegerBridge.class,
+ params = @Parameter(name="padding", value="10") )
+private Integer id;</programlisting>
+
+          <para>It is critically important for the two-way process to
+          round-trip correctly (i.e. object = stringToObject( objectToString(
+          object ) )).</para>
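+
+          <para>A quick way to verify this property is a plain round-trip
+          check (a minimal, hypothetical snippet, not part of the Hibernate
+          Search distribution):</para>
+
+          <programlisting>TwoWayStringBridge bridge = new PaddedIntegerBridge();
+Integer original = new Integer( 25 );
+String indexed = bridge.objectToString( original ); //"00025" with the default padding
+assert original.equals( bridge.stringToObject( indexed ) );</programlisting>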
+ </section>
+
+ <section>
+ <title>FieldBridge</title>
+
+          <para>Some use cases require more than a simple object to string
+          translation when mapping a property to a Lucene index. To give you
+          maximum flexibility you can also implement a bridge as a
+          <classname>FieldBridge</classname>. This interface gives you a
+          property value and lets you map it the way you want in your Lucene
+          <classname>Document</classname>. This interface is very similar in
+          concept to the <productname>Hibernate</productname>
+          <classname>UserType</classname>.</para>
+
+          <para>You can for example store a given property in several
+          document fields:</para>
+
+ <programlisting>/**
+ * Store the date in 3 different field year, month, day
+ * to ease Range Query per year, month or day
+ * (eg get all the elements of december for the last 5 years)
+ *
+ * @author Emmanuel Bernard
+ */
+public class DateSplitBridge implements FieldBridge {
+ private final static TimeZone GMT = TimeZone.getTimeZone("GMT");
+
+ <emphasis role="bold">public void set(String name, Object value, Document document, Field.Store store, Field.Index index, Float boost) {</emphasis>
+ Date date = (Date) value;
+ Calendar cal = GregorianCalendar.getInstance( GMT );
+ cal.setTime( date );
+ int year = cal.get( Calendar.YEAR );
+ int month = cal.get( Calendar.MONTH ) + 1;
+ int day = cal.get( Calendar.DAY_OF_MONTH );
+ //set year
+ Field field = new Field( name + ".year", String.valueOf(year), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ //set month and pad it if needed
+        field = new Field( name + ".month", ( month < 10 ? "0" : "" ) + String.valueOf(month), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ //set day and pad it if needed
+        field = new Field( name + ".day", ( day < 10 ? "0" : "" ) + String.valueOf(day), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ }
+}
+
+
+//property
+<emphasis role="bold">@FieldBridge(impl = DateSplitBridge.class)</emphasis>
+private Date date;</programlisting>
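+
+          <para>Each subfield can then be targeted individually in a Lucene
+          query. For example, to match all December dates regardless of the
+          year, a raw <classname>TermQuery</classname> (which bypasses text
+          analysis) can be run against the generated field name:</para>
+
+          <programlisting>org.apache.lucene.search.Query luceneQuery =
+    new TermQuery( new Term( "date.month", "12" ) );</programlisting>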
+
+ </section>
+ </section>
+ </section>
+
+ <section id="lucene-query">
+ <title>Querying</title>
+
+    <para>The second most important capability of <productname>Hibernate
+    Search</productname> is the ability to execute a Lucene query and retrieve
+    entities managed by a Hibernate session, providing the power of Lucene
+    without leaving the Hibernate paradigm, and giving another dimension to
+    the Hibernate classic search mechanisms (HQL, Criteria query, native SQL
+    query).</para>
+
+    <para>To access the <productname>Hibernate Search</productname> querying
+    facilities, you have to use a Hibernate
+    <classname>FullTextSession</classname>. A
+    <classname>FullTextSession</classname> wraps a regular
+    <classname>org.hibernate.Session</classname> to provide query and indexing
+    capabilities.</para>
+
+ <programlisting>Session session = sessionFactory.openSession();
+...
+FullTextSession fullTextSession = Search.createFullTextSession(session);</programlisting>
+
+ <para>The search facility is built on native Lucene queries.</para>
+
+    <programlisting>org.apache.lucene.queryParser.QueryParser parser = new QueryParser( "title", new StopAnalyzer() );
+
+org.apache.lucene.search.Query luceneQuery = parser.parse( "summary:Festina OR brand:Seiko" );
+<emphasis role="bold">org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery );</emphasis>
+
+List result = fullTextQuery.list(); //returns a list of managed objects</programlisting>
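+
+    <para><methodname>createFullTextQuery()</methodname> also accepts an
+    optional list of entity classes to restrict the types returned (here
+    <classname>Watch</classname> stands for a hypothetical indexed
+    entity):</para>
+
+    <programlisting>org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery, Watch.class );</programlisting>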
+
+    <para>The Hibernate query built on top of the Lucene query is a regular
+    <literal>org.hibernate.Query</literal>; you are in the same paradigm as
+    the other Hibernate query facilities (HQL, Native or Criteria). The
+    regular <literal>list()</literal>, <literal>uniqueResult()</literal>,
+    <literal>iterate()</literal> and <literal>scroll()</literal> methods can
+    be used.</para>
+
+    <para>If you expect a reasonable number of results and expect to work on
+    all of them, <methodname>list()</methodname> or
+    <methodname>uniqueResult()</methodname> are recommended.
+    <methodname>list()</methodname> works best if the entity
+    <literal>batch-size</literal> is set up properly. Note that Hibernate
+    Search has to process all Lucene Hits elements when using
+    <methodname>list()</methodname>, <methodname>uniqueResult()</methodname>
+    and <methodname>iterate()</methodname>. If you wish to minimize Lucene
+    document loading, <methodname>scroll()</methodname> is more appropriate.
+    Don't forget to close the <classname>ScrollableResults</classname> object
+    when you're done, since it keeps Lucene resources open.</para>
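+
+    <para>A sketch of the <methodname>scroll()</methodname> pattern, using the
+    regular <classname>org.hibernate.ScrollableResults</classname> API (the
+    per-entity processing is application specific):</para>
+
+    <programlisting>org.hibernate.ScrollableResults results = fullTextQuery.scroll();
+try {
+    while ( results.next() ) {
+        Object entity = results.get( 0 ); //the matching managed entity
+        //process the entity
+    }
+}
+finally {
+    results.close(); //releases the underlying Lucene resources
+}</programlisting>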
+
+ <para>An efficient way to work with queries is to use pagination. The
+ pagination API is exactly the one available in
+ <classname>org.hibernate.Query</classname>:</para>
+
+ <programlisting><emphasis role="bold">org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery );</emphasis>
+fullTextQuery.setFirstResult(30);
+fullTextQuery.setMaxResults(20);
+fullTextQuery.list(); //returns a list of 20 elements, skipping the first 30
+
+ <para>Only the relevant Lucene Documents are accessed.</para>
+ </section>
+
+ <section id="lucene-index">
+ <title>Indexing</title>
+
+    <para>It is sometimes useful to index an object even if this object is
+    neither inserted nor updated in the database. This is especially true when
+    you want to build your index for the first time. You can achieve that goal
+    using the <classname>FullTextSession</classname>.</para>
+
+ <programlisting>FullTextSession fullTextSession = Search.createFullTextSession(session);
+Transaction tx = fullTextSession.beginTransaction();
+for (Customer customer : customers) {
+ <emphasis role="bold">fullTextSession.index(customer);</emphasis>
+}
+tx.commit(); //index changes are written at commit time</programlisting>
+
+    <para>For maximum efficiency, Hibernate Search batches index operations
+    and executes them at commit time. (Note: you don't need to use
+    <classname>org.hibernate.Transaction</classname> in a JTA
+    environment.)</para>
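+
+    <para>When indexing a large volume of objects in a single transaction, it
+    can help to clear the session periodically so the persistence context
+    stays small (a hedged sketch: the batch size of 100 is arbitrary, the
+    customers collection is assumed to be already in memory, and the queued
+    index work is unaffected by <methodname>clear()</methodname> since the
+    Lucene Documents are built when <methodname>index()</methodname> is
+    called):</para>
+
+    <programlisting>int processed = 0;
+for (Customer customer : customers) {
+    fullTextSession.index( customer );
+    if ( ++processed % 100 == 0 ) fullTextSession.clear(); //detach already indexed objects
+}</programlisting>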
+ </section>
</chapter>
\ No newline at end of file
Hibernate SVN: r10865 - in branches/Lucene_Integration/HibernateExt/metadata/src: java/org/hibernate/search java/org/hibernate/search/bridge java/org/hibernate/search/engine java/org/hibernate/search/event java/org/hibernate/search/impl java/org/hibernate/search/query java/org/hibernate/search/util test/org/hibernate/search/test test/org/hibernate/search/test/bridge test/org/hibernate/search/test/fieldAccess test/org/hibernate/search/test/inheritance test/org/hibernate/search/test/query
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2006-11-23 17:30:01 -0500 (Thu, 23 Nov 2006)
New Revision: 10865
Added:
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/Search.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/impl/
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java
Removed:
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java
Modified:
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java
Log:
Finish getting rid of Lucene when it is inappropriate.
Polish renaming
Deleted: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -1,465 +0,0 @@
-//$Id: $
-package org.hibernate.search;
-
-import java.io.Serializable;
-import java.sql.Connection;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.locks.ReentrantLock;
-
-import javax.transaction.Status;
-
-import org.hibernate.CacheMode;
-import org.hibernate.Criteria;
-import org.hibernate.EntityMode;
-import org.hibernate.Filter;
-import org.hibernate.FlushMode;
-import org.hibernate.HibernateException;
-import org.hibernate.LockMode;
-import org.hibernate.Query;
-import org.hibernate.ReplicationMode;
-import org.hibernate.SQLQuery;
-import org.hibernate.Session;
-import org.hibernate.SessionFactory;
-import org.hibernate.Transaction;
-import org.hibernate.engine.query.ParameterMetadata;
-import org.hibernate.impl.SessionImpl;
-import org.hibernate.search.query.FullTextQueryImpl;
-import org.hibernate.search.event.FullTextEventListener;
-import org.hibernate.search.util.ContextHelper;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.backend.UpdateWork;
-import org.hibernate.search.backend.Work;
-import org.hibernate.search.backend.WorkQueue;
-import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
-import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
-import org.hibernate.search.store.DirectoryProvider;
-import org.hibernate.stat.SessionStatistics;
-import org.hibernate.type.Type;
-import org.apache.lucene.document.Document;
-
-/**
- * Lucene aware session that allows lucene query creations
- *
- * @author Emmanuel Bernard
- */
-public class FullTextSession implements Session {
- private final SessionImpl session;
- private PostTransactionWorkQueueSynchronization postTransactionWorkQueueSynch;
-
- public FullTextSession(Session session) {
- this.session = (SessionImpl) session;
- }
-
- /**
- * Execute a Lucene query and retrieve managed objects of type entities (or their indexed subclasses)
- * If entities is empty, include all indexed entities
- *
- * @param entities must be immutable for the lifetime of the query object
- */
- public Query createLuceneQuery(org.apache.lucene.search.Query luceneQuery, Class... entities) {
- return new FullTextQueryImpl( luceneQuery, entities, session, new ParameterMetadata(null, null) );
- }
-
- /**
- * (re)index an entity.
- * Non indexable entities are ignored
- * The entity must be associated with the session
- *
- * @param entity must not be null
- */
- public void index(Object entity) {
- if (entity == null) return;
- Class clazz = entity.getClass();
- FullTextEventListener listener = ContextHelper.getLuceneEventListener( session );
- DocumentBuilder<Object> builder = listener.getDocumentBuilders().get( clazz );
- if ( builder != null ) {
- Serializable id = session.getIdentifier( entity );
- Document doc = builder.getDocument( entity, id );
- UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
- processWork( work, listener.getDocumentBuilders(), listener.getLockableDirectoryProviders() );
- }
- //TODO
- //need to add elements in a queue kept at the Session level
- //the queue will be processed by a Lucene(Auto)FlushEventListener
- //note that we could keep this queue somewhere in the event listener in the mean time but that requires
- // a synchronized hashmap holding this queue on a per session basis plus some session house keeping (yuk)
- //an other solution would be to subclass SessionImpl instead of having this LuceneSession delecation model
- // this is an open discussion
- }
-
- private void processWork(Work work, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
- if ( session.isTransactionInProgress() ) {
- if ( postTransactionWorkQueueSynch == null || postTransactionWorkQueueSynch.isConsumed() ) {
- postTransactionWorkQueueSynch = createWorkQueueSync( documentBuilders, lockableDirectoryProviders);
- session.getTransaction().registerSynchronization( postTransactionWorkQueueSynch );
- }
- postTransactionWorkQueueSynch.add( work );
- }
- else {
- //no transaction work right away
- PostTransactionWorkQueueSynchronization sync =
- createWorkQueueSync( documentBuilders, lockableDirectoryProviders );
- sync.add( work );
- sync.afterCompletion( Status.STATUS_COMMITTED );
- }
- }
-
- private PostTransactionWorkQueueSynchronization createWorkQueueSync(
- Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
- WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
- return new PostTransactionWorkQueueSynchronization( workQueue );
- }
-
- public Query createSQLQuery(String sql, String returnAlias, Class returnClass) {
- return session.createSQLQuery( sql, returnAlias, returnClass );
- }
-
- public Query createSQLQuery(String sql, String[] returnAliases, Class[] returnClasses) {
- return session.createSQLQuery( sql, returnAliases, returnClasses );
- }
-
- public int delete(String query) throws HibernateException {
- return session.delete( query );
- }
-
- public int delete(String query, Object value, Type type) throws HibernateException {
- return session.delete( query, value, type );
- }
-
- public int delete(String query, Object[] values, Type[] types) throws HibernateException {
- return session.delete( query, values, types );
- }
-
- public Collection filter(Object collection, String filter) throws HibernateException {
- return session.filter( collection, filter );
- }
-
- public Collection filter(Object collection, String filter, Object value, Type type) throws HibernateException {
- return session.filter( collection, filter, value, type );
- }
-
- public Collection filter(Object collection, String filter, Object[] values, Type[] types) throws HibernateException {
- return session.filter( collection, filter, values, types );
- }
-
- public List find(String query) throws HibernateException {
- return session.find( query );
- }
-
- public List find(String query, Object value, Type type) throws HibernateException {
- return session.find( query, value, type );
- }
-
- public List find(String query, Object[] values, Type[] types) throws HibernateException {
- return session.find( query, values, types );
- }
-
- public Iterator iterate(String query) throws HibernateException {
- return session.iterate( query );
- }
-
- public Iterator iterate(String query, Object value, Type type) throws HibernateException {
- return session.iterate( query, value, type );
- }
-
- public Iterator iterate(String query, Object[] values, Type[] types) throws HibernateException {
- return session.iterate( query, values, types );
- }
-
- public void save(String entityName, Object object, Serializable id) throws HibernateException {
- session.save( entityName, object, id );
- }
-
- public void save(Object object, Serializable id) throws HibernateException {
- session.save( object, id );
- }
-
- public Object saveOrUpdateCopy(String entityName, Object object) throws HibernateException {
- return session.saveOrUpdateCopy( entityName, object );
- }
-
- public Object saveOrUpdateCopy(String entityName, Object object, Serializable id) throws HibernateException {
- return session.saveOrUpdateCopy( entityName, object, id );
- }
-
- public Object saveOrUpdateCopy(Object object) throws HibernateException {
- return session.saveOrUpdateCopy( object );
- }
-
- public Object saveOrUpdateCopy(Object object, Serializable id) throws HibernateException {
- return session.saveOrUpdateCopy( object, id );
- }
-
- public void update(String entityName, Object object, Serializable id) throws HibernateException {
- session.update( entityName, object, id );
- }
-
- public void update(Object object, Serializable id) throws HibernateException {
- session.update( object, id );
- }
-
- public Transaction beginTransaction() throws HibernateException {
- return session.beginTransaction();
- }
-
- public void cancelQuery() throws HibernateException {
- session.cancelQuery();
- }
-
- public void clear() {
- session.clear();
- }
-
- public Connection close() throws HibernateException {
- return session.close();
- }
-
- public Connection connection() throws HibernateException {
- return session.connection();
- }
-
- public boolean contains(Object object) {
- return session.contains( object );
- }
-
- public Criteria createCriteria(String entityName) {
- return session.createCriteria( entityName );
- }
-
- public Criteria createCriteria(String entityName, String alias) {
- return session.createCriteria( entityName, alias );
- }
-
- public Criteria createCriteria(Class persistentClass) {
- return session.createCriteria( persistentClass );
- }
-
- public Criteria createCriteria(Class persistentClass, String alias) {
- return session.createCriteria( persistentClass, alias );
- }
-
- public Query createFilter(Object collection, String queryString) throws HibernateException {
- return session.createFilter( collection, queryString );
- }
-
- public Query createQuery(String queryString) throws HibernateException {
- return session.createQuery( queryString );
- }
-
- public SQLQuery createSQLQuery(String queryString) throws HibernateException {
- return session.createSQLQuery( queryString );
- }
-
- public void delete(String entityName, Object object) throws HibernateException {
- session.delete( entityName, object );
- }
-
- public void delete(Object object) throws HibernateException {
- session.delete( object );
- }
-
- public void disableFilter(String filterName) {
- session.disableFilter( filterName );
- }
-
- public Connection disconnect() throws HibernateException {
- return session.disconnect();
- }
-
- public Filter enableFilter(String filterName) {
- return session.enableFilter( filterName );
- }
-
- public void evict(Object object) throws HibernateException {
- session.evict( object );
- }
-
- public void flush() throws HibernateException {
- session.flush();
- }
-
- public Object get(Class clazz, Serializable id) throws HibernateException {
- return session.get( clazz, id );
- }
-
- public Object get(Class clazz, Serializable id, LockMode lockMode) throws HibernateException {
- return session.get( clazz, id, lockMode );
- }
-
- public Object get(String entityName, Serializable id) throws HibernateException {
- return session.get( entityName, id );
- }
-
- public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
- return session.get( entityName, id, lockMode );
- }
-
- public CacheMode getCacheMode() {
- return session.getCacheMode();
- }
-
- public LockMode getCurrentLockMode(Object object) throws HibernateException {
- return session.getCurrentLockMode( object );
- }
-
- public Filter getEnabledFilter(String filterName) {
- return session.getEnabledFilter( filterName );
- }
-
- public EntityMode getEntityMode() {
- return session.getEntityMode();
- }
-
- public String getEntityName(Object object) throws HibernateException {
- return session.getEntityName( object );
- }
-
- public FlushMode getFlushMode() {
- return session.getFlushMode();
- }
-
- public Serializable getIdentifier(Object object) throws HibernateException {
- return session.getIdentifier( object );
- }
-
- public Query getNamedQuery(String queryName) throws HibernateException {
- return session.getNamedQuery( queryName );
- }
-
- public org.hibernate.Session getSession(EntityMode entityMode) {
- return session.getSession( entityMode );
- }
-
- public SessionFactory getSessionFactory() {
- return session.getSessionFactory();
- }
-
- public SessionStatistics getStatistics() {
- return session.getStatistics();
- }
-
- public Transaction getTransaction() {
- return session.getTransaction();
- }
-
- public boolean isConnected() {
- return session.isConnected();
- }
-
- public boolean isDirty() throws HibernateException {
- return session.isDirty();
- }
-
- public boolean isOpen() {
- return session.isOpen();
- }
-
- public Object load(String entityName, Serializable id) throws HibernateException {
- return session.load( entityName, id );
- }
-
- public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
- return session.load( entityName, id, lockMode );
- }
-
- public void load(Object object, Serializable id) throws HibernateException {
- session.load( object, id );
- }
-
- public Object load(Class theClass, Serializable id) throws HibernateException {
- return session.load( theClass, id );
- }
-
- public Object load(Class theClass, Serializable id, LockMode lockMode) throws HibernateException {
- return session.load( theClass, id, lockMode );
- }
-
- public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException {
- session.lock( entityName, object, lockMode );
- }
-
- public void lock(Object object, LockMode lockMode) throws HibernateException {
- session.lock( object, lockMode );
- }
-
- public Object merge(String entityName, Object object) throws HibernateException {
- return session.merge( entityName, object );
- }
-
- public Object merge(Object object) throws HibernateException {
- return session.merge( object );
- }
-
- public void persist(String entityName, Object object) throws HibernateException {
- session.persist( entityName, object );
- }
-
- public void persist(Object object) throws HibernateException {
- session.persist( object );
- }
-
- public void reconnect() throws HibernateException {
- session.reconnect();
- }
-
- public void reconnect(Connection connection) throws HibernateException {
- session.reconnect( connection );
- }
-
- public void refresh(Object object) throws HibernateException {
- session.refresh( object );
- }
-
- public void refresh(Object object, LockMode lockMode) throws HibernateException {
- session.refresh( object, lockMode );
- }
-
- public void replicate(String entityName, Object object, ReplicationMode replicationMode) throws HibernateException {
- session.replicate( entityName, object, replicationMode );
- }
-
- public void replicate(Object object, ReplicationMode replicationMode) throws HibernateException {
- session.replicate( object, replicationMode );
- }
-
- public Serializable save(String entityName, Object object) throws HibernateException {
- return session.save( entityName, object );
- }
-
- public Serializable save(Object object) throws HibernateException {
- return session.save( object );
- }
-
- public void saveOrUpdate(String entityName, Object object) throws HibernateException {
- session.saveOrUpdate( entityName, object );
- }
-
- public void saveOrUpdate(Object object) throws HibernateException {
- session.saveOrUpdate( object );
- }
-
- public void setCacheMode(CacheMode cacheMode) {
- session.setCacheMode( cacheMode );
- }
-
- public void setFlushMode(FlushMode flushMode) {
- session.setFlushMode( flushMode );
- }
-
- public void setReadOnly(Object entity, boolean readOnly) {
- session.setReadOnly( entity, readOnly );
- }
-
- public void update(String entityName, Object object) throws HibernateException {
- session.update( entityName, object );
- }
-
- public void update(Object object) throws HibernateException {
- session.update( object );
- }
-}
Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,25 @@
+//$Id: $
+package org.hibernate.search;
+
+import org.hibernate.classic.Session;
+import org.hibernate.Query;
+
+/**
+ * Extends the Hibernate {@link Session} with Full text search and indexing capabilities
+ *
+ * @author Emmanuel Bernard
+ */
+public interface FullTextSession extends Session {
+ /**
+ * Create a Query on top of a native Lucene Query returning the matching objects
+ * of type <code>entities</code> and their respective subclasses.
+ * If no entity is provided, no type filtering is done.
+ */
+ Query createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class... entities);
+
+ /**
+ * Force the (re)indexing of a given <b>managed</b> object.
+	 * Indexing is batched per transaction
+ */
+ void index(Object entity);
+}
Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/Search.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/Search.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/Search.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,19 @@
+//$Id: $
+package org.hibernate.search;
+
+import org.hibernate.Session;
+import org.hibernate.search.impl.FullTextSessionImpl;
+
+/**
+ * Helper class to get a FullTextSession out of a regular session
+ * @author Emmanuel Bernard
+ */
+public final class Search {
+
+ private Search() {
+ }
+
+ public static FullTextSession createFullTextSession(Session session) {
+ return new FullTextSessionImpl(session);
+ }
+}
\ No newline at end of file
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -17,7 +17,7 @@
* Manipulate the document to index the given value.
* A common implementation is to add a Field <code>name</code> to the given document following
* the parameters (<code>store</code>, <code>index</code>, <code>boost</code>) if the
- * <code>value></code> is not null
+ * <code>value</code> is not null
*/
void set(String name, Object value, Document document, Field.Store store, Field.Index index, Float boost);
}
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -26,7 +26,7 @@
import org.hibernate.search.bridge.BridgeFactory;
import org.hibernate.search.bridge.FieldBridge;
import org.hibernate.search.bridge.TwoWayFieldBridge;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.util.BinderHelper;
import org.hibernate.reflection.ReflectionManager;
@@ -305,7 +305,7 @@
}
}
- public static Serializable getDocumentId(FullTextEventListener listener, Class clazz, Document document) {
+ public static Serializable getDocumentId(FullTextIndexEventListener listener, Class clazz, Document document) {
DocumentBuilder builder = listener.getDocumentBuilders().get( clazz );
if ( builder == null ) throw new HibernateException( "No Lucene configuration set up for: " + clazz.getName() );
return (Serializable) builder.getIdBridge().get( builder.getIdKeywordName(), document );
Deleted: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -1,180 +0,0 @@
-//$Id$
-package org.hibernate.search.event;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.locks.ReentrantLock;
-import javax.transaction.Status;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.document.Document;
-import org.hibernate.HibernateException;
-import org.hibernate.cfg.AnnotationConfiguration;
-import org.hibernate.cfg.Configuration;
-import org.hibernate.event.AbstractEvent;
-import org.hibernate.event.Initializable;
-import org.hibernate.event.PostDeleteEvent;
-import org.hibernate.event.PostDeleteEventListener;
-import org.hibernate.event.PostInsertEvent;
-import org.hibernate.event.PostInsertEventListener;
-import org.hibernate.event.PostUpdateEvent;
-import org.hibernate.event.PostUpdateEventListener;
-import org.hibernate.search.Environment;
-import org.hibernate.search.annotations.Indexed;
-import org.hibernate.search.backend.AddWork;
-import org.hibernate.search.backend.DeleteWork;
-import org.hibernate.search.backend.UpdateWork;
-import org.hibernate.search.backend.Work;
-import org.hibernate.search.backend.WorkQueue;
-import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
-import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
-import org.hibernate.search.store.DirectoryProviderFactory;
-import org.hibernate.mapping.PersistentClass;
-import org.hibernate.reflection.ReflectionManager;
-import org.hibernate.reflection.XClass;
-import org.hibernate.util.ReflectHelper;
-
-/**
- * This listener supports setting a parent directory for all generated index files.
- * It also supports setting the analyzer class to be used.
- *
- * @author Gavin King
- * @author Emmanuel Bernard
- * @author Mattias Arbin
- */
-//TODO work on sharing the same indexWriters and readers across a single post operation...
-//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
-public class FullTextEventListener implements PostDeleteEventListener, PostInsertEventListener,
- PostUpdateEventListener, Initializable {
- ReflectionManager reflectionManager;
-
- //FIXME keeping this here is a bad decision since you might want to search indexes wo maintain it
- @Deprecated
- public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
- return documentBuilders;
- }
-
-
- private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
- //keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
- private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
- new HashMap<DirectoryProvider, ReentrantLock>();
- private boolean initialized;
-
- private static final Log log = LogFactory.getLog( FullTextEventListener.class );
-
- public void initialize(Configuration cfg) {
- if ( initialized ) return;
- //yuk
- reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
-
- Class analyzerClass;
- String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
- if ( analyzerClassName != null ) {
- try {
- analyzerClass = ReflectHelper.classForName( analyzerClassName );
- }
- catch (Exception e) {
- throw new HibernateException(
- "Lucene analyzer class '" + analyzerClassName + "' defined in property '" + Environment.ANALYZER_CLASS + "' could not be found.",
- e
- );
- }
- }
- else {
- analyzerClass = StandardAnalyzer.class;
- }
- // Initialize analyzer
- Analyzer analyzer;
- try {
- analyzer = (Analyzer) analyzerClass.newInstance();
- }
- catch (ClassCastException e) {
- throw new HibernateException(
- "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClassName
- );
- }
- catch (Exception e) {
- throw new HibernateException( "Failed to instantiate lucene analyzer with type " + analyzerClassName );
- }
-
- Iterator iter = cfg.getClassMappings();
- DirectoryProviderFactory factory = new DirectoryProviderFactory();
- while ( iter.hasNext() ) {
- PersistentClass clazz = (PersistentClass) iter.next();
- Class<?> mappedClass = clazz.getMappedClass();
- if ( mappedClass != null ) {
- XClass mappedXClass = reflectionManager.toXClass( mappedClass );
- if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
- DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg );
- if ( !lockableDirectoryProviders.containsKey( provider ) ) {
- lockableDirectoryProviders.put( provider, new ReentrantLock() );
- }
- final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
- mappedXClass, analyzer, provider, reflectionManager
- );
-
- documentBuilders.put( mappedClass, documentBuilder );
- }
- }
- }
- Set<Class> indexedClasses = documentBuilders.keySet();
- for ( DocumentBuilder builder : documentBuilders.values() ) {
- builder.postInitialize( indexedClasses );
- }
- initialized = true;
- }
-
- public void onPostDelete(PostDeleteEvent event) {
- if ( documentBuilders.containsKey( event.getEntity().getClass() ) ) {
- DeleteWork work = new DeleteWork( event.getId(), event.getEntity().getClass() );
- processWork( work, event );
- }
- }
-
- public void onPostInsert(PostInsertEvent event) {
- final Object entity = event.getEntity();
- DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
- if ( builder != null ) {
- Serializable id = event.getId();
- Document doc = builder.getDocument( entity, id );
- AddWork work = new AddWork( id, entity.getClass(), doc );
- processWork( work, event );
- }
- }
-
- public void onPostUpdate(PostUpdateEvent event) {
- final Object entity = event.getEntity();
- DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
- if ( builder != null ) {
- Serializable id = event.getId();
- Document doc = builder.getDocument( entity, id );
- UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
- processWork( work, event );
- }
- }
-
- private void processWork(Work work, AbstractEvent event) {
- WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
- workQueue.add( work );
- PostTransactionWorkQueueSynchronization sync = new PostTransactionWorkQueueSynchronization( workQueue );
- if ( event.getSession().isTransactionInProgress() ) {
- event.getSession().getTransaction().registerSynchronization( sync );
- }
- else {
- sync.afterCompletion( Status.STATUS_COMMITTED );
- }
- }
-
- public Map<DirectoryProvider, ReentrantLock> getLockableDirectoryProviders() {
- return lockableDirectoryProviders;
- }
-}
Copied: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java (from rev 10743, branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java)
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java 2006-11-07 01:16:53 UTC (rev 10743)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,180 @@
+//$Id$
+package org.hibernate.search.event;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
+import javax.transaction.Status;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.hibernate.HibernateException;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.event.AbstractEvent;
+import org.hibernate.event.Initializable;
+import org.hibernate.event.PostDeleteEvent;
+import org.hibernate.event.PostDeleteEventListener;
+import org.hibernate.event.PostInsertEvent;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.event.PostUpdateEvent;
+import org.hibernate.event.PostUpdateEventListener;
+import org.hibernate.search.Environment;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.backend.AddWork;
+import org.hibernate.search.backend.DeleteWork;
+import org.hibernate.search.backend.UpdateWork;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
+import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.DirectoryProviderFactory;
+import org.hibernate.mapping.PersistentClass;
+import org.hibernate.reflection.ReflectionManager;
+import org.hibernate.reflection.XClass;
+import org.hibernate.util.ReflectHelper;
+
+/**
+ * This listener supports setting a parent directory for all generated index files.
+ * It also supports setting the analyzer class to be used.
+ *
+ * @author Gavin King
+ * @author Emmanuel Bernard
+ * @author Mattias Arbin
+ */
+//TODO work on sharing the same indexWriters and readers across a single post operation...
+//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
+public class FullTextIndexEventListener implements PostDeleteEventListener, PostInsertEventListener,
+ PostUpdateEventListener, Initializable {
+ ReflectionManager reflectionManager;
+
+ //FIXME keeping this here is a bad decision since you might want to search indexes wo maintain it
+ @Deprecated
+ public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
+ return documentBuilders;
+ }
+
+
+ private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
+	//keep track of the index modifiers per DirectoryProvider since multiple entities can use the same directory provider
+ private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
+ new HashMap<DirectoryProvider, ReentrantLock>();
+ private boolean initialized;
+
+ private static final Log log = LogFactory.getLog( FullTextIndexEventListener.class );
+
+ public void initialize(Configuration cfg) {
+ if ( initialized ) return;
+ //yuk
+ reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
+
+ Class analyzerClass;
+ String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
+ if ( analyzerClassName != null ) {
+ try {
+ analyzerClass = ReflectHelper.classForName( analyzerClassName );
+ }
+ catch (Exception e) {
+ throw new HibernateException(
+ "Lucene analyzer class '" + analyzerClassName + "' defined in property '" + Environment.ANALYZER_CLASS + "' could not be found.",
+ e
+ );
+ }
+ }
+ else {
+ analyzerClass = StandardAnalyzer.class;
+ }
+ // Initialize analyzer
+ Analyzer analyzer;
+ try {
+ analyzer = (Analyzer) analyzerClass.newInstance();
+ }
+ catch (ClassCastException e) {
+ throw new HibernateException(
+ "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClassName
+ );
+ }
+ catch (Exception e) {
+ throw new HibernateException( "Failed to instantiate lucene analyzer with type " + analyzerClassName );
+ }
+
+ Iterator iter = cfg.getClassMappings();
+ DirectoryProviderFactory factory = new DirectoryProviderFactory();
+ while ( iter.hasNext() ) {
+ PersistentClass clazz = (PersistentClass) iter.next();
+ Class<?> mappedClass = clazz.getMappedClass();
+ if ( mappedClass != null ) {
+ XClass mappedXClass = reflectionManager.toXClass( mappedClass );
+ if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
+ DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg );
+ if ( !lockableDirectoryProviders.containsKey( provider ) ) {
+ lockableDirectoryProviders.put( provider, new ReentrantLock() );
+ }
+ final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
+ mappedXClass, analyzer, provider, reflectionManager
+ );
+
+ documentBuilders.put( mappedClass, documentBuilder );
+ }
+ }
+ }
+ Set<Class> indexedClasses = documentBuilders.keySet();
+ for ( DocumentBuilder builder : documentBuilders.values() ) {
+ builder.postInitialize( indexedClasses );
+ }
+ initialized = true;
+ }
+
+ public void onPostDelete(PostDeleteEvent event) {
+ if ( documentBuilders.containsKey( event.getEntity().getClass() ) ) {
+ DeleteWork work = new DeleteWork( event.getId(), event.getEntity().getClass() );
+ processWork( work, event );
+ }
+ }
+
+ public void onPostInsert(PostInsertEvent event) {
+ final Object entity = event.getEntity();
+ DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+ if ( builder != null ) {
+ Serializable id = event.getId();
+ Document doc = builder.getDocument( entity, id );
+ AddWork work = new AddWork( id, entity.getClass(), doc );
+ processWork( work, event );
+ }
+ }
+
+ public void onPostUpdate(PostUpdateEvent event) {
+ final Object entity = event.getEntity();
+ DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+ if ( builder != null ) {
+ Serializable id = event.getId();
+ Document doc = builder.getDocument( entity, id );
+ UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
+ processWork( work, event );
+ }
+ }
+
+ private void processWork(Work work, AbstractEvent event) {
+ WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
+ workQueue.add( work );
+ PostTransactionWorkQueueSynchronization sync = new PostTransactionWorkQueueSynchronization( workQueue );
+ if ( event.getSession().isTransactionInProgress() ) {
+ event.getSession().getTransaction().registerSynchronization( sync );
+ }
+ else {
+ sync.afterCompletion( Status.STATUS_COMMITTED );
+ }
+ }
+
+ public Map<DirectoryProvider, ReentrantLock> getLockableDirectoryProviders() {
+ return lockableDirectoryProviders;
+ }
+}
Property changes on: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Author Date Id Revision
Name: svn:eol-style
+ native
Copied: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java (from rev 10743, branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java)
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java 2006-11-07 01:16:53 UTC (rev 10743)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,466 @@
+//$Id: $
+package org.hibernate.search.impl;
+
+import java.io.Serializable;
+import java.sql.Connection;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.transaction.Status;
+
+import org.hibernate.CacheMode;
+import org.hibernate.Criteria;
+import org.hibernate.EntityMode;
+import org.hibernate.Filter;
+import org.hibernate.FlushMode;
+import org.hibernate.HibernateException;
+import org.hibernate.LockMode;
+import org.hibernate.Query;
+import org.hibernate.ReplicationMode;
+import org.hibernate.SQLQuery;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.engine.query.ParameterMetadata;
+import org.hibernate.impl.SessionImpl;
+import org.hibernate.search.query.FullTextQueryImpl;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.util.ContextHelper;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.backend.UpdateWork;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
+import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.stat.SessionStatistics;
+import org.hibernate.type.Type;
+import org.apache.lucene.document.Document;
+
+/**
+ * Lucene Full text search aware session
+ *
+ * @author Emmanuel Bernard
+ */
+public class FullTextSessionImpl implements FullTextSession {
+ private final SessionImpl session;
+ private PostTransactionWorkQueueSynchronization postTransactionWorkQueueSynch;
+
+ public FullTextSessionImpl(Session session) {
+ this.session = (SessionImpl) session;
+ }
+
+ /**
+ * Execute a Lucene query and retrieve managed objects of type entities (or their indexed subclasses)
+ * If entities is empty, include all indexed entities
+ *
+ * @param entities must be immutable for the lifetime of the query object
+ */
+ public Query createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class... entities) {
+ return new FullTextQueryImpl( luceneQuery, entities, session, new ParameterMetadata(null, null) );
+ }
+
+ /**
+ * (re)index an entity.
+ * Non indexable entities are ignored
+ * The entity must be associated with the session
+ *
+ * @param entity must not be null
+ */
+ public void index(Object entity) {
+ if (entity == null) return;
+ Class clazz = entity.getClass();
+ FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
+ DocumentBuilder<Object> builder = listener.getDocumentBuilders().get( clazz );
+ if ( builder != null ) {
+ Serializable id = session.getIdentifier( entity );
+ Document doc = builder.getDocument( entity, id );
+ UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
+ processWork( work, listener.getDocumentBuilders(), listener.getLockableDirectoryProviders() );
+ }
+ //TODO
+ //need to add elements in a queue kept at the Session level
+ //the queue will be processed by a Lucene(Auto)FlushEventListener
+		//note that we could keep this queue somewhere in the event listener in the meantime but that requires
+		// a synchronized hashmap holding this queue on a per session basis plus some session housekeeping (yuk)
+		//another solution would be to subclass SessionImpl instead of having this LuceneSession delegation model
+ // this is an open discussion
+ }
+
+ private void processWork(Work work, Map<Class, DocumentBuilder<Object>> documentBuilders,
+ Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ if ( session.isTransactionInProgress() ) {
+ if ( postTransactionWorkQueueSynch == null || postTransactionWorkQueueSynch.isConsumed() ) {
+ postTransactionWorkQueueSynch = createWorkQueueSync( documentBuilders, lockableDirectoryProviders);
+ session.getTransaction().registerSynchronization( postTransactionWorkQueueSynch );
+ }
+ postTransactionWorkQueueSynch.add( work );
+ }
+ else {
+ //no transaction work right away
+ PostTransactionWorkQueueSynchronization sync =
+ createWorkQueueSync( documentBuilders, lockableDirectoryProviders );
+ sync.add( work );
+ sync.afterCompletion( Status.STATUS_COMMITTED );
+ }
+ }
+
+ private PostTransactionWorkQueueSynchronization createWorkQueueSync(
+ Map<Class, DocumentBuilder<Object>> documentBuilders,
+ Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
+ return new PostTransactionWorkQueueSynchronization( workQueue );
+ }
+
+ public Query createSQLQuery(String sql, String returnAlias, Class returnClass) {
+ return session.createSQLQuery( sql, returnAlias, returnClass );
+ }
+
+ public Query createSQLQuery(String sql, String[] returnAliases, Class[] returnClasses) {
+ return session.createSQLQuery( sql, returnAliases, returnClasses );
+ }
+
+ public int delete(String query) throws HibernateException {
+ return session.delete( query );
+ }
+
+ public int delete(String query, Object value, Type type) throws HibernateException {
+ return session.delete( query, value, type );
+ }
+
+ public int delete(String query, Object[] values, Type[] types) throws HibernateException {
+ return session.delete( query, values, types );
+ }
+
+ public Collection filter(Object collection, String filter) throws HibernateException {
+ return session.filter( collection, filter );
+ }
+
+ public Collection filter(Object collection, String filter, Object value, Type type) throws HibernateException {
+ return session.filter( collection, filter, value, type );
+ }
+
+ public Collection filter(Object collection, String filter, Object[] values, Type[] types) throws HibernateException {
+ return session.filter( collection, filter, values, types );
+ }
+
+ public List find(String query) throws HibernateException {
+ return session.find( query );
+ }
+
+ public List find(String query, Object value, Type type) throws HibernateException {
+ return session.find( query, value, type );
+ }
+
+ public List find(String query, Object[] values, Type[] types) throws HibernateException {
+ return session.find( query, values, types );
+ }
+
+ public Iterator iterate(String query) throws HibernateException {
+ return session.iterate( query );
+ }
+
+ public Iterator iterate(String query, Object value, Type type) throws HibernateException {
+ return session.iterate( query, value, type );
+ }
+
+ public Iterator iterate(String query, Object[] values, Type[] types) throws HibernateException {
+ return session.iterate( query, values, types );
+ }
+
+ public void save(String entityName, Object object, Serializable id) throws HibernateException {
+ session.save( entityName, object, id );
+ }
+
+ public void save(Object object, Serializable id) throws HibernateException {
+ session.save( object, id );
+ }
+
+ public Object saveOrUpdateCopy(String entityName, Object object) throws HibernateException {
+ return session.saveOrUpdateCopy( entityName, object );
+ }
+
+ public Object saveOrUpdateCopy(String entityName, Object object, Serializable id) throws HibernateException {
+ return session.saveOrUpdateCopy( entityName, object, id );
+ }
+
+ public Object saveOrUpdateCopy(Object object) throws HibernateException {
+ return session.saveOrUpdateCopy( object );
+ }
+
+ public Object saveOrUpdateCopy(Object object, Serializable id) throws HibernateException {
+ return session.saveOrUpdateCopy( object, id );
+ }
+
+ public void update(String entityName, Object object, Serializable id) throws HibernateException {
+ session.update( entityName, object, id );
+ }
+
+ public void update(Object object, Serializable id) throws HibernateException {
+ session.update( object, id );
+ }
+
+ public Transaction beginTransaction() throws HibernateException {
+ return session.beginTransaction();
+ }
+
+ public void cancelQuery() throws HibernateException {
+ session.cancelQuery();
+ }
+
+ public void clear() {
+ session.clear();
+ }
+
+ public Connection close() throws HibernateException {
+ return session.close();
+ }
+
+ public Connection connection() throws HibernateException {
+ return session.connection();
+ }
+
+ public boolean contains(Object object) {
+ return session.contains( object );
+ }
+
+ public Criteria createCriteria(String entityName) {
+ return session.createCriteria( entityName );
+ }
+
+ public Criteria createCriteria(String entityName, String alias) {
+ return session.createCriteria( entityName, alias );
+ }
+
+ public Criteria createCriteria(Class persistentClass) {
+ return session.createCriteria( persistentClass );
+ }
+
+ public Criteria createCriteria(Class persistentClass, String alias) {
+ return session.createCriteria( persistentClass, alias );
+ }
+
+ public Query createFilter(Object collection, String queryString) throws HibernateException {
+ return session.createFilter( collection, queryString );
+ }
+
+ public Query createQuery(String queryString) throws HibernateException {
+ return session.createQuery( queryString );
+ }
+
+ public SQLQuery createSQLQuery(String queryString) throws HibernateException {
+ return session.createSQLQuery( queryString );
+ }
+
+ public void delete(String entityName, Object object) throws HibernateException {
+ session.delete( entityName, object );
+ }
+
+ public void delete(Object object) throws HibernateException {
+ session.delete( object );
+ }
+
+ public void disableFilter(String filterName) {
+ session.disableFilter( filterName );
+ }
+
+ public Connection disconnect() throws HibernateException {
+ return session.disconnect();
+ }
+
+ public Filter enableFilter(String filterName) {
+ return session.enableFilter( filterName );
+ }
+
+ public void evict(Object object) throws HibernateException {
+ session.evict( object );
+ }
+
+ public void flush() throws HibernateException {
+ session.flush();
+ }
+
+ public Object get(Class clazz, Serializable id) throws HibernateException {
+ return session.get( clazz, id );
+ }
+
+ public Object get(Class clazz, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.get( clazz, id, lockMode );
+ }
+
+ public Object get(String entityName, Serializable id) throws HibernateException {
+ return session.get( entityName, id );
+ }
+
+ public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.get( entityName, id, lockMode );
+ }
+
+ public CacheMode getCacheMode() {
+ return session.getCacheMode();
+ }
+
+ public LockMode getCurrentLockMode(Object object) throws HibernateException {
+ return session.getCurrentLockMode( object );
+ }
+
+ public Filter getEnabledFilter(String filterName) {
+ return session.getEnabledFilter( filterName );
+ }
+
+ public EntityMode getEntityMode() {
+ return session.getEntityMode();
+ }
+
+ public String getEntityName(Object object) throws HibernateException {
+ return session.getEntityName( object );
+ }
+
+ public FlushMode getFlushMode() {
+ return session.getFlushMode();
+ }
+
+ public Serializable getIdentifier(Object object) throws HibernateException {
+ return session.getIdentifier( object );
+ }
+
+ public Query getNamedQuery(String queryName) throws HibernateException {
+ return session.getNamedQuery( queryName );
+ }
+
+ public org.hibernate.Session getSession(EntityMode entityMode) {
+ return session.getSession( entityMode );
+ }
+
+ public SessionFactory getSessionFactory() {
+ return session.getSessionFactory();
+ }
+
+ public SessionStatistics getStatistics() {
+ return session.getStatistics();
+ }
+
+ public Transaction getTransaction() {
+ return session.getTransaction();
+ }
+
+ public boolean isConnected() {
+ return session.isConnected();
+ }
+
+ public boolean isDirty() throws HibernateException {
+ return session.isDirty();
+ }
+
+ public boolean isOpen() {
+ return session.isOpen();
+ }
+
+ public Object load(String entityName, Serializable id) throws HibernateException {
+ return session.load( entityName, id );
+ }
+
+ public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.load( entityName, id, lockMode );
+ }
+
+ public void load(Object object, Serializable id) throws HibernateException {
+ session.load( object, id );
+ }
+
+ public Object load(Class theClass, Serializable id) throws HibernateException {
+ return session.load( theClass, id );
+ }
+
+ public Object load(Class theClass, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.load( theClass, id, lockMode );
+ }
+
+ public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException {
+ session.lock( entityName, object, lockMode );
+ }
+
+ public void lock(Object object, LockMode lockMode) throws HibernateException {
+ session.lock( object, lockMode );
+ }
+
+ public Object merge(String entityName, Object object) throws HibernateException {
+ return session.merge( entityName, object );
+ }
+
+ public Object merge(Object object) throws HibernateException {
+ return session.merge( object );
+ }
+
+ public void persist(String entityName, Object object) throws HibernateException {
+ session.persist( entityName, object );
+ }
+
+ public void persist(Object object) throws HibernateException {
+ session.persist( object );
+ }
+
+ public void reconnect() throws HibernateException {
+ session.reconnect();
+ }
+
+ public void reconnect(Connection connection) throws HibernateException {
+ session.reconnect( connection );
+ }
+
+ public void refresh(Object object) throws HibernateException {
+ session.refresh( object );
+ }
+
+ public void refresh(Object object, LockMode lockMode) throws HibernateException {
+ session.refresh( object, lockMode );
+ }
+
+ public void replicate(String entityName, Object object, ReplicationMode replicationMode) throws HibernateException {
+ session.replicate( entityName, object, replicationMode );
+ }
+
+ public void replicate(Object object, ReplicationMode replicationMode) throws HibernateException {
+ session.replicate( object, replicationMode );
+ }
+
+ public Serializable save(String entityName, Object object) throws HibernateException {
+ return session.save( entityName, object );
+ }
+
+ public Serializable save(Object object) throws HibernateException {
+ return session.save( object );
+ }
+
+ public void saveOrUpdate(String entityName, Object object) throws HibernateException {
+ session.saveOrUpdate( entityName, object );
+ }
+
+ public void saveOrUpdate(Object object) throws HibernateException {
+ session.saveOrUpdate( object );
+ }
+
+ public void setCacheMode(CacheMode cacheMode) {
+ session.setCacheMode( cacheMode );
+ }
+
+ public void setFlushMode(FlushMode flushMode) {
+ session.setFlushMode( flushMode );
+ }
+
+ public void setReadOnly(Object entity, boolean readOnly) {
+ session.setReadOnly( entity, readOnly );
+ }
+
+ public void update(String entityName, Object object) throws HibernateException {
+ session.update( entityName, object );
+ }
+
+ public void update(Object object) throws HibernateException {
+ session.update( object );
+ }
+}
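The block above is the boilerplate half of a Session decorator: every org.hibernate.Session call is forwarded to the wrapped session, so the full-text wrapper only needs to add search-specific behavior on top. A minimal usage sketch follows; the sessionFactory variable and the Book entity are assumptions for illustration, while Search.createFullTextSession() and createFullTextQuery() are the calls introduced by the diffs below:

import java.util.List;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.hibernate.Session;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;

// open a plain session, then decorate it; all Session calls still
// reach the underlying session through the delegation shown above
Session raw = sessionFactory.openSession();
FullTextSession fts = Search.createFullTextSession( raw );

// build a Lucene query and execute it through the wrapper
QueryParser parser = new QueryParser( "id", new StandardAnalyzer() );
List result = fts.createFullTextQuery( parser.parse( "summary:hibernate" ), Book.class ).list();

fts.close(); // delegates to the wrapped session's close()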
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -34,7 +34,7 @@
import org.hibernate.engine.query.ParameterMetadata;
import org.hibernate.impl.AbstractQueryImpl;
import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
import org.hibernate.search.util.ContextHelper;
/**
@@ -71,7 +71,7 @@
//user stop using it
//scrollable is better in this area
- FullTextEventListener listener = ContextHelper.getLuceneEventListener( session );
+ FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
//find the directories
Searcher searcher = buildSearcher( listener );
try {
@@ -103,9 +103,9 @@
}
}
- public ScrollableResults scroll() throws HibernateException {
+ public ScrollableResults scroll() throws HibernateException {
//keep the searcher open until the resultset is closed
- FullTextEventListener listener = ContextHelper.getLuceneEventListener( session );;
+ FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
//find the directories
Searcher searcher = buildSearcher( listener );
Hits hits;
@@ -134,7 +134,7 @@
}
public List list() throws HibernateException {
- FullTextEventListener listener = ContextHelper.getLuceneEventListener( session );;
+ FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
//find the directories
Searcher searcher = buildSearcher( listener );
Hits hits;
@@ -210,7 +210,7 @@
}
//TODO change classesAndSubclasses by side effect, which is a mismatch with the Searcher return, fix that.
- private Searcher buildSearcher(FullTextEventListener listener) {
+ private Searcher buildSearcher(FullTextIndexEventListener listener) {
Map<Class, DocumentBuilder<Object>> builders = listener.getDocumentBuilders();
Set<Directory> directories = new HashSet<Directory>();
if ( classes == null || classes.length == 0 ) {
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -9,6 +9,7 @@
/**
* @author Emmanuel Bernard
*/
+//TODO load the next batch-size elements to benefit from batch-size
public class IteratorImpl implements Iterator {
private final List<EntityInfo> entityInfos;
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -18,7 +18,7 @@
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
import org.hibernate.type.Type;
/**
@@ -31,11 +31,11 @@
private final int max;
private int current;
private final Session session;
- private final FullTextEventListener listener;
+ private final FullTextIndexEventListener listener;
private EntityInfo[] entityInfos;
public ScrollableResultsImpl(
- Searcher searcher, Hits hits, int first, int max, Session session, FullTextEventListener listener
+ Searcher searcher, Hits hits, int first, int max, Session session, FullTextIndexEventListener listener
) {
this.searcher = searcher;
this.hits = hits;
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -4,20 +4,20 @@
import org.hibernate.HibernateException;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.event.PostInsertEventListener;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
/**
* @author Emmanuel Bernard
*/
public abstract class ContextHelper {
- public static FullTextEventListener getLuceneEventListener(SessionImplementor session) {
+ public static FullTextIndexEventListener getLuceneEventListener(SessionImplementor session) {
PostInsertEventListener[] listeners = session.getListeners().getPostInsertEventListeners();
- FullTextEventListener listener = null;
+ FullTextIndexEventListener listener = null;
//FIXME this sucks since we mandate the event listener use
for ( PostInsertEventListener candidate : listeners ) {
- if ( candidate instanceof FullTextEventListener ) {
- listener = (FullTextEventListener) candidate;
+ if ( candidate instanceof FullTextIndexEventListener ) {
+ listener = (FullTextIndexEventListener) candidate;
break;
}
}
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -19,7 +19,7 @@
import org.hibernate.event.PostUpdateEventListener;
import org.hibernate.search.Environment;
import org.hibernate.search.store.FSDirectoryProvider;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
/**
* @author Gavin King
@@ -183,7 +183,7 @@
cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
cfg.setProperty( "hibernate.search.Clock.directory_provider", FSDirectoryProvider.class.getName() );
cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
- FullTextEventListener del = new FullTextEventListener();
+ FullTextIndexEventListener del = new FullTextIndexEventListener();
cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -8,7 +8,7 @@
import org.hibernate.event.PostUpdateEventListener;
import org.hibernate.search.Environment;
import org.hibernate.search.store.RAMDirectoryProvider;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
import org.hibernate.HibernateException;
import org.hibernate.impl.SessionFactoryImpl;
@@ -25,13 +25,13 @@
return getLuceneEventListener().getDocumentBuilders().get( clazz ).getDirectoryProvider().getDirectory();
}
- private FullTextEventListener getLuceneEventListener() {
+ private FullTextIndexEventListener getLuceneEventListener() {
PostInsertEventListener[] listeners = ( (SessionFactoryImpl) getSessions() ).getEventListeners().getPostInsertEventListeners();
- FullTextEventListener listener = null;
+ FullTextIndexEventListener listener = null;
//FIXME this sucks since we mandate the event listener use
for (PostInsertEventListener candidate : listeners) {
- if (candidate instanceof FullTextEventListener ) {
- listener = (FullTextEventListener) candidate;
+ if (candidate instanceof FullTextIndexEventListener ) {
+ listener = (FullTextIndexEventListener) candidate;
break;
}
}
@@ -42,7 +42,7 @@
protected void configure(org.hibernate.cfg.Configuration cfg) {
cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
- FullTextEventListener del = new FullTextEventListener();
+ FullTextIndexEventListener del = new FullTextIndexEventListener();
cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );
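Both test classes wire the renamed listener identically: a single FullTextIndexEventListener instance is registered for all three post-commit events, so inserts, updates and deletes all reach the Lucene index. The same wiring, extracted as a standalone sketch (all calls and property names are taken from the diffs above):

import org.apache.lucene.analysis.StopAnalyzer;
import org.hibernate.cfg.Configuration;
import org.hibernate.event.PostDeleteEventListener;
import org.hibernate.event.PostInsertEventListener;
import org.hibernate.event.PostUpdateEventListener;
import org.hibernate.search.Environment;
import org.hibernate.search.event.FullTextIndexEventListener;
import org.hibernate.search.store.RAMDirectoryProvider;

Configuration cfg = new Configuration();
cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );

// one listener instance serves all three event types
FullTextIndexEventListener listener = new FullTextIndexEventListener();
cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[] { listener } );
cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[] { listener } );
cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[] { listener } );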
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -11,8 +11,9 @@
import org.hibernate.cfg.Configuration;
import org.hibernate.search.test.TestCase;
+import org.hibernate.search.Environment;
import org.hibernate.search.FullTextSession;
-import org.hibernate.search.Environment;
+import org.hibernate.search.Search;
import org.apache.lucene.search.Query;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
@@ -41,17 +42,17 @@
tx.commit();
tx = s.beginTransaction();
- FullTextSession session = new FullTextSession(s);
+ FullTextSession session = Search.createFullTextSession(s);
QueryParser parser = new QueryParser("id", new StandardAnalyzer() );
Query query;
List result;
query = parser.parse("double2:[2.1 TO 2.1] AND float2:[2.1 TO 2.1] AND int2:[2 TO 2.1] AND long2:[2 TO 2.1]");
- result = session.createLuceneQuery(query).list();
+ result = session.createFullTextQuery(query).list();
assertEquals( "find primitives and do not fail on null", 1, result.size() );
query = parser.parse("double1:[2.1 TO 2.1] OR float1:[2.1 TO 2.1] OR int1:[2 TO 2.1] OR long1:[2 TO 2.1]");
- result = session.createLuceneQuery(query).list();
+ result = session.createFullTextQuery(query).list();
assertEquals( "null elements should not be stored", 0, result.size() ); //the query is dumb because restrictive
s.delete( s.get( Cloud.class, cloud.getId() ) );
@@ -71,17 +72,17 @@
tx.commit();
tx = s.beginTransaction();
- FullTextSession session = new FullTextSession(s);
+ FullTextSession session = Search.createFullTextSession(s);
QueryParser parser = new QueryParser("id", new SimpleAnalyzer() );
Query query;
List result;
query = parser.parse("customFieldBridge:This AND customStringBridge:This");
- result = session.createLuceneQuery(query).list();
+ result = session.createFullTextQuery(query).list();
assertEquals( "Properties not mapped", 1, result.size() );
query = parser.parse("customFieldBridge:by AND customStringBridge:is");
- result = session.createLuceneQuery(query).list();
+ result = session.createFullTextQuery(query).list();
assertEquals( "Custom types not taken into account", 0, result.size() );
s.delete( s.get( Cloud.class, cloud.getId() ) );
@@ -113,7 +114,7 @@
tx.commit();
tx = s.beginTransaction();
- FullTextSession session = new FullTextSession(s);
+ FullTextSession session = Search.createFullTextSession(s);
QueryParser parser = new QueryParser("id", new StandardAnalyzer() );
Query query;
List result;
@@ -127,7 +128,7 @@
+ " AND dateSecond:[20001214 TO 20001215034302]"
+ " AND dateMillisecond:[20001214 TO 20001215034302005]"
);
- result = session.createLuceneQuery(query).list();
+ result = session.createFullTextQuery(query).list();
assertEquals( "Date not found or not property truncated", 1, result.size() );
s.delete( s.get( Cloud.class, cloud.getId() ) );
Added: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,43 @@
+//$Id: $
+package org.hibernate.search.test.bridge;
+
+import java.util.Date;
+import java.util.Calendar;
+import java.util.GregorianCalendar;
+import java.util.TimeZone;
+
+import org.hibernate.search.bridge.FieldBridge;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+/**
+ * Store the date in 3 different fields (year, month, day)
+ * to ease range queries per year, month or day
+ * (e.g. get all the elements of December for the last 5 years)
+ *
+ * @author Emmanuel Bernard
+ */
+public class DateSplitBridge implements FieldBridge {
+ private final static TimeZone GMT = TimeZone.getTimeZone("GMT");
+
+ public void set(String name, Object value, Document document, Field.Store store, Field.Index index, Float boost) {
+ Date date = (Date) value;
+ Calendar cal = GregorianCalendar.getInstance( GMT );
+ cal.setTime( date );
+ int year = cal.get( Calendar.YEAR );
+ int month = cal.get( Calendar.MONTH ) + 1;
+ int day = cal.get( Calendar.DAY_OF_MONTH );
+ //set year
+ Field field = new Field( name + ".year", String.valueOf(year), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ //set month and pad it if needed
+ field = new Field( name + ".month", ( month < 10 ? "0" : "" ) + String.valueOf(month), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ //set day and pad it if needed
+ field = new Field( name + ".day", ( day < 10 ? "0" : "" ) + String.valueOf(day), store, index );
+ if ( boost != null ) field.setBoost( boost );
+ document.add( field );
+ }
+}
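A FieldBridge of this kind is attached to a mapped property through the @FieldBridge annotation from org.hibernate.search.annotations. A minimal sketch, assuming a hypothetical Meeting entity and assuming the annotation accepts an impl member as in later Hibernate Search releases; the rest of the entity's index mapping is omitted here:

import java.util.Date;
import org.hibernate.search.annotations.FieldBridge;

public class Meeting {
    // the bridge splits this single property into scheduled.year,
    // scheduled.month and scheduled.day fields in the index
    @FieldBridge( impl = DateSplitBridge.class )
    private Date scheduled;
}

A range query can then target one component at a time, e.g. parser.parse( "scheduled.month:12" ) to match any December regardless of year.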
Added: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,44 @@
+//$Id: $
+package org.hibernate.search.test.bridge;
+
+import java.util.Map;
+
+import org.hibernate.search.bridge.ParameterizedBridge;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+
+/**
+ * Padding Integer bridge.
+ * All numbers are left-padded with zeros to a configurable width (5 digits by default)
+ *
+ * @author Emmanuel Bernard
+ */
+public class PaddedIntegerBridge implements TwoWayStringBridge, ParameterizedBridge {
+
+ public static final String PADDING_PROPERTY = "padding";
+
+ private int padding = 5; //default
+
+ public void setParameterValues(Map parameters) {
+ Object padding = parameters.get( PADDING_PROPERTY );
+ //parameter values may arrive as Strings (e.g. via @Parameter), so parse defensively
+ if (padding != null) this.padding = Integer.parseInt( padding.toString() );
+ }
+
+ public String objectToString(Object object) {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > padding) throw new IllegalArgumentException( "Number too large to be padded" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+
+ public Object stringToObject(String stringValue) {
+ return new Integer(stringValue);
+ }
+}
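Because the bridge implements ParameterizedBridge, the default width of 5 can be overridden where the bridge is applied. A minimal sketch, assuming a hypothetical Invoice entity and assuming @FieldBridge exposes impl and params members as in later Hibernate Search releases; @Parameter is org.hibernate.annotations.Parameter:

import org.hibernate.annotations.Parameter;
import org.hibernate.search.annotations.FieldBridge;

public class Invoice {
    // the name/value pair below reaches setParameterValues()
    // as an entry in the parameters map
    @FieldBridge(
            impl = PaddedIntegerBridge.class,
            params = @Parameter( name = "padding", value = "10" )
    )
    private Integer amount;
}

Padding to a fixed width matters because Lucene range queries compare terms lexicographically: "19" sorts after "100", but "00019" sorts before "00100".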
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -5,6 +5,7 @@
import org.hibernate.search.test.TestCase;
import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.apache.lucene.queryParser.QueryParser;
@@ -24,10 +25,10 @@
s.clear();
- FullTextSession session = new FullTextSession(s);
+ FullTextSession session = Search.createFullTextSession(s);
tx = session.beginTransaction();
QueryParser p = new QueryParser("id", new StandardAnalyzer( ) );
- List result = session.createLuceneQuery( p.parse( "Abstract:Hibernate" ) ).list();
+ List result = session.createFullTextQuery( p.parse( "Abstract:Hibernate" ) ).list();
assertEquals( "Query by field", 1, result.size() );
s.delete( result.get( 0 ) );
tx.commit();
@@ -48,10 +49,10 @@
s.clear();
- FullTextSession session = new FullTextSession(s);
+ FullTextSession session = Search.createFullTextSession(s);
tx = session.beginTransaction();
QueryParser p = new QueryParser("id", new StandardAnalyzer( ) );
- List result = session.createLuceneQuery( p.parse( "title:Action OR Abstract:Action" ) ).list();
+ List result = session.createFullTextQuery( p.parse( "title:Action OR Abstract:Action" ) ).list();
assertEquals( "Query by field", 2, result.size() );
assertEquals( "@Boost fails", "Hibernate in Action", ( (Document) result.get( 0 ) ).getTitle() );
s.delete( result.get( 0 ) );
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -3,6 +3,7 @@
import org.hibernate.search.test.TestCase;
import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
import org.hibernate.Transaction;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.analysis.StopAnalyzer;
@@ -16,7 +17,7 @@
public class InheritanceTest extends TestCase {
public void testInheritance() throws Exception {
- FullTextSession s = new FullTextSession( openSession() );
+ FullTextSession s = Search.createFullTextSession( openSession() );
Transaction tx = s.beginTransaction();
Animal a = new Animal();
a.setName("Shark Jr");
@@ -34,19 +35,19 @@
org.hibernate.Query hibQuery;
query = parser.parse( "Elephant" );
- hibQuery = s.createLuceneQuery( query, Mammal.class );
+ hibQuery = s.createFullTextQuery( query, Mammal.class );
List result = hibQuery.list();
assertNotNull( result );
assertEquals( "Query subclass by superclass attribute", 1, result.size() );
query = parser.parse( "mammalNbr:[2 TO 2]" );
- hibQuery = s.createLuceneQuery( query, Animal.class, Mammal.class );
+ hibQuery = s.createFullTextQuery( query, Animal.class, Mammal.class );
result = hibQuery.list();
assertNotNull( result );
assertEquals( "Query subclass by subclass attribute", 1, result.size() );
query = parser.parse( "Jr" );
- hibQuery = s.createLuceneQuery( query, Animal.class );
+ hibQuery = s.createFullTextQuery( query, Animal.class );
result = hibQuery.list();
assertNotNull( result );
assertEquals( "Query filtering on superclass return mapped subclasses", 2, result.size() );
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -6,6 +6,7 @@
import org.hibernate.search.test.TestCase;
import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
import org.hibernate.Transaction;
import org.hibernate.Hibernate;
import org.hibernate.ScrollableResults;
@@ -20,7 +21,7 @@
public class LuceneQueryTest extends TestCase {
public void testList() throws Exception {
- FullTextSession s = new FullTextSession( openSession() );
+ FullTextSession s = Search.createFullTextSession( openSession() );
Transaction tx = s.beginTransaction();
Clock clock = new Clock(1, "Seiko");
s.save( clock );
@@ -36,19 +37,19 @@
QueryParser parser = new QueryParser("title", new StopAnalyzer() );
Query query = parser.parse( "summary:noword" );
- org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
List result = hibQuery.list();
assertNotNull( result );
assertEquals( 0, result.size() );
query = parser.parse( "summary:Festina Or brand:Seiko" );
- hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
result = hibQuery.list();
assertNotNull( result );
assertEquals( "Query with explicit class filter", 2, result.size() );
query = parser.parse( "summary:Festina Or brand:Seiko" );
- hibQuery = s.createLuceneQuery( query );
+ hibQuery = s.createFullTextQuery( query );
result = hibQuery.list();
assertNotNull( result );
assertEquals( "Query with no class filter", 2, result.size() );
@@ -62,7 +63,7 @@
}
public void testFirstMax() throws Exception {
- FullTextSession s = new FullTextSession( openSession() );
+ FullTextSession s = Search.createFullTextSession( openSession() );
Transaction tx = s.beginTransaction();
Clock clock = new Clock(1, "Seiko");
s.save( clock );
@@ -78,7 +79,7 @@
QueryParser parser = new QueryParser("title", new StopAnalyzer() );
Query query = parser.parse( "summary:Festina Or brand:Seiko" );
- org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
hibQuery.setFirstResult( 1 );
List result = hibQuery.list();
assertNotNull( result );
@@ -108,7 +109,7 @@
}
public void testIterator() throws Exception {
- FullTextSession s = new FullTextSession( openSession() );
+ FullTextSession s = Search.createFullTextSession( openSession() );
Transaction tx = s.beginTransaction();
Clock clock = new Clock(1, "Seiko");
s.save( clock );
@@ -124,13 +125,13 @@
QueryParser parser = new QueryParser("title", new StopAnalyzer() );
Query query = parser.parse( "summary:noword" );
- org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
Iterator result = hibQuery.iterate();
assertNotNull( result );
assertFalse( result.hasNext() );
query = parser.parse( "summary:Festina Or brand:Seiko" );
- hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
result = hibQuery.iterate();
assertNotNull( result );
int index = 0;
@@ -145,7 +146,7 @@
}
public void testScrollableResultSet() throws Exception {
- FullTextSession s = new FullTextSession( openSession() );
+ FullTextSession s = Search.createFullTextSession( openSession() );
Transaction tx = s.beginTransaction();
Clock clock = new Clock(1, "Seiko");
s.save( clock );
@@ -161,7 +162,7 @@
QueryParser parser = new QueryParser("title", new StopAnalyzer() );
Query query = parser.parse( "summary:noword" );
- org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
ScrollableResults result = hibQuery.scroll();
assertNotNull( result );
assertEquals(-1, result.getRowNumber() );
@@ -169,7 +170,7 @@
result.close();
query = parser.parse( "summary:Festina Or brand:Seiko" );
- hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
result = hibQuery.scroll();
assertEquals(0, result.getRowNumber() );
result.beforeFirst();
@@ -188,7 +189,7 @@
}
public void testMultipleEntityPerIndex() throws Exception {
- FullTextSession s = new FullTextSession( openSession() );
+ FullTextSession s = Search.createFullTextSession( openSession() );
Transaction tx = s.beginTransaction();
Clock clock = new Clock(1, "Seiko");
s.save( clock );
@@ -202,20 +203,20 @@
QueryParser parser = new QueryParser("title", new StopAnalyzer() );
Query query = parser.parse( "summary:Festina" );
- org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
List result = hibQuery.list();
assertNotNull( result );
assertEquals( "Query with explicit class filter", 1, result.size() );
query = parser.parse( "summary:Festina" );
- hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
Iterator it = hibQuery.iterate();
assertTrue( it.hasNext() );
assertNotNull( it.next() );
assertFalse( it.hasNext() );
query = parser.parse( "summary:Festina" );
- hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
ScrollableResults sr = hibQuery.scroll();
assertTrue( sr.first() );
assertNotNull( sr.get() );
@@ -223,14 +224,14 @@
sr.close();
query = parser.parse( "summary:Festina OR brand:seiko" );
- hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
hibQuery.setMaxResults( 2 );
result = hibQuery.list();
assertNotNull( result );
assertEquals( "Query with explicit class filter and limit", 2, result.size() );
query = parser.parse( "summary:Festina" );
- hibQuery = s.createLuceneQuery( query );
+ hibQuery = s.createFullTextQuery( query );
result = hibQuery.list();
assertNotNull( result );
assertEquals( "Query with no class filter", 2, result.size() );
Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java 2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java 2006-11-23 22:30:01 UTC (rev 10865)
@@ -4,7 +4,9 @@
import java.util.List;
import org.hibernate.search.test.TestCase;
+import org.hibernate.search.impl.FullTextSessionImpl;
import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
import org.hibernate.Transaction;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.analysis.StopAnalyzer;
@@ -15,7 +17,7 @@
public class MassIndexTest extends TestCase {
public void testTransactional() throws Exception {
- FullTextSession s = new FullTextSession( openSession() );
+ FullTextSession s = Search.createFullTextSession( openSession() );
Transaction tx = s.beginTransaction();
int loop = 4;
for (int i = 0 ; i < loop; i++) {
@@ -27,16 +29,16 @@
tx.commit();
s.close();
- s = new FullTextSession( openSession() );
+ s = new FullTextSessionImpl( openSession() );
s.getTransaction().begin();
s.connection().createStatement().executeUpdate( "update Email set body='Meet the guys who write the software'");
s.getTransaction().commit();
s.close();
- s = new FullTextSession( openSession() );
+ s = new FullTextSessionImpl( openSession() );
tx = s.beginTransaction();
QueryParser parser = new QueryParser("id", new StopAnalyzer() );
- List result = s.createLuceneQuery( parser.parse( "body:write" ) ).list();
+ List result = s.createFullTextQuery( parser.parse( "body:write" ) ).list();
assertEquals( 0, result.size() );
result = s.createCriteria( Email.class ).list();
for (int i = 0 ; i < loop/2 ; i++)
@@ -49,9 +51,9 @@
tx.commit(); //do the process
s.close();
- s = new FullTextSession( openSession() );
+ s = new FullTextSessionImpl( openSession() );
tx = s.beginTransaction();
- result = s.createLuceneQuery( parser.parse( "body:write" ) ).list();
+ result = s.createFullTextQuery( parser.parse( "body:write" ) ).list();
assertEquals( loop, result.size() );
for (Object o : result) s.delete( o );
tx.commit();
Hibernate SVN: r10864 - trunk/Hibernate3/src/org/hibernate/impl
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2006-11-22 07:54:10 -0500 (Wed, 22 Nov 2006)
New Revision: 10864
Modified:
trunk/Hibernate3/src/org/hibernate/impl/SessionFactoryImpl.java
Log:
HHH-2259 : jta current session + auto-flush
Modified: trunk/Hibernate3/src/org/hibernate/impl/SessionFactoryImpl.java
===================================================================
--- trunk/Hibernate3/src/org/hibernate/impl/SessionFactoryImpl.java 2006-11-22 12:53:53 UTC (rev 10863)
+++ trunk/Hibernate3/src/org/hibernate/impl/SessionFactoryImpl.java 2006-11-22 12:54:10 UTC (rev 10864)
@@ -990,6 +990,9 @@
return null;
}
else if ( "jta".equals( impl ) ) {
+ if ( settings.getTransactionFactory().areCallbacksLocalToHibernateTransactions() ) {
+ log.warn( "JTASessionContext being used with JDBCTransactionFactory; auto-flush will not operate correctly with getCurrentSession()" );
+ }
return new JTASessionContext( this );
}
else if ( "thread".equals( impl ) ) {