/**
  * Initializes full text search functionality for this database. This adds the following Java
  * functions to the database:
  *
  * <ul>
  *   <li>FTL_CREATE_INDEX(schemaNameString, tableNameString, columnListString)
  *   <li>FTL_SEARCH(queryString, limitInt, offsetInt): result set
  *   <li>FTL_SEARCH_DATA(queryString, limitInt, offsetInt): result set
  *   <li>FTL_REINDEX()
  *   <li>FTL_DROP_ALL()
  * </ul>
  *
  * It also adds a schema FTL to the database where bookkeeping information is stored. This
  * function may be called from a Java application, or by using the SQL statements:
  *
  * <pre>
  * CREATE ALIAS IF NOT EXISTS FTL_INIT FOR
  *      &quot;org.h2.fulltext.FullTextLucene2.init&quot;;
  * CALL FTL_INIT();
  * </pre>
  *
  * @param conn the connection
  */
 public static void init(Connection conn) throws SQLException {
   Statement stat = conn.createStatement();
   stat.execute("CREATE SCHEMA IF NOT EXISTS " + SCHEMA);
   stat.execute(
       "CREATE TABLE IF NOT EXISTS "
           + SCHEMA
           + ".INDEXES(SCHEMA VARCHAR, TABLE VARCHAR, COLUMNS VARCHAR, PRIMARY KEY(SCHEMA, TABLE))");
   stat.execute(
       "CREATE ALIAS IF NOT EXISTS FTL_CREATE_INDEX FOR \""
           + FullTextLucene2.class.getName()
           + ".createIndex\"");
   stat.execute(
       "CREATE ALIAS IF NOT EXISTS FTL_SEARCH FOR \""
           + FullTextLucene2.class.getName()
           + ".search\"");
   stat.execute(
       "CREATE ALIAS IF NOT EXISTS FTL_SEARCH_DATA FOR \""
           + FullTextLucene2.class.getName()
           + ".searchData\"");
   stat.execute(
       "CREATE ALIAS IF NOT EXISTS FTL_REINDEX FOR \""
           + FullTextLucene2.class.getName()
           + ".reindex\"");
   stat.execute(
       "CREATE ALIAS IF NOT EXISTS FTL_DROP_ALL FOR \""
           + FullTextLucene2.class.getName()
           + ".dropAll\"");
   try {
     getIndexAccess(conn);
   } catch (SQLException e) {
     throw convertException(e);
   }
 }
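  // Usage sketch (illustrative; the JDBC URL is an assumption, any file-based database works):
  //
  //   Connection conn = DriverManager.getConnection("jdbc:h2:/data/db/test");
  //   FullTextLucene2.init(conn);
  //   // the FTL schema and the FTL_* aliases listed above are now available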
 /**
  * Re-creates the full text index for this database. Calling this method is usually not needed, as
  * the index is kept up-to-date automatically.
  *
  * @param conn the connection
  */
 public static void reindex(Connection conn) throws SQLException {
   init(conn);
   removeAllTriggers(conn, TRIGGER_PREFIX);
   removeIndexFiles(conn);
   Statement stat = conn.createStatement();
   ResultSet rs = stat.executeQuery("SELECT * FROM " + SCHEMA + ".INDEXES");
   while (rs.next()) {
     String schema = rs.getString("SCHEMA");
     String table = rs.getString("TABLE");
     createTrigger(conn, schema, table);
     indexExistingRows(conn, schema, table);
   }
 }
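  // Usage sketch (assuming init() has already been called on this database):
  //
  //   CALL FTL_REINDEX();
  //
  // or, from Java: FullTextLucene2.reindex(conn);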
 /**
   * Creates a new full text index for a table and column list. Each table may only have one index
   * at any time.
  *
  * @param conn the connection
  * @param schema the schema name of the table (case sensitive)
  * @param table the table name (case sensitive)
  * @param columnList the column list (null for all columns)
  */
 public static void createIndex(Connection conn, String schema, String table, String columnList)
     throws SQLException {
   init(conn);
   PreparedStatement prep =
       conn.prepareStatement(
           "INSERT INTO " + SCHEMA + ".INDEXES(SCHEMA, TABLE, COLUMNS) VALUES(?, ?, ?)");
   prep.setString(1, schema);
   prep.setString(2, table);
   prep.setString(3, columnList);
   prep.execute();
   createTrigger(conn, schema, table);
   indexExistingRows(conn, schema, table);
 }
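  // Usage sketch (the table and column names are illustrative): index all columns of a
  // table with a primary key, then query it:
  //
  //   CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR);
  //   CALL FTL_CREATE_INDEX('PUBLIC', 'TEST', NULL);
  //   INSERT INTO TEST VALUES(1, 'Hello World');
  //   SELECT * FROM FTL_SEARCH('Hello', 0, 0);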
 /**
   * Gets the path of the Lucene index for this database.
  *
  * @param conn the database connection
  * @return the path
  */
 protected static String getIndexPath(Connection conn) throws SQLException {
   Statement stat = conn.createStatement();
   ResultSet rs = stat.executeQuery("CALL DATABASE_PATH()");
   rs.next();
   String path = rs.getString(1);
   if (path == null) {
     throw throwException("Fulltext search for in-memory databases is not supported.");
   }
   int index = path.lastIndexOf(':');
    // strip everything up to the last ':'; position 1 means a Windows drive
    // letter is used (e.g. "C:"), in which case the path is kept as-is
   if (index > 1) {
     path = path.substring(index + 1);
   }
   rs.close();
   return path;
 }
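  // Illustrative values: a DATABASE_PATH() result such as "nio:/data/db/test" is reduced
  // to "/data/db/test" above, while "C:/data/db/test" (drive letter at position 1) is
  // returned unchanged.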
 /**
   * Adds the existing rows of the table to the index.
  *
  * @param conn the database connection
  * @param schema the schema name
  * @param table the table name
  */
 protected static void indexExistingRows(Connection conn, String schema, String table)
     throws SQLException {
   FullTextLucene2.FullTextTrigger existing = new FullTextLucene2.FullTextTrigger();
   existing.init(conn, schema, null, table, false, Trigger.INSERT);
   String sql =
       "SELECT * FROM "
           + StringUtils.quoteIdentifier(schema)
           + "."
           + StringUtils.quoteIdentifier(table);
   ResultSet rs = conn.createStatement().executeQuery(sql);
   int columnCount = rs.getMetaData().getColumnCount();
   while (rs.next()) {
     Object[] row = new Object[columnCount];
     for (int i = 0; i < columnCount; i++) {
       row[i] = rs.getObject(i + 1);
     }
      // add the row to the Lucene index (instead of calling existing.fire(conn, null, row));
      // the index is committed once after all rows have been processed
      existing.insert(row, false);
   }
   existing.commitIndex();
 }
 /**
   * Creates the full text trigger for the given table.
  *
  * @param conn the database connection
  * @param schema the schema name
  * @param table the table name
  */
 protected static void createTrigger(Connection conn, String schema, String table)
     throws SQLException {
   Statement stat = conn.createStatement();
   String trigger =
       StringUtils.quoteIdentifier(schema)
           + "."
           + StringUtils.quoteIdentifier(TRIGGER_PREFIX + table);
   stat.execute("DROP TRIGGER IF EXISTS " + trigger);
   StringBuilder buff = new StringBuilder("CREATE TRIGGER IF NOT EXISTS ");
   // the trigger is also called on rollback because transaction rollback
   // will not undo the changes in the Lucene index
   buff.append(trigger)
       .append(" AFTER INSERT, UPDATE, DELETE, ROLLBACK ON ")
       .append(StringUtils.quoteIdentifier(schema))
       .append('.')
       .append(StringUtils.quoteIdentifier(table))
       .append(" FOR EACH ROW CALL \"")
       .append(FullTextLucene2.FullTextTrigger.class.getName())
       .append('\"');
   stat.execute(buff.toString());
 }
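  // The generated statement looks roughly like this (assuming schema PUBLIC, table TEST
  // and a TRIGGER_PREFIX of "FTL_"):
  //
  //   CREATE TRIGGER IF NOT EXISTS "PUBLIC"."FTL_TEST"
  //       AFTER INSERT, UPDATE, DELETE, ROLLBACK ON "PUBLIC"."TEST"
  //       FOR EACH ROW CALL "...FullTextLucene2$FullTextTrigger";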
 /** INTERNAL */
 public void init(
     Connection conn,
     String schemaName,
     String triggerName,
     String tableName,
     boolean before,
     int type)
     throws SQLException {
   this.schema = schemaName;
   this.table = tableName;
   this.indexPath = getIndexPath(conn);
   this.indexAccess = getIndexAccess(conn);
   ArrayList<String> keyList = New.arrayList();
   DatabaseMetaData meta = conn.getMetaData();
   ResultSet rs =
       meta.getColumns(
           null, escapeMetaDataPattern(schemaName), escapeMetaDataPattern(tableName), null);
   ArrayList<String> columnList = New.arrayList();
   while (rs.next()) {
     columnList.add(rs.getString("COLUMN_NAME"));
   }
   columnTypes = new int[columnList.size()];
   columns = new String[columnList.size()];
   columnList.toArray(columns);
   rs =
       meta.getColumns(
           null, escapeMetaDataPattern(schemaName), escapeMetaDataPattern(tableName), null);
   for (int i = 0; rs.next(); i++) {
     columnTypes[i] = rs.getInt("DATA_TYPE");
   }
   if (keyList.size() == 0) {
     rs = meta.getPrimaryKeys(null, escapeMetaDataPattern(schemaName), tableName);
     while (rs.next()) {
       keyList.add(rs.getString("COLUMN_NAME"));
     }
   }
   if (keyList.size() == 0) {
     throw throwException("No primary key for table " + tableName);
   }
   ArrayList<String> indexList = New.arrayList();
   PreparedStatement prep =
       conn.prepareStatement(
           "SELECT COLUMNS FROM " + SCHEMA + ".INDEXES WHERE SCHEMA=? AND TABLE=?");
   prep.setString(1, schemaName);
   prep.setString(2, tableName);
   rs = prep.executeQuery();
   if (rs.next()) {
     String cols = rs.getString(1);
     if (cols != null) {
       for (String s : StringUtils.arraySplit(cols, ',', true)) {
         indexList.add(s);
       }
     }
   }
   if (indexList.size() == 0) {
     indexList.addAll(columnList);
   }
   keys = new int[keyList.size()];
   setColumns(keys, keyList, columnList);
   indexColumns = new int[indexList.size()];
   setColumns(indexColumns, indexList, columnList);
 }
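  // Worked example (hypothetical table, and assuming setColumns resolves column names to
  // their positions in columnList): for TEST(ID INT PRIMARY KEY, NAME VARCHAR) indexed
  // with a NULL column list, columnList is [ID, NAME] and keyList is [ID], so keys
  // becomes {0} and indexColumns becomes {0, 1}.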
 /**
   * Performs the full text search and builds the result set.
  *
  * @param conn the database connection
  * @param text the query
  * @param limit the limit
  * @param offset the offset
  * @param data whether the raw data should be returned
  * @return the result set
  */
 protected static ResultSet search(
     Connection conn, String text, int limit, int offset, boolean data) throws SQLException {
   SimpleResultSet result = createResultSet(data);
   if (conn.getMetaData().getURL().startsWith("jdbc:columnlist:")) {
     // this is just to query the result set columns
     return result;
   }
   if (text == null || text.trim().length() == 0) {
     return result;
   }
   try {
     IndexAccess access = getIndexAccess(conn);
     /*## LUCENE2 ##
     access.modifier.flush();
     String path = getIndexPath(conn);
     IndexReader reader = IndexReader.open(path);
     Analyzer analyzer = new StandardAnalyzer();
     Searcher searcher = new IndexSearcher(reader);
     QueryParser parser = new QueryParser(LUCENE_FIELD_DATA, analyzer);
     Query query = parser.parse(text);
     Hits hits = searcher.search(query);
     int max = hits.length();
     if (limit == 0) {
         limit = max;
     }
     for (int i = 0; i < limit && i + offset < max; i++) {
         Document doc = hits.doc(i + offset);
         float score = hits.score(i + offset);
     //*/
     // ## LUCENE3 ##
     // take a reference as the searcher may change
     Searcher searcher = access.searcher;
     // reuse the same analyzer; it's thread-safe;
     // also allows subclasses to control the analyzer used.
     Analyzer analyzer = access.writer.getAnalyzer();
     QueryParser parser = new QueryParser(Version.LUCENE_30, LUCENE_FIELD_DATA, analyzer);
     Query query = parser.parse(text);
     // Lucene 3 insists on a hard limit and will not provide
     // a total hits value. Take at least 100 which is
     // an optimal limit for Lucene as any more
     // will trigger writing results to disk.
     int maxResults = (limit == 0 ? 100 : limit) + offset;
     TopDocs docs = searcher.search(query, maxResults);
     if (limit == 0) {
       limit = docs.totalHits;
     }
     for (int i = 0, len = docs.scoreDocs.length;
         i < limit && i + offset < docs.totalHits && i + offset < len;
         i++) {
       ScoreDoc sd = docs.scoreDocs[i + offset];
       Document doc = searcher.doc(sd.doc);
       float score = sd.score;
       // */
       String q = doc.get(LUCENE_FIELD_QUERY);
       if (data) {
         int idx = q.indexOf(" WHERE ");
         JdbcConnection c = (JdbcConnection) conn;
         Session session = (Session) c.getSession();
         Parser p = new Parser(session);
         String tab = q.substring(0, idx);
         ExpressionColumn expr = (ExpressionColumn) p.parseExpression(tab);
         String schemaName = expr.getOriginalTableAliasName();
         String tableName = expr.getColumnName();
         q = q.substring(idx + " WHERE ".length());
         Object[][] columnData = parseKey(conn, q);
         result.addRow(schemaName, tableName, columnData[0], columnData[1], score);
       } else {
         result.addRow(q, score);
       }
     }
     /*## LUCENE2 ##
     // TODO keep it open if possible
     reader.close();
     //*/
   } catch (Exception e) {
     throw convertException(e);
   }
   return result;
 }
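  // Result shapes (illustrative values): with data=false each row contains the query and
  // the score, e.g. ('"PUBLIC"."TEST" WHERE "ID"=1', 0.74); with data=true the schema
  // name, table name, key column names, key values and score are returned as separate
  // columns.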
 /**
  * Drops all full text indexes from the database.
  *
  * @param conn the connection
  */
 public static void dropAll(Connection conn) throws SQLException {
   Statement stat = conn.createStatement();
   stat.execute("DROP SCHEMA IF EXISTS " + SCHEMA);
   removeAllTriggers(conn, TRIGGER_PREFIX);
   removeIndexFiles(conn);
 }
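  // Usage sketch: remove the full text support from the database again:
  //
  //   CALL FTL_DROP_ALL();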