/** Generate a spelling suggestion for the definitions stored in defs */ public void createSpellingSuggestions() { IndexReader indexReader = null; SpellChecker checker; try { log.info("Generating spelling suggestion index ... "); indexReader = DirectoryReader.open(indexDirectory); checker = new SpellChecker(spellDirectory); // TODO below seems only to index "defs" , possible bug ? Analyzer analyzer = AnalyzerGuru.getAnalyzer(); IndexWriterConfig iwc = new IndexWriterConfig(SearchEngine.LUCENE_VERSION, analyzer); iwc.setOpenMode(OpenMode.CREATE_OR_APPEND); checker.indexDictionary(new LuceneDictionary(indexReader, QueryBuilder.DEFS), iwc, false); log.info("done"); } catch (IOException e) { log.log(Level.SEVERE, "ERROR: Generating spelling: {0}", e); } finally { if (indexReader != null) { try { indexReader.close(); } catch (IOException e) { log.log(Level.WARNING, "An error occured while closing reader", e); } } if (spellDirectory != null) { spellDirectory.close(); } } }
/**
 * OSGi bundle shutdown hook: stops the superclass, releases the Lucene
 * directory, and clears the singleton state.
 *
 * @param context the OSGi bundle context
 * @throws Exception if the superclass stop or the directory close fails
 */
@Override
public void stop(BundleContext context) throws Exception {
    super.stop(context);
    // Log before closing so the shutdown is recorded even if close() throws
    // (the original logged only after a fully successful close).
    logger.info("Stopping Lucene Index");
    try {
        if (directory != null) {
            directory.close();
        }
    } finally {
        // BUGFIX: clear the singleton even when close() throws, so a failed
        // shutdown cannot leave a stale INSTANCE/directory behind.
        INSTANCE = null;
        directory = null;
    }
}
/**
 * Builds an in-memory copy of an on-disk directory.
 *
 * <p>Every regular file in {@code dir} is copied into this RAMDirectory;
 * subdirectories are skipped. When {@code closeDir} is true the source
 * directory is closed after a successful copy.
 */
private RAMDirectory(FSDirectory dir, boolean closeDir, IOContext context) throws IOException {
    this();
    for (String name : dir.listAll()) {
        // Only plain files are part of a Lucene index; skip nested directories.
        if (Files.isDirectory(dir.getDirectory().resolve(name))) {
            continue;
        }
        copyFrom(dir, name, name, context);
    }
    if (closeDir) {
        dir.close();
    }
}
/**
 * Stops the background indexing machinery and closes the Lucene directory.
 *
 * <p>Close failures are logged, not propagated.
 */
public void stop() {
    // Deliberate read of a (presumably volatile) field purely for its memory
    // barrier: it forces visibility of state written by other threads before
    // the shutdown sequence below runs. The value itself is intentionally
    // discarded — do not "clean up" this line.
    @SuppressWarnings("unused")
    int readCurrentState = current; // Another unneeded value, to ensure visibility of state protected by memory barrier
    timer.cancel();
    task.stop();
    try {
        directory.close();
    } catch (Exception e) {
        // Best-effort close: report the failure but keep shutting down.
        log.unableToCloseLuceneDirectory(directory.getDirectory(), e);
    }
}
/**
 * Container shutdown hook: cancels the maintenance timer, then commits and
 * closes the IndexWriter, the SearcherManager, and the Lucene directory.
 *
 * <p>BUGFIX: each resource is now released in its own try block so a failure
 * in one close (e.g. {@code iwriter.commit()}) no longer skips the remaining
 * closes — the original wrapped the whole sequence in a single try, leaking
 * {@code searcherManager} and {@code directory} on any early exception.
 */
@PreDestroy
private void exit() {
    timer.cancel();
    try {
        logger.debug("Closing IndexWriter for directory lockid " + directory.getLockID());
        iwriter.commit();
        iwriter.close();
        iwriter = null;
        logger.debug("IndexWriter closed for directory lockid " + directory.getLockID());
    } catch (Exception e) {
        logFatal(e);
    }
    try {
        searcherManager.close();
        logger.debug("SearcherManager closed for directory lockid " + directory.getLockID());
    } catch (Exception e) {
        logFatal(e);
    }
    try {
        directory.close();
        directory = null;
        logger.info("Directory closed");
    } catch (Exception e) {
        logFatal(e);
    }
}

/** Logs the full stack trace of {@code e} at FATAL level. */
private void logFatal(Exception e) {
    StringWriter errors = new StringWriter();
    e.printStackTrace(new PrintWriter(errors));
    logger.fatal(errors.toString());
}
/** * Index the fileset. * * @exception IOException if Lucene I/O exception TODO: refactor!!!!! */ private void indexDocs() throws IOException { Date start = new Date(); boolean create = overwrite; // If the index directory doesn't exist, // create it and force create mode if (indexDir.mkdirs() && !overwrite) { create = true; } FSDirectory dir = FSDirectory.open(indexDir); try { Searcher searcher = null; boolean checkLastModified = false; if (!create) { try { searcher = new IndexSearcher(dir, true); checkLastModified = true; } catch (IOException ioe) { log("IOException: " + ioe.getMessage()); // Empty - ignore, which indicates to index all // documents } } log("checkLastModified = " + checkLastModified, Project.MSG_VERBOSE); IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_CURRENT, analyzer) .setOpenMode(create ? OpenMode.CREATE : OpenMode.APPEND); LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy(); lmp.setUseCompoundFile(useCompoundIndex); lmp.setMergeFactor(mergeFactor); IndexWriter writer = new IndexWriter(dir, conf); int totalFiles = 0; int totalIndexed = 0; int totalIgnored = 0; try { for (int i = 0; i < rcs.size(); i++) { ResourceCollection rc = rcs.elementAt(i); if (rc.isFilesystemOnly()) { Iterator resources = rc.iterator(); while (resources.hasNext()) { Resource r = (Resource) resources.next(); if (!r.isExists() || !(r instanceof FileResource)) { continue; } totalFiles++; File file = ((FileResource) r).getFile(); if (!file.exists() || !file.canRead()) { throw new BuildException( "File \"" + file.getAbsolutePath() + "\" does not exist or is not readable."); } boolean indexIt = true; if (checkLastModified) { Term pathTerm = new Term("path", file.getPath()); TermQuery query = new TermQuery(pathTerm); ScoreDoc[] hits = searcher.search(query, null, 1).scoreDocs; // if document is found, compare the // indexed last modified time with the // current file // - don't index if up to date if (hits.length > 0) { Document doc = 
searcher.doc(hits[0].doc); String indexModified = doc.get("modified").trim(); if (indexModified != null) { long lastModified = 0; try { lastModified = DateTools.stringToTime(indexModified); } catch (ParseException e) { // if modified time is not parsable, skip } if (lastModified == file.lastModified()) { // TODO: remove existing document indexIt = false; } } } } if (indexIt) { try { log("Indexing " + file.getPath(), Project.MSG_VERBOSE); Document doc = handler.getDocument(file); if (doc == null) { totalIgnored++; } else { // Add the path of the file as a field named "path". Use a Keyword field, so // that the index stores the path, and so that the path is searchable doc.add( new Field( "path", file.getPath(), Field.Store.YES, Field.Index.NOT_ANALYZED)); // Add the last modified date of the file a field named "modified". Use a // Keyword field, so that it's searchable, but so that no attempt is made // to tokenize the field into words. doc.add( new Field( "modified", DateTools.timeToString( file.lastModified(), DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.NOT_ANALYZED)); writer.addDocument(doc); totalIndexed++; } } catch (DocumentHandlerException e) { throw new BuildException(e); } } } // for j } // if (fs != null) } // for i writer.optimize(); } // try finally { // always make sure everything gets closed, // no matter how we exit. writer.close(); if (searcher != null) { searcher.close(); } } Date end = new Date(); log( totalIndexed + " out of " + totalFiles + " indexed (" + totalIgnored + " ignored) in " + (end.getTime() - start.getTime()) + " milliseconds"); } finally { dir.close(); } }
public void convert(Application app, File dbhome) throws Exception { FSDirectory indexDir = FSDirectory.getDirectory(dbhome, false); if (indexDir instanceof TransFSDirectory) { FSDirectory.setDisableLocks(true); TransFSDirectory d = (TransFSDirectory) indexDir; TransSource source = app.getTransSource(); d.setDriverClass(source.getDriverClass()); d.setUrl(source.getUrl()); d.setUser(source.getUser()); d.setPassword(source.getPassword()); } File ndbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_tmp"); File olddbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_old"); FSDirectory nindexDir = FSDirectory.getDirectory(ndbhome, true); if (nindexDir instanceof TransFSDirectory) { FSDirectory.setDisableLocks(true); TransFSDirectory d = (TransFSDirectory) nindexDir; TransSource source = app.getTransSource(); d.setDriverClass(source.getDriverClass()); d.setUrl(source.getUrl()); d.setUser(source.getUser()); d.setPassword(source.getPassword()); } IndexSearcher searcher = null; IndexWriter writer = null; LuceneManager lmgr = null; try { searcher = new IndexSearcher(indexDir); PerFieldAnalyzerWrapper a = LuceneManager.buildAnalyzer(); writer = IndexWriterManager.getWriter(nindexDir, a, true); final int numDocs = searcher.getIndexReader().numDocs(); HashSet deldocs = new HashSet(); HashMap infos = new HashMap(); for (int i = 0; i < numDocs; i++) { Document doc = searcher.doc(i); String delprop = doc.get(DeletedInfos.DELETED); String layerStr = doc.get(LuceneManager.LAYER_OF_SAVE); int layer = -1; try { layer = Integer.parseInt(layerStr); } catch (Exception ex) { layer = -1; } final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR + doc.get(LuceneManager.LAYER_OF_SAVE); if (delprop != null && "true".equals(delprop) /* && layer == DbKey.LIVE_LAYER*/) { deldocs.add(id); } else { Object v; if ((v = infos.get(id)) == null) { infos.put(id, new Integer(i)); } else { final String lmod = doc.get(LuceneManager.LASTMODIFIED); final String lmod_prev 
= searcher.doc(((Integer) v).intValue()).get("_lastmodified"); if (lmod_prev == null || (lmod != null && lmod.compareTo(lmod_prev) > 0)) { infos.put(id, new Integer(i)); } } } } ArrayList listOfMaps = new ArrayList(); for (int i = 0; i < numDocs; i++) { Document doc = searcher.doc(i); String delprop = doc.get(DeletedInfos.DELETED); String layerStr = doc.get(LuceneManager.LAYER_OF_SAVE); int layer = -1; try { layer = Integer.parseInt(layerStr); } catch (Exception ex) { layer = -1; } final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR + doc.get(LuceneManager.LAYER_OF_SAVE); if (delprop != null && "true".equals(delprop)) { continue; } else if (id != null && deldocs.contains(id) /* && layer == DbKey.LIVE_LAYER*/) { continue; } Integer idx = (Integer) infos.get(id); if (idx != null && i != idx.intValue()) { continue; } Document ndoc = convertDocument(doc); if (this.recordNodes) { listOfMaps.add(LuceneManager.luceneDocumentToMap(doc)); } if (ndoc != null) { writer.addDocument(ndoc); } } if (this.recordNodes) { lmgr = new LuceneManager(this.app, false, true); this.allNodes = new HashMap(); final int size = listOfMaps.size(); for (int i = 0; i < size; i++) { HashMap m = (HashMap) listOfMaps.get(i); INode n = lmgr.mapToNode(m); this.allNodes.put(n.getID(), getPath(n)); n = null; } } } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } finally { if (searcher != null) { try { searcher.close(); } catch (Exception ex) { app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), ex); } } if (lmgr != null) { lmgr.shutdown(); lmgr = null; } indexDir.close(); SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(indexDir); sinfos.clear(); IndexObjectsFactory.removeDeletedInfos(indexDir); } Connection conn = null; boolean exceptionOccured = false; try { if (writer != null) { TransSource ts = app.getTransSource(); conn = ts.getConnection(); DatabaseMetaData dmd = conn.getMetaData(); ResultSet rs = dmd.getColumns(null, null, 
"Lucene", "version"); if (!rs.next()) { final String alterTbl = "ALTER TABLE Lucene ADD version INT NOT NULL DEFAULT 1"; PreparedStatement pstmt = null; try { pstmt = conn.prepareStatement(alterTbl); pstmt.execute(); } catch (SQLException sqle) { app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), sqle); } finally { if (pstmt != null) { pstmt.close(); pstmt = null; } } } rs.close(); rs = null; writer.close(); writer.flushCache(); // TODO:writer.writeSegmentsFile(); LuceneManager.commitSegments(conn, app, writer.getDirectory()); writer.finalizeTrans(); this.updateSQL(conn); } } catch (Exception ex) { ex.printStackTrace(); exceptionOccured = true; throw new RuntimeException(ex); } finally { if (conn != null) { try { if (!conn.getAutoCommit()) { if (!exceptionOccured) { conn.commit(); } else { conn.rollback(); } } conn.close(); } catch (Exception ex) { app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), ex); } conn = null; } nindexDir.close(); SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(nindexDir); sinfos.clear(); IndexObjectsFactory.removeDeletedInfos(nindexDir); } if (!dbhome.renameTo(olddbhome)) { throw new Exception("Could not move the old version of the db into " + olddbhome); } if (!ndbhome.renameTo(dbhome)) { throw new Exception("Could not move the newer version of the db into " + dbhome); } File oldBlobDir = new File(olddbhome, "blob"); File newBlobDir = new File(ndbhome, "blob"); oldBlobDir.renameTo(newBlobDir); if (!FileUtils.deleteDir(olddbhome)) { throw new Exception("Could not delete the old version of the db at " + olddbhome); } }