private void remove(Class entity, Serializable id) {
    log.trace("remove from Lucene index: " + entity + "#" + id);
    DocumentBuilder builder = workspace.getDocumentBuilder(entity);
    Term term = builder.getTerm(id);
    IndexReader reader = workspace.getIndexReader(entity);
    TermDocs termDocs = null;
    try {
        // TODO is there a faster way?
        // TODO include TermDocs into the workspace?
        termDocs = reader.termDocs(term);
        String entityName = entity.getName();
        while (termDocs.next()) {
            int docIndex = termDocs.doc();
            if (entityName.equals(reader.document(docIndex).get(DocumentBuilder.CLASS_FIELDNAME))) {
                // remove only documents of the right class;
                // loop over all matches to remove every one (defensive code)
                reader.deleteDocument(docIndex);
            }
        }
    } catch (Exception e) {
        throw new HibernateException("Unable to remove from Lucene index: " + entity + "#" + id, e);
    } finally {
        if (termDocs != null) {
            try {
                termDocs.close();
            } catch (IOException e) {
                log.warn("Unable to close termDocs properly", e);
            }
        }
    }
}
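// The TODO above asks for a faster path. A minimal sketch, assuming the id term is
// unique per entity class (so the defensive per-document class check can be skipped):
// Lucene's IndexReader.deleteDocuments(Term) deletes all matching docs in one call.
// TermDeleter is a hypothetical helper, not part of the original workspace API.
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;

final class TermDeleter {
    static int deleteByTerm(IndexReader reader, Term term) throws IOException {
        // Marks every document containing the term as deleted and
        // returns how many documents were affected.
        return reader.deleteDocuments(term);
    }
}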
public void testCrashReaderDeletes() throws IOException {
    IndexWriter writer = initIndex(random, false);
    MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
    writer.close(false);

    // Delete a document through the reader, then simulate a crash before
    // the deletion is committed (the reader is never closed).
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocument(3);
    dir.crash();

    /*
    String[] l = dir.list();
    Arrays.sort(l);
    for (int i = 0; i < l.length; i++)
        System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
    */

    // The uncommitted delete must be lost: the index still holds all docs.
    reader = IndexReader.open(dir, false);
    assertEquals(157, reader.numDocs());
    reader.close();
    dir.close();
}
public void testBinaryFieldInIndex() throws Exception {
    Fieldable binaryFldStored = new Field("binaryStored", binaryValStored.getBytes(), Field.Store.YES);
    Fieldable binaryFldCompressed = new Field("binaryCompressed", binaryValCompressed.getBytes(), Field.Store.COMPRESS);
    Fieldable stringFldStored = new Field("stringStored", binaryValStored,
        Field.Store.YES, Field.Index.NO, Field.TermVector.NO);
    Fieldable stringFldCompressed = new Field("stringCompressed", binaryValCompressed,
        Field.Store.COMPRESS, Field.Index.NO, Field.TermVector.NO);

    try {
        // binary fields with store off are not allowed
        new Field("fail", binaryValCompressed.getBytes(), Field.Store.NO);
        fail();
    } catch (IllegalArgumentException iae) {
        // expected
    }

    Document doc = new Document();
    doc.add(binaryFldStored);
    doc.add(binaryFldCompressed);
    doc.add(stringFldStored);
    doc.add(stringFldCompressed);

    // test for field count
    assertEquals(4, doc.getFields().size());

    // add the doc to a ram index
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.addDocument(doc);
    writer.close();

    // open a reader and fetch the document
    IndexReader reader = IndexReader.open(dir);
    Document docFromReader = reader.document(0);
    assertNotNull(docFromReader);

    // fetch the binary stored field and compare its content with the original one
    String binaryFldStoredTest = new String(docFromReader.getBinaryValue("binaryStored"));
    assertEquals(binaryValStored, binaryFldStoredTest);

    // fetch the binary compressed field and compare its content with the original one
    String binaryFldCompressedTest = new String(docFromReader.getBinaryValue("binaryCompressed"));
    assertEquals(binaryValCompressed, binaryFldCompressedTest);

    // fetch the string field and compare its content with the original one
    String stringFldStoredTest = docFromReader.get("stringStored");
    assertEquals(binaryValStored, stringFldStoredTest);

    // fetch the compressed string field and compare its content with the original one
    String stringFldCompressedTest = docFromReader.get("stringCompressed");
    assertEquals(binaryValCompressed, stringFldCompressedTest);

    // delete the document from the index
    reader.deleteDocument(0);
    assertEquals(0, reader.numDocs());
    reader.close();
}
@Override
protected void doDelete(int n) throws CorruptIndexException, IOException {
    if (_srcReader != null) {
        _srcReader.deleteDocument(n);
    }
}
public void testDeletedDocs() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
    Document doc = new Document();
    doc.add(newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED,
        Field.TermVector.WITH_POSITIONS_OFFSETS));
    for (int i = 0; i < 19; i++) {
        writer.addDocument(doc);
    }
    writer.optimize();
    writer.close();

    // Delete one of the 19 docs so CheckIndex has a deletion to verify.
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocument(5);
    reader.close();

    ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
    CheckIndex checker = new CheckIndex(dir);
    checker.setInfoStream(new PrintStream(bos));
    if (VERBOSE) checker.setInfoStream(System.out);
    CheckIndex.Status indexStatus = checker.checkIndex();
    if (!indexStatus.clean) {
        System.out.println("CheckIndex failed");
        System.out.println(bos.toString());
        fail();
    }

    // Stored fields and term vectors should count only the 18 live docs.
    final CheckIndex.Status.SegmentInfoStatus seg = indexStatus.segmentInfos.get(0);
    assertTrue(seg.openReaderPassed);

    assertNotNull(seg.diagnostics);

    assertNotNull(seg.fieldNormStatus);
    assertNull(seg.fieldNormStatus.error);
    assertEquals(1, seg.fieldNormStatus.totFields);

    assertNotNull(seg.termIndexStatus);
    assertNull(seg.termIndexStatus.error);
    assertEquals(1, seg.termIndexStatus.termCount);
    assertEquals(19, seg.termIndexStatus.totFreq);
    assertEquals(18, seg.termIndexStatus.totPos);

    assertNotNull(seg.storedFieldStatus);
    assertNull(seg.storedFieldStatus.error);
    assertEquals(18, seg.storedFieldStatus.docCount);
    assertEquals(18, seg.storedFieldStatus.totFields);

    assertNotNull(seg.termVectorStatus);
    assertNull(seg.termVectorStatus.error);
    assertEquals(18, seg.termVectorStatus.docCount);
    assertEquals(18, seg.termVectorStatus.totVectors);

    assertTrue(seg.diagnostics.size() > 0);
    final List<String> onlySegments = new ArrayList<String>();
    onlySegments.add("_0");
    assertTrue(checker.checkIndex(onlySegments).clean);

    dir.close();
}
/** @see LuceneIndexReader#deleteDocument(int) */
public void deleteDocument(int docNum) throws IOException {
    indexReader.deleteDocument(docNum);
}
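// For context: a minimal, self-contained sketch of the reader-side delete API that the
// snippets above rely on (Lucene 2.x/3.x era, matching the constructors used above).
// The class name DeleteDocumentDemo and the "id" field are illustrative only; the key
// point is that deletions made through an IndexReader are committed when it is closed.
import java.io.IOException;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class DeleteDocumentDemo {
    public static void main(String[] args) throws IOException {
        Directory dir = new RAMDirectory();

        // Build a tiny index with two documents.
        IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true,
                IndexWriter.MaxFieldLength.LIMITED);
        for (String id : new String[] { "1", "2" }) {
            Document doc = new Document();
            doc.add(new Field("id", id, Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.addDocument(doc);
        }
        writer.close();

        // Open the reader in read-write mode (readOnly = false) so deletes are allowed.
        IndexReader reader = IndexReader.open(dir, false);
        reader.deleteDocument(0);                     // delete by internal doc id
        reader.deleteDocuments(new Term("id", "2"));  // or delete everything matching a term
        System.out.println("numDocs after deletes: " + reader.numDocs()); // prints 0
        reader.close(); // closing the reader commits the pending deletions
    }
}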