Example #1
0
  /**
   * Builds the Lucene {@link IndexWriter} for this company's directory and stores it in
   * {@code _indexWriter}. Failures are logged rather than propagated.
   */
  private void _initIndexWriter() {
    try {
      // Cap the number of tokens indexed per field to avoid runaway documents.
      Analyzer limitedAnalyzer =
          new LimitTokenCountAnalyzer(
              LuceneHelperUtil.getAnalyzer(), PropsValues.LUCENE_ANALYZER_MAX_TOKENS);

      IndexWriterConfig writerConfig =
          new IndexWriterConfig(LuceneHelperUtil.getVersion(), limitedAnalyzer);

      writerConfig.setIndexDeletionPolicy(_dumpIndexDeletionPolicy);
      writerConfig.setMergePolicy(_getMergePolicy());
      writerConfig.setMergeScheduler(_getMergeScheduler());
      writerConfig.setRAMBufferSizeMB(PropsValues.LUCENE_BUFFER_SIZE);

      _indexWriter = new IndexWriter(getLuceneDir(), writerConfig);

      if (!IndexReader.indexExists(getLuceneDir())) {

        // Workaround for LUCENE-2386: commit immediately so that an empty but
        // valid index exists on disk.

        if (_log.isDebugEnabled()) {
          _log.debug("Creating missing index");
        }

        _indexWriter.commit();
      }
    } catch (Exception e) {
      _log.error("Initializing Lucene writer failed for " + _companyId, e);
    }
  }
 /**
  * Creates the engine's {@link IndexWriter} over the store directory.
  *
  * @param create whether to create a fresh index ({@code CREATE}) or open an existing one
  *     ({@code APPEND})
  * @return the configured writer
  * @throws IOException if the writer cannot be created
  */
 private IndexWriter createWriter(boolean create) throws IOException {
   try {
     final IndexWriterConfig writerConfig = new IndexWriterConfig(engineConfig.getAnalyzer());
     // Commits are driven explicitly by the engine, never implicitly on close.
     writerConfig.setCommitOnClose(false);
     if (create) {
       writerConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
     } else {
       writerConfig.setOpenMode(IndexWriterConfig.OpenMode.APPEND);
     }
     writerConfig.setIndexDeletionPolicy(deletionPolicy);
     // With tests.verbose, Lucene sets this up: plumb to align with filesystem stream.
     boolean verboseInfoStream = false;
     try {
       verboseInfoStream = Boolean.parseBoolean(System.getProperty("tests.verbose"));
     } catch (Throwable ignore) {
       // Best effort only; property access may be restricted.
     }
     if (verboseInfoStream) {
       writerConfig.setInfoStream(InfoStream.getDefault());
     } else {
       writerConfig.setInfoStream(new LoggerInfoStream(logger));
     }
     writerConfig.setMergeScheduler(mergeScheduler);
     // Wrap the configured policy so old segments can be upgraded during background merges.
     writerConfig.setMergePolicy(new ElasticsearchMergePolicy(config().getMergePolicy()));
     writerConfig.setSimilarity(engineConfig.getSimilarity());
     writerConfig.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().mbFrac());
     writerConfig.setCodec(engineConfig.getCodec());
     // Always use compound files on flush - reduces # of file-handles on refresh.
     writerConfig.setUseCompoundFile(true);
     return new IndexWriter(store.directory(), writerConfig);
   } catch (LockObtainFailedException ex) {
     logger.warn("could not lock IndexWriter", ex);
     throw ex;
   }
 }
Example #3
0
 /**
  * Opens an {@link IndexWriter} on the given directory using the shared analyzer.
  *
  * @param directory the index directory
  * @param maxMergeDocs upper bound on documents per merged segment
  * @param useSerialMerger if {@code true}, install the shared merge scheduler
  * @return a writer configured with a {@link LogByteSizeMergePolicy}
  */
 public static IndexWriter openWriter(
     Directory directory, int maxMergeDocs, boolean useSerialMerger)
     throws CorruptIndexException, LockObtainFailedException, IOException {
   final IndexWriterConfig writerConfig = new IndexWriterConfig(LUCENE_VERSION, analyzer);
   if (useSerialMerger) {
     writerConfig.setMergeScheduler(mergeScheduler);
   }
   final LogMergePolicy byteSizePolicy = new LogByteSizeMergePolicy();
   byteSizePolicy.setMaxMergeDocs(maxMergeDocs);
   writerConfig.setMergePolicy(byteSizePolicy);
   return new IndexWriter(directory, writerConfig);
 }
Example #4
0
 /**
  * Deletes every document in the index at {@code PATH}.
  *
  * <p>Fix: the {@link IndexWriter} is now closed in a {@code finally} block. The original
  * version never closed it, leaking the index write lock and leaving the deletion
  * uncommitted (close commits pending changes in Lucene 3.6).
  */
 public void deleteIndex() {
   Directory fsDir;
   try {
     fsDir = FSDirectory.open(new File(PATH));
     // Cache small flushed segments in RAM (<= 5 MB per segment, <= 60 MB total).
     NRTCachingDirectory cachedFSDir = new NRTCachingDirectory(fsDir, 5.0, 60.0);
     IndexWriterConfig conf =
         new IndexWriterConfig(Version.LUCENE_36, new StandardAnalyzer(Version.LUCENE_36));
     conf.setMergeScheduler(cachedFSDir.getMergeScheduler());
     IndexWriter writer = new IndexWriter(cachedFSDir, conf);
     try {
       writer.deleteAll();
     } finally {
       // Commits the deletion and releases the index write lock.
       writer.close();
     }
   } catch (IOException e) {
     e.printStackTrace();
   }
 }
Example #5
0
  /**
   * Opens an {@link IndexWriter} over an NRT-caching wrapper of the {@code PATH} directory.
   *
   * <p>Fix: the original version silently swallowed every exception and returned {@code null};
   * failures are now reported so callers can diagnose a {@code null} result. The caller is
   * responsible for closing the returned writer.
   *
   * @return the writer, or {@code null} if it could not be created
   */
  public static IndexWriter getIndexWriter() {
    IndexWriter writer = null;
    try {
      Directory fsDir = FSDirectory.open(new File(PATH));
      System.out.println("在目录:" + PATH + " 开始创建索引");
      // Cache small flushed segments in RAM (<= 5 MB per segment, <= 60 MB total).
      NRTCachingDirectory cachedFSDir = new NRTCachingDirectory(fsDir, 5.0, 60.0);
      Analyzer analyzer = new IKAnalyzer();
      IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_36, analyzer);
      conf.setMergeScheduler(cachedFSDir.getMergeScheduler());
      writer = new IndexWriter(cachedFSDir, conf);
    } catch (Exception e) {
      // Report instead of swallowing; the caller still receives null on failure.
      e.printStackTrace();
    }
    return writer;
  }
Example #6
0
  /**
   * Rebuilds the full index at {@code PATH} from the database.
   *
   * <p>Fix: the {@link IndexWriter} is now closed in a {@code finally} block so the index
   * write lock is released even if indexing fails part-way (the original leaked the writer on
   * any exception). Per-row document construction is extracted into
   * {@link #buildDocument(MapBean)}.
   *
   * @throws IOException if the index directory or writer cannot be used
   */
  private static void createIndex() throws IOException {

    Directory fsDir = FSDirectory.open(new File(PATH));
    // Cache small flushed segments in RAM (<= 5 MB per segment, <= 60 MB total).
    NRTCachingDirectory cachedFSDir = new NRTCachingDirectory(fsDir, 5.0, 60.0);
    Analyzer analyzer = new IKAnalyzer();
    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_36, analyzer);
    conf.setMergeScheduler(cachedFSDir.getMergeScheduler());
    IndexWriter writer = new IndexWriter(cachedFSDir, conf);
    try {
      // Full rebuild: wipe the existing index first.
      writer.deleteAll();

      // Data source for the index.
      List<MapBean> ls = LuceneIndexOperator.createDataSource();
      int i = 0;
      for (MapBean mdata : ls) {
        i++;
        writer.addDocument(buildDocument(mdata));
        // Commit in batches so a crash loses at most 10000 documents of work.
        if (i % 10000 == 0) {
          writer.commit();
          System.out.println("提交" + i);
        }
      }
      writer.optimize(true);
      writer.commit();
    } finally {
      // Always release the index write lock; close() is idempotent in Lucene 3.6.
      writer.close();
    }
  }

  /** Maps one {@link MapBean} row to a Lucene {@link Document}. */
  private static Document buildDocument(MapBean mdata) {
    Document doc = new Document();
    doc.add(new Field("id", "" + mdata.getId(), Field.Store.YES, Field.Index.ANALYZED));
    doc.add(new Field("name", mdata.getName(), Field.Store.YES, Field.Index.ANALYZED));
    doc.add(new Field("address", mdata.getAddress(), Field.Store.YES, Field.Index.ANALYZED));
    doc.add(new Field("city", mdata.getCity(), Field.Store.YES, Field.Index.ANALYZED));
    doc.add(new Field("num", mdata.getNum(), Field.Store.YES, Field.Index.ANALYZED));
    // Coordinates are stored verbatim (not analyzed) so exact lookups work.
    doc.add(
        new Field(
            "eastNew",
            nullToBlank(mdata.getEastNew()),
            Field.Store.YES,
            Field.Index.NOT_ANALYZED));
    doc.add(
        new Field(
            "northNew",
            nullToBlank(mdata.getNorthNew()),
            Field.Store.YES,
            Field.Index.NOT_ANALYZED));
    doc.add(new Field("datatype", mdata.getDatatype(), Field.Store.YES, Field.Index.ANALYZED));
    doc.add(new Field("phone", mdata.getPhone(), Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.add(
        new Field(
            "geom",
            mdata.getEastNew() + "," + mdata.getNorthNew(),
            Field.Store.YES,
            Field.Index.ANALYZED));
    doc.add(
        new Field(
            "dataType", nullToBlank(mdata.getDatatype()), Field.Store.YES, Field.Index.ANALYZED));
    doc.add(
        new Field(
            "comType",
            nullToBlank(mdata.getDataTypeByComType().getDataTypeName()),
            Field.Store.YES,
            Field.Index.ANALYZED));
    doc.add(
        new Field(
            "dataTypeKey",
            nullToBlank(mdata.getDataTypeByDataType().getDataTypeKey()),
            Field.Store.YES,
            Field.Index.ANALYZED));
    doc.add(
        new Field(
            "comTypeKey",
            nullToBlank(mdata.getDataTypeByComType().getDataTypeKey()),
            Field.Store.YES,
            Field.Index.ANALYZED));
    return doc;
  }