@PostConstruct
 public void createOrVerifyIndex() throws Exception {
   LOGGER.info("Initializing Index..........................please Wait..0%");
   index = new File(appproperties.getLuceneIndexPath());
   suggest = new File(appproperties.getLiceneSuggestIndexPath());
   directory = FSDirectory.open(index, NoLockFactory.getNoLockFactory());
   suggestDirectory = FSDirectory.open(suggest, NoLockFactory.getNoLockFactory());
   iwc.setOpenMode(OpenMode.CREATE_OR_APPEND);
   writer = new IndexWriter(directory, iwc);
   writer.commit();
   indexReader = DirectoryReader.open(directory);
   indexSearcher = new IndexSearcher(indexReader, executorService);
   parser = new MultiFieldQueryParser(new String[] {TITLE_FIELD, CONTENTS_FIELD}, analyzer);
   suggester =
       new AnalyzingInfixSuggester(
           Version.LATEST,
           suggestDirectory,
           analyzer,
           analyzer,
           AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS);
   if (!suggest.exists() && !suggest.isDirectory()) {
     LOGGER.info(
         "Lucene Suggest did not exist.........................................Building Please wait.........0%");
     suggester.build(new IndexFileIterator(new ArrayList<IndexFile>().iterator()));
     suggester.refresh();
     LOGGER.info(
         "Lucene Suggest Build Complete...................................................................100%");
   }
   LOGGER.info("Lucene Ready............................................100%");
 }
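For context, a minimal sketch of how the fields initialized above might be used afterwards; the query(...) and suggest(...) helpers below are assumptions for illustration, not part of the original bean (Lucene 4.x-era classes such as Query, TopDocs, ScoreDoc, and the suggest Lookup type are assumed to be imported).
  // Hypothetical helpers layered on the fields set up in createOrVerifyIndex();
  // names and result handling are illustrative, not from the original source.
  public List<String> query(String userInput) throws Exception {
    Query q = parser.parse(userInput);            // MultiFieldQueryParser over TITLE_FIELD/CONTENTS_FIELD
    TopDocs hits = indexSearcher.search(q, 10);   // top 10 matches
    List<String> titles = new ArrayList<>();
    for (ScoreDoc sd : hits.scoreDocs) {
      titles.add(indexSearcher.doc(sd.doc).get(TITLE_FIELD));
    }
    return titles;
  }

  public List<Lookup.LookupResult> suggest(String prefix) throws IOException {
    // Lookup.lookup(key, onlyMorePopular, num) from the same Lucene 4.x API level as above
    return suggester.lookup(prefix, false, 5);
  }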
  private static Directory injectLockFactory(Directory dir, String lockPath, String rawLockType)
      throws IOException {
    if (null == rawLockType) {
      // we default to "simple" for backwards compatibility
      log.warn("No lockType configured for " + dir + " assuming 'simple'");
      rawLockType = "simple";
    }
    final String lockType = rawLockType.toLowerCase(Locale.ROOT).trim();

    if ("simple".equals(lockType)) {
      // multiple SimpleFSLockFactory instances should be OK
      dir.setLockFactory(new SimpleFSLockFactory(lockPath));
    } else if ("native".equals(lockType)) {
      dir.setLockFactory(new NativeFSLockFactory(lockPath));
    } else if ("single".equals(lockType)) {
      if (!(dir.getLockFactory() instanceof SingleInstanceLockFactory))
        dir.setLockFactory(new SingleInstanceLockFactory());
    } else if ("hdfs".equals(lockType)) {
      Directory del = dir;

      if (dir instanceof NRTCachingDirectory) {
        del = ((NRTCachingDirectory) del).getDelegate();
      }

      if (del instanceof BlockDirectory) {
        del = ((BlockDirectory) del).getDirectory();
      }

      if (!(del instanceof HdfsDirectory)) {
        throw new SolrException(
            ErrorCode.FORBIDDEN,
            "Directory: "
                + del.getClass().getName()
                + ", but hdfs lock factory can only be used with HdfsDirectory");
      }

      dir.setLockFactory(
          new HdfsLockFactory(
              ((HdfsDirectory) del).getHdfsDirPath(), ((HdfsDirectory) del).getConfiguration()));
    } else if ("none".equals(lockType)) {
      // Recipe for disaster
      log.error("CONFIGURATION WARNING: locks are disabled on " + dir);
      dir.setLockFactory(NoLockFactory.getNoLockFactory());
    } else {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR, "Unrecognized lockType: " + rawLockType);
    }
    return dir;
  }
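A hedged call-site sketch for the helper above; the index path and lockType literal are placeholders rather than values from any real configuration.
    // Illustrative only: open a raw FSDirectory, then let injectLockFactory(...) attach
    // the lock factory named by the (assumed) lockType setting.
    Directory raw = FSDirectory.open(new File("/var/solr/data/core1/index"));
    Directory dir = injectLockFactory(raw, "/var/solr/data/core1/index", "native");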
  @Override
  public List<SearchTerm> search(String terms, List<String> strings, Integer threshold)
      throws Exception {
    IndexReader ir =
        DirectoryReader.open(
            new SimpleFSDirectory(
                new File(systemProperties.getProperty("crawler.dir.lucene")),
                NoLockFactory.getNoLockFactory()));

    try {
      return termExtractService.search(ir, cruiseAnalyzer, terms, strings, threshold, "crawler");
    } finally {
      // close the reader even if the term search throws
      ir.close();
    }
  }
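A hypothetical invocation of the method above; the query terms, field list, and threshold are placeholders, and the exact meaning of threshold follows the termExtractService contract, which is not shown here.
    // Placeholder arguments for illustration only.
    List<SearchTerm> matches = search("sea surface temperature", Arrays.asList("title", "contents"), 80);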
Example #4
 public HdfsDirectory(Path hdfsDirPath, Configuration configuration) throws IOException {
   assert hdfsDirPath.toString().startsWith("hdfs:/") : hdfsDirPath.toString();
   setLockFactory(NoLockFactory.getNoLockFactory());
   this.hdfsDirPath = hdfsDirPath;
   this.configuration = configuration;
   fileSystem = FileSystem.newInstance(hdfsDirPath.toUri(), configuration);
   try {
     if (!fileSystem.exists(hdfsDirPath)) {
       fileSystem.mkdirs(hdfsDirPath);
     }
   } catch (Exception e) {
     org.apache.solr.util.IOUtils.closeQuietly(fileSystem);
     throw new RuntimeException("Problem creating directory: " + hdfsDirPath, e);
   }
 }
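A minimal construction sketch for this directory, assuming a reachable HDFS namenode; the URI below is a placeholder.
   // Placeholder namenode URI; the Hadoop client must be on the classpath so that
   // FileSystem.newInstance(...) can resolve the hdfs:// scheme.
   Configuration conf = new Configuration();
   Directory dir = new HdfsDirectory(new Path("hdfs://namenode:8020/solr/collection1/index"), conf);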
Example #5
  @SuppressWarnings("PMD.CollapsibleIfStatements")
  private void initialize() throws IOException {
    synchronized (this) {
      RuntimeEnvironment env = RuntimeEnvironment.getInstance();
      File indexDir = new File(env.getDataRootFile(), "index");
      File spellDir = new File(env.getDataRootFile(), "spellIndex");
      if (project != null) {
        indexDir = new File(indexDir, project.getPath());
        spellDir = new File(spellDir, project.getPath());
      }

      if (!indexDir.exists() && !indexDir.mkdirs()) {
        // to avoid race conditions, just recheck..
        if (!indexDir.exists()) {
          throw new FileNotFoundException(
              "Failed to create root directory [" + indexDir.getAbsolutePath() + "]");
        }
      }

      if (!spellDir.exists() && !spellDir.mkdirs()) {
        if (!spellDir.exists()) {
          throw new FileNotFoundException(
              "Failed to create root directory [" + spellDir.getAbsolutePath() + "]");
        }
      }

      if (!env.isUsingLuceneLocking()) {
        lockfact = NoLockFactory.getNoLockFactory();
      }
      indexDirectory = FSDirectory.open(indexDir, lockfact);
      spellDirectory = FSDirectory.open(spellDir, lockfact);
      ignoredNames = env.getIgnoredNames();
      includedNames = env.getIncludedNames();
      analyzerGuru = new AnalyzerGuru();
      if (env.isGenerateHtml()) {
        xrefDir = new File(env.getDataRootFile(), "xref");
      }
      listeners = new ArrayList<IndexChangedListener>();
      dirtyFile = new File(indexDir, "dirty");
      dirty = dirtyFile.exists();
      directories = new ArrayList<String>();
    }
  }
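A sketch of what typically follows initialize(): opening an IndexWriter on indexDirectory. The analyzer and config below are assumptions, not this class's actual writer setup; note that with NoLockFactory injected above, Lucene enforces no write lock, so callers must serialize updates themselves.
    // Illustrative follow-up, not from the original class.
    IndexWriterConfig cfg = new IndexWriterConfig(Version.LATEST, new StandardAnalyzer(Version.LATEST));
    cfg.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
    IndexWriter writer = new IndexWriter(indexDirectory, cfg);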
Example #6
  private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit)
      throws IOException {
    dir.setLockFactory(NoLockFactory.getNoLockFactory());

    IndexWriter writer =
        new IndexWriter(
            dir,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
                .setMaxBufferedDocs(10)
                .setMergeScheduler(new ConcurrentMergeScheduler()));
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
    if (initialCommit) {
      writer.commit();
    }

    Document doc = new Document();
    doc.add(newField("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
    doc.add(newField("id", "0", Field.Store.YES, Field.Index.ANALYZED));
    for (int i = 0; i < 157; i++) writer.addDocument(doc);

    return writer;
  }
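A hypothetical use of this helper inside a LuceneTestCase subclass; the surrounding failure-injection test body is omitted.
    // Hypothetical test usage; newMockDirectory() and random() come from LuceneTestCase.
    MockDirectoryWrapper dir = newMockDirectory();
    IndexWriter writer = initIndex(random(), dir, true);
    // ... exercise crash/failure scenarios here ...
    writer.close();
    dir.close();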
Example #7
  /**
   * Get an IndexReader for the index database where a given file is indexed.
   *
   * @param path the file to get the database for
   * @return the index reader for the database where the file should be located, or null if it cannot be located.
   */
  public static IndexReader getIndexReader(String path) {
    IndexReader ret = null;

    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    File indexDir = new File(env.getDataRootFile(), "index");

    if (env.hasProjects()) {
      Project p = Project.getProject(path);
      if (p == null) {
        return null;
      }
      indexDir = new File(indexDir, p.getPath());
    }
    try {
      FSDirectory fdir = FSDirectory.open(indexDir, NoLockFactory.getNoLockFactory());
      if (indexDir.exists() && DirectoryReader.indexExists(fdir)) {
        ret = DirectoryReader.open(fdir);
      }
    } catch (Exception ex) {
      log.log(Level.SEVERE, "Failed to open index: {0}", indexDir.getAbsolutePath());
      log.log(Level.FINE, "Stack Trace: ", ex);
    }
    return ret;
  }
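An illustrative caller for getIndexReader(...); the source path and the "full" field name are assumptions about the surrounding setup, not taken from this snippet.
    // Illustrative only; path and field name are placeholders.
    IndexReader reader = getIndexReader("/project/src/Foo.java");
    if (reader != null) {
      try {
        IndexSearcher searcher = new IndexSearcher(reader);
        TopDocs hits = searcher.search(new TermQuery(new Term("full", "foo")), 10);
        // use hits.scoreDocs as needed
      } finally {
        reader.close();
      }
    }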
  public HdfsDirectory(Path hdfsDirPath, Configuration configuration) throws IOException {
    setLockFactory(NoLockFactory.getNoLockFactory());
    this.hdfsDirPath = hdfsDirPath;
    this.configuration = configuration;
    fileSystem = FileSystem.newInstance(hdfsDirPath.toUri(), configuration);

    while (true) {
      try {
        if (!fileSystem.exists(hdfsDirPath)) {
          boolean success = fileSystem.mkdirs(hdfsDirPath);
          if (!success) {
            throw new RuntimeException("Could not create directory: " + hdfsDirPath);
          }
        } else {
          fileSystem.mkdirs(hdfsDirPath); // check for safe mode
        }

        break;
      } catch (RemoteException e) {
        if (e.getClassName().equals("org.apache.hadoop.hdfs.server.namenode.SafeModeException")) {
          LOG.warn("The NameNode is in SafeMode - Solr will wait 5 seconds and try again.");
          try {
            Thread.sleep(5000);
          } catch (InterruptedException e1) {
            Thread.currentThread().interrupt(); // restore the interrupt flag instead of clearing it
          }
          continue;
        }
        org.apache.solr.util.IOUtils.closeQuietly(fileSystem);
        throw new RuntimeException("Problem creating directory: " + hdfsDirPath, e);
      } catch (Exception e) {
        org.apache.solr.util.IOUtils.closeQuietly(fileSystem);
        throw new RuntimeException("Problem creating directory: " + hdfsDirPath, e);
      }
    }
  }