Example #1
  /**
   * Update the index database for all of the projects
   *
   * @param executor An executor to run the job
   * @param listener where to signal the changes to the database
   * @throws IOException if an error occurs
   */
  static void updateAll(ExecutorService executor, IndexChangedListener listener)
      throws IOException {
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    List<IndexDatabase> dbs = new ArrayList<>();

    if (env.hasProjects()) {
      for (Project project : env.getProjects()) {
        dbs.add(new IndexDatabase(project));
      }
    } else {
      dbs.add(new IndexDatabase());
    }

    for (IndexDatabase d : dbs) {
      final IndexDatabase db = d;
      if (listener != null) {
        db.addIndexChangedListener(listener);
      }

      executor.submit(
          new Runnable() {
            @Override
            public void run() {
              try {
                db.update();
              } catch (Throwable e) {
                LOGGER.log(Level.SEVERE, "Problem updating lucene index database: ", e);
              }
            }
          });
    }
  }
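A minimal usage sketch for the method above (assumed driver code, not part of OpenGrok): since updateAll is package-private, the sketch would have to live in the same package as IndexDatabase, and it presumes the configuration has already been loaded into RuntimeEnvironment.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class UpdateAllSketch {
  public static void main(String[] args) throws Exception {
    // One worker per CPU; each index database becomes one update job.
    ExecutorService executor =
        Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
    IndexDatabase.updateAll(executor, null); // a null listener is tolerated by updateAll()
    executor.shutdown();
    executor.awaitTermination(1, TimeUnit.HOURS); // wait for the submitted jobs to finish
  }
}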
Example #2
  protected JFlexXref() {
    try {
      // TODO when bug #16053 is fixed, we should add a getter to a file
      // that's included from all the Xref classes so that we avoid the
      // reflection.
      Field f = getClass().getField("YYEOF");
      yyeof = f.getInt(null);
      userPageLink = RuntimeEnvironment.getInstance().getUserPage();
      if (userPageLink != null && userPageLink.length() == 0) {
        userPageLink = null;
      }
      userPageSuffix = RuntimeEnvironment.getInstance().getUserPageSuffix();
      if (userPageSuffix != null && userPageSuffix.length() == 0) {
        userPageSuffix = null;
      }
    } catch (NoSuchFieldException
        | SecurityException
        | IllegalArgumentException
        | IllegalAccessException e) {
      // The auto-generated constructors for the Xref classes don't
      // expect a checked exception, so wrap it in an AssertionError.
      // This should never happen, since all the Xref classes will get
      // a public static YYEOF field from JFlex.

      // NOPMD (the stack trace is preserved because the cause is passed
      // to the AssertionError constructor, but PMD thinks it's lost)
      throw new AssertionError("Couldn't initialize yyeof", e);
    }
  }
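An isolated illustration of the reflection used above, with a hypothetical class standing in for a JFlex-generated scanner that exposes a public static YYEOF constant:

import java.lang.reflect.Field;

public class StaticFieldLookupSketch {
  public static final int YYEOF = -1; // stands in for the JFlex-generated constant

  public static void main(String[] args) throws Exception {
    Field f = StaticFieldLookupSketch.class.getField("YYEOF"); // public fields only
    int yyeof = f.getInt(null); // null receiver, because the field is static
    System.out.println("YYEOF = " + yyeof);
  }
}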
Example #3
  /**
   * Optimize all index databases
   *
   * @param executor An executor to run the job
   * @throws IOException if an error occurs
   */
  static void optimizeAll(ExecutorService executor) throws IOException {
    List<IndexDatabase> dbs = new ArrayList<IndexDatabase>();
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    if (env.hasProjects()) {
      for (Project project : env.getProjects()) {
        dbs.add(new IndexDatabase(project));
      }
    } else {
      dbs.add(new IndexDatabase());
    }

    for (IndexDatabase d : dbs) {
      final IndexDatabase db = d;
      if (db.isDirty()) {
        executor.submit(
            new Runnable() {
              @Override
              public void run() {
                try {
                  db.optimize();
                } catch (Throwable e) {
                  log.log(Level.SEVERE, "Problem optimizing lucene index database: ", e);
                }
              }
            });
      }
    }
  }
Example #4
  @BeforeClass
  public static void setUpClass() throws Exception {
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    assertTrue("No ctags available", env.validateExuberantCtags());

    repository = new TestRepository();
    repository.create(IndexDatabase.class.getResourceAsStream("source.zip"));

    env.setSourceRoot(repository.getSourceRoot());
    env.setDataRoot(repository.getDataRoot());

    Indexer indexer = Indexer.getInstance();
    indexer.prepareIndexer(
        env,
        true,
        true,
        "/c",
        null,
        false,
        false,
        false,
        null,
        null,
        new ArrayList<String>(),
        false);
    indexer.doIndexerExecution(true, 1, null, null);
  }
Example #5
  /**
   * Get a writer to which the xref can be written, or null if no xref should be produced for files
   * of this type.
   */
  private Writer getXrefWriter(FileAnalyzer fa, String path) throws IOException {
    Genre g = fa.getFactory().getGenre();
    if (xrefDir != null && (g == Genre.PLAIN || g == Genre.XREFABLE)) {
      File xrefFile = new File(xrefDir, path);
      // If mkdirs() returns false, the failure is most likely
      // because the directory already exists. Checking for the
      // directory first and creating it only if it doesn't exist
      // would just add more file I/O...
      if (!xrefFile.getParentFile().mkdirs()) {
        assert xrefFile.getParentFile().exists();
      }

      RuntimeEnvironment env = RuntimeEnvironment.getInstance();

      boolean compressed = env.isCompressXref();
      File file = new File(xrefDir, path + (compressed ? ".gz" : ""));
      return new BufferedWriter(
          new OutputStreamWriter(
              compressed
                  ? new GZIPOutputStream(new FileOutputStream(file))
                  : new FileOutputStream(file)));
    }

    // no Xref for this analyzer
    return null;
  }
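A stand-alone sketch of the compressed-versus-plain choice made by getXrefWriter(); the file name is hypothetical and a local flag stands in for RuntimeEnvironment.isCompressXref().

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.zip.GZIPOutputStream;

public class XrefWriterSketch {
  public static void main(String[] args) throws IOException {
    boolean compressed = true; // stands in for env.isCompressXref()
    File file = new File("/tmp/xref/foo.c" + (compressed ? ".gz" : ""));
    file.getParentFile().mkdirs(); // result ignored, as above; the directory may already exist
    try (Writer out = new BufferedWriter(new OutputStreamWriter(
        compressed ? new GZIPOutputStream(new FileOutputStream(file))
                   : new FileOutputStream(file)))) {
      out.write("xref content would go here");
    }
  }
}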
Example #6
  static void listFrequentTokens(List<String> subFiles) throws IOException {
    final int limit = 4;

    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    if (env.hasProjects()) {
      if (subFiles == null || subFiles.isEmpty()) {
        for (Project project : env.getProjects()) {
          IndexDatabase db = new IndexDatabase(project);
          db.listTokens(limit);
        }
      } else {
        for (String path : subFiles) {
          Project project = Project.getProject(path);
          if (project == null) {
            log.log(Level.WARNING, "Warning: Could not find a project for \"{0}\"", path);
          } else {
            IndexDatabase db = new IndexDatabase(project);
            db.listTokens(limit);
          }
        }
      }
    } else {
      IndexDatabase db = new IndexDatabase();
      db.listTokens(limit);
    }
  }
Example #7
  /**
   * Create a history log cache for all files in this repository. {@code getHistory()} is used to
   * fetch the history for the entire repository. If {@code hasHistoryForDirectories()} returns
   * {@code false}, this method is a no-op.
   *
   * @param cache the cache instance in which to store the history log
   * @param sinceRevision if non-null, incrementally update the cache with all revisions after the
   *     specified revision; otherwise, create the full history starting with the initial revision
   * @throws HistoryException on error
   */
  final void createCache(HistoryCache cache, String sinceRevision) throws HistoryException {
    if (!isWorking()) {
      return;
    }

    // If we don't have a directory parser, we can't create the cache
    // this way. Just give up and return.
    if (!hasHistoryForDirectories()) {
      Logger.getLogger(getClass().getName())
          .log(
              Level.INFO,
              "Skipping creation of history cache for {0}, since retrieval "
                  + "of history for directories is not implemented for this "
                  + "repository type.",
              getDirectoryName());
      return;
    }

    File directory = new File(getDirectoryName());

    History history;
    try {
      history = getHistory(directory, sinceRevision);
    } catch (HistoryException he) {
      if (sinceRevision == null) {
        // Failed to get full history, so fail.
        throw he;
      }
      // Failed to get partial history. This may have been caused
      // by changes in the revision numbers since the last update
      // (bug #14724) so we'll try to regenerate the cache from
      // scratch instead.
      OpenGrokLogger.getLogger()
          .log(
              Level.INFO,
              "Failed to get partial history. Attempting to "
                  + "recreate the history cache from scratch.",
              he);
      history = null;
    }

    if (sinceRevision != null && history == null) {
      // Failed to get partial history, now get full history instead.
      history = getHistory(directory);
      // Got full history successfully. Clear the history cache so that
      // we can recreate it from scratch.
      cache.clear(this);
    }

    // We need to refresh the list of tags for incremental reindex.
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    if (env.isTagsEnabled() && this.hasFileBasedTags()) {
      this.buildTagList(new File(this.directoryName));
    }

    if (history != null) {
      cache.store(history, this);
    }
  }
Example #8
  /** Helper for {@link #get(File, Repository)}. */
  private History getHistory(File file, Repository repository, boolean withFiles)
      throws HistoryException, SQLException {
    final String filePath = getSourceRootRelativePath(file);
    final String reposPath = toUnixPath(repository.getDirectoryName());
    final ArrayList<HistoryEntry> entries = new ArrayList<HistoryEntry>();
    final ConnectionResource conn = connectionManager.getConnectionResource();
    try {
      final PreparedStatement ps;
      if (file.isDirectory()) {
        // Fetch history for all files under this directory.
        ps = conn.getStatement(GET_DIR_HISTORY);
        ps.setString(2, filePath);
      } else {
        // Fetch history for a single file only.
        ps = conn.getStatement(GET_FILE_HISTORY);
        ps.setString(2, getParentPath(filePath));
        ps.setString(3, getBaseName(filePath));
      }
      ps.setString(1, reposPath);

      final PreparedStatement filePS = withFiles ? conn.getStatement(GET_CS_FILES) : null;

      try (ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
          // Get the information about a changeset
          String revision = rs.getString(1);
          String author = rs.getString(2);
          Timestamp time = rs.getTimestamp(3);
          String message = rs.getString(4);
          HistoryEntry entry = new HistoryEntry(revision, time, author, null, message, true);
          entries.add(entry);

          // Fill the list of files touched by the changeset, if
          // requested.
          if (withFiles) {
            int changeset = rs.getInt(5);
            filePS.setInt(1, changeset);
            try (ResultSet fileRS = filePS.executeQuery()) {
              while (fileRS.next()) {
                entry.addFile(fileRS.getString(1));
              }
            }
          }
        }
      }
    } finally {
      connectionManager.releaseConnection(conn);
    }

    History history = new History();
    history.setHistoryEntries(entries);

    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    if (env.isTagsEnabled() && repository.hasFileBasedTags()) {
      repository.assignTagsInHistory(history);
    }

    return history;
  }
Example #9
 @Override
 History getHistory(File file, String sinceRevision) throws HistoryException {
   RuntimeEnvironment env = RuntimeEnvironment.getInstance();
   History result = new BazaarHistoryParser(this).parse(file, sinceRevision);
   // Assign tags to changesets they represent
   // We don't need to check whether this repository supports tags, because we know it does. :-)
   if (env.isTagsEnabled()) {
     assignTagsInHistory(result);
   }
   return result;
 }
Example #10
 /**
  * Set the name of the external client command that should be used to access the repository with
  * respect to the given parameters. Does nothing if this repository's <var>RepoCommand</var> has
  * already been set (i.e. has a non-{@code null} value).
  *
  * @param propertyKey property key to look up the corresponding system property.
  * @param fallbackCommand the command to use if the lookup fails.
  * @return the command to use.
  * @see #RepoCommand
  */
 protected String ensureCommand(String propertyKey, String fallbackCommand) {
   if (RepoCommand != null) {
     return RepoCommand;
   }
   RepoCommand = RuntimeEnvironment.getInstance().getRepoCmd(this.getClass().getCanonicalName());
   if (RepoCommand == null) {
     RepoCommand = System.getProperty(propertyKey, fallbackCommand);
     RuntimeEnvironment.getInstance().setRepoCmd(this.getClass().getCanonicalName(), RepoCommand);
   }
   return RepoCommand;
 }
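The lookup order implemented above, shown in isolation and without the RuntimeEnvironment layer: a previously configured command wins, then the system property, then the hard-coded fallback. The property key and command name below are made up for the illustration.

public class CommandLookupSketch {
  public static void main(String[] args) {
    String configured = null; // would come from RuntimeEnvironment.getRepoCmd(...)
    String repoCommand = (configured != null)
        ? configured
        : System.getProperty("org.example.foo.command", "foo");
    System.out.println("repository command: " + repoCommand);
  }
}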
Example #11
  /**
   * Update the index database for a number of sub-directories.
   *
   * @param executor An executor to run the job
   * @param listener where to signal the changes to the database
   * @param paths a list of source root relative paths of the directories to update
   * @throws IOException if an error occurs
   */
  public static void update(
      ExecutorService executor, IndexChangedListener listener, List<String> paths)
      throws IOException {
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    List<IndexDatabase> dbs = new ArrayList<IndexDatabase>();

    for (String path : paths) {
      Project project = Project.getProject(path);
      if (project == null && env.hasProjects()) {
        log.log(Level.WARNING, "Could not find a project for \"{0}\"", path);
      } else {
        IndexDatabase db;

        try {
          if (project == null) {
            db = new IndexDatabase();
          } else {
            db = new IndexDatabase(project);
          }

          int idx = dbs.indexOf(db);
          if (idx != -1) {
            db = dbs.get(idx);
          }

          if (db.addDirectory(path)) {
            if (idx == -1) {
              dbs.add(db);
            }
          } else {
            log.log(Level.WARNING, "Directory does not exist \"{0}\"", path);
          }
        } catch (IOException e) {
          log.log(Level.WARNING, "An error occurred while updating index", e);
        }
      }
    }

    for (final IndexDatabase db : dbs) {
      db.addIndexChangedListener(listener);
      executor.submit(
          new Runnable() {
            @Override
            public void run() {
              try {
                db.update();
              } catch (Throwable e) {
                log.log(Level.SEVERE, "An error occurred while updating index", e);
              }
            }
          });
    }
  }
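Assumed driver code for the partial update above (not taken from OpenGrok): the listener is supplied by the caller, the paths are hypothetical source-root-relative directories, and imports for the OpenGrok types are omitted since their packages are not shown in the example.

import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class PartialUpdateSketch {
  static void reindexSubtrees(IndexChangedListener listener) throws Exception {
    ExecutorService executor = Executors.newFixedThreadPool(2);
    IndexDatabase.update(executor, listener, Arrays.asList("/projectA", "/projectB/src"));
    executor.shutdown();
    executor.awaitTermination(1, TimeUnit.HOURS);
  }
}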
Example #12
  @SuppressWarnings("PMD.CollapsibleIfStatements")
  private void initialize() throws IOException {
    synchronized (this) {
      RuntimeEnvironment env = RuntimeEnvironment.getInstance();
      File indexDir = new File(env.getDataRootFile(), INDEX_DIR);
      if (project != null) {
        indexDir = new File(indexDir, project.getPath());
      }

      if (!indexDir.exists() && !indexDir.mkdirs()) {
        // to avoid race conditions, just recheck..
        if (!indexDir.exists()) {
          throw new FileNotFoundException(
              "Failed to create root directory [" + indexDir.getAbsolutePath() + "]");
        }
      }

      if (!env.isUsingLuceneLocking()) {
        lockfact = NoLockFactory.INSTANCE;
      }
      indexDirectory = FSDirectory.open(indexDir.toPath(), lockfact);
      ignoredNames = env.getIgnoredNames();
      includedNames = env.getIncludedNames();
      analyzerGuru = new AnalyzerGuru();
      if (env.isGenerateHtml()) {
        xrefDir = new File(env.getDataRootFile(), "xref");
      }
      listeners = new ArrayList<>();
      dirtyFile = new File(indexDir, "dirty");
      dirty = dirtyFile.exists();
      directories = new ArrayList<>();
    }
  }
Example #13
  /**
   * Remove a stale file (uidIter.term().text()) from the index database (and the xref file)
   *
   * @throws java.io.IOException if an error occurs
   */
  private void removeFile() throws IOException {
    String path = Util.uid2url(uidIter.term().utf8ToString());

    for (IndexChangedListener listener : listeners) {
      listener.fileRemove(path);
    }
    writer.deleteDocuments(new Term(QueryBuilder.U, uidIter.term()));
    writer.prepareCommit();
    writer.commit();

    File xrefFile;
    if (RuntimeEnvironment.getInstance().isCompressXref()) {
      xrefFile = new File(xrefDir, path + ".gz");
    } else {
      xrefFile = new File(xrefDir, path);
    }
    File parent = xrefFile.getParentFile();

    if (!xrefFile.delete() && xrefFile.exists()) {
      log.log(Level.INFO, "Failed to remove obsolete xref-file: {0}", xrefFile.getAbsolutePath());
    }

    // Remove the parent directory if it's empty
    if (parent.delete()) {
      log.log(Level.FINE, "Removed empty xref dir:{0}", parent.getAbsolutePath());
    }
    setDirty();
    for (IndexChangedListener listener : listeners) {
      listener.fileRemoved(path);
    }
  }
Example #14
 /**
  * Prepare a search helper with all required information, ready to execute the query implied by
  * the related request parameters and cookies.
  *
  * <p>NOTE: One should check the {@link SearchHelper#errorMsg} as well as {@link
  * SearchHelper#redirect} and take the appropriate action before executing the prepared query or
  * continue processing.
  *
  * <p>This method stops populating fields as soon as an error occurs.
  *
  * @return a search helper.
  */
 public SearchHelper prepareSearch() {
   SearchHelper sh = new SearchHelper();
   sh.dataRoot = getDataRoot(); // throws Exception if non-existent
   List<SortOrder> sortOrders = getSortOrder();
   sh.order = sortOrders.isEmpty() ? SortOrder.RELEVANCY : sortOrders.get(0);
   if (getRequestedProjects().isEmpty() && getEnv().hasProjects()) {
     sh.errorMsg = "You must select a project!";
     return sh;
   }
   sh.builder = getQueryBuilder();
   if (sh.builder.getSize() == 0) {
     // Entry page show the map
     sh.redirect = req.getContextPath() + '/';
     return sh;
   }
   sh.start = getSearchStart();
   sh.maxItems = getSearchMaxItems();
   sh.contextPath = req.getContextPath();
   // jel: this should IMHO be a config param, since the decision does not depend only on the core count
   sh.parallel = Runtime.getRuntime().availableProcessors() > 1;
   sh.isCrossRefSearch = getPrefix() == Prefix.SEARCH_R;
   sh.compressed = env.isCompressXref();
   sh.desc = getEftarReader();
   sh.sourceRoot = new File(getSourceRootPath());
   sh.lastEditedDisplayMode = isLastEditedDisplayMode();
   return sh;
 }
Example #15
 /**
  * Write an e-mail address. The address will be obfuscated if {@link
  * RuntimeEnvironment#isObfuscatingEMailAddresses()} returns {@code true}.
  *
  * @param address the address to write
  * @throws IOException if an error occurs while writing to the stream
  */
 protected void writeEMailAddress(String address) throws IOException {
   if (RuntimeEnvironment.getInstance().isObfuscatingEMailAddresses()) {
     out.write(address.replace("@", " (at) "));
   } else {
     out.write(address);
   }
 }
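A pure-JDK illustration of the obfuscation applied above; the address is made up and the boolean stands in for isObfuscatingEMailAddresses().

public class ObfuscateEMailSketch {
  public static void main(String[] args) {
    String address = "jane.doe@example.com"; // hypothetical address
    boolean obfuscate = true;                // stands in for the runtime setting
    String written = obfuscate ? address.replace("@", " (at) ") : address;
    System.out.println(written); // prints: jane.doe (at) example.com
  }
}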
Example #16
 /**
  * Same as {@link #getRequestedProjects()}, but with a variable cookieName and parameter name.
  * This way it is trivial to implement a project filter ...
  *
  * @param paramName the name of the request parameter, which possibly contains the project list in
  *     question.
  * @param cookieName name of the cookie which possibly contains the project list used as fallback
  * @return a possibly empty set, but never {@code null}.
  */
 protected SortedSet<String> getRequestedProjects(String paramName, String cookieName) {
   TreeSet<String> set = new TreeSet<>();
   List<Project> projects = getEnv().getProjects();
   if (projects == null) {
     return set;
   }
   if (projects.size() == 1 && authFramework.isAllowed(req, projects.get(0))) {
     set.add(projects.get(0).getDescription());
     return set;
   }
   List<String> vals = getParamVals(paramName);
   for (String s : vals) {
     Project x = Project.getByDescription(s);
     if (x != null && authFramework.isAllowed(req, x)) {
       set.add(s);
     }
   }
   if (set.isEmpty()) {
     List<String> cookies = getCookieVals(cookieName);
     for (String s : cookies) {
       Project x = Project.getByDescription(s);
       if (x != null && authFramework.isAllowed(req, x)) {
         set.add(s);
       }
     }
   }
   if (set.isEmpty()) {
     Project defaultProject = env.getDefaultProject();
     if (defaultProject != null && authFramework.isAllowed(req, defaultProject)) {
       set.add(defaultProject.getDescription());
     }
   }
   return set;
 }
Example #17
  /**
   * Check if I should accept this file into the index database
   *
   * @param file the file to check
   * @return true if the file should be included, false otherwise
   */
  private boolean accept(File file) {

    if (!includedNames.isEmpty()
        && // the filter should not affect directory names
        (!(file.isDirectory() || includedNames.match(file)))) {
      return false;
    }

    String absolutePath = file.getAbsolutePath();

    if (ignoredNames.ignore(file)) {
      LOGGER.log(Level.FINER, "ignoring {0}", absolutePath);
      return false;
    }

    if (!file.canRead()) {
      LOGGER.log(Level.WARNING, "Could not read {0}", absolutePath);
      return false;
    }

    try {
      String canonicalPath = file.getCanonicalPath();
      if (!absolutePath.equals(canonicalPath) && !acceptSymlink(absolutePath, canonicalPath)) {

        LOGGER.log(
            Level.FINE,
            "Skipped symlink ''{0}'' -> ''{1}''",
            new Object[] {absolutePath, canonicalPath});
        return false;
      }
      // Only regular files and directories get past this point; anything else
      // is considered a special file and is not added.
      if (!file.isFile() && !file.isDirectory()) {
        LOGGER.log(Level.WARNING, "Ignored special file {0}", absolutePath);
        return false;
      }
    } catch (IOException exp) {
      LOGGER.log(Level.WARNING, "Failed to resolve name: {0}", absolutePath);
      LOGGER.log(Level.FINE, "Stack Trace: ", exp);
    }

    if (file.isDirectory()) {
      // always accept directories so that their files can be examined
      return true;
    }

    if (HistoryGuru.getInstance().hasHistory(file)) {
      // versioned files should always be accepted
      return true;
    }

    // this is an unversioned file, check if it should be indexed
    return !RuntimeEnvironment.getInstance().isIndexVersionedFilesOnly();
  }
Example #18
  /**
   * Get the latest definitions for a file from the index.
   *
   * @param file the file whose definitions to find
   * @return definitions for the file, or {@code null} if they could not be found
   * @throws IOException if an error happens when accessing the index
   * @throws ParseException if an error happens when building the Lucene query
   * @throws ClassNotFoundException if the class for the stored definitions instance cannot be found
   */
  public static Definitions getDefinitions(File file)
      throws IOException, ParseException, ClassNotFoundException {
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    String path = env.getPathRelativeToSourceRoot(file, 0);
    // sanitize windows path delimiters
    // in order not to conflict with Lucene escape character
    path = path.replace("\\", "/");

    IndexReader ireader = getIndexReader(path);

    if (ireader == null) {
      // No index, no definitions...
      return null;
    }

    try {
      Query q = new QueryBuilder().setPath(path).build();
      IndexSearcher searcher = new IndexSearcher(ireader);
      TopDocs top = searcher.search(q, 1);
      if (top.totalHits == 0) {
        // No hits, no definitions...
        return null;
      }
      Document doc = searcher.doc(top.scoreDocs[0].doc);
      String foundPath = doc.get(QueryBuilder.PATH);

      // Only use the definitions if we found an exact match.
      if (path.equals(foundPath)) {
        IndexableField tags = doc.getField(QueryBuilder.TAGS);
        if (tags != null) {
          return Definitions.deserialize(tags.binaryValue().bytes);
        }
      }
    } finally {
      ireader.close();
    }

    // Didn't find any definitions.
    return null;
  }
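Assumed caller code for getDefinitions(): the absolute path is hypothetical (it must resolve under the configured source root), and imports for the OpenGrok types are omitted since their packages are not shown here.

import java.io.File;

public class DefinitionsLookupSketch {
  static void printWhetherDefined() throws Exception {
    File source = new File("/var/opengrok/src/projectA/main.c");
    Definitions defs = IndexDatabase.getDefinitions(source);
    System.out.println(defs == null
        ? "no definitions stored for " + source
        : "definitions found for " + source);
  }
}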
Example #19
  /**
   * Add a file to the Lucene index (and generate a xref file)
   *
   * @param file The file to add
   * @param path The path to the file (from source root)
   * @throws java.io.IOException if an error occurs
   */
  private void addFile(File file, String path) throws IOException {
    FileAnalyzer fa;
    try (InputStream in = new BufferedInputStream(new FileInputStream(file))) {
      fa = AnalyzerGuru.getAnalyzer(in, path);
    }

    for (IndexChangedListener listener : listeners) {
      listener.fileAdd(path, fa.getClass().getSimpleName());
    }
    fa.setCtags(ctags);
    fa.setProject(Project.getProject(path));
    fa.setScopesEnabled(RuntimeEnvironment.getInstance().isScopesEnabled());
    fa.setFoldingEnabled(RuntimeEnvironment.getInstance().isFoldingEnabled());

    Document doc = new Document();
    try (Writer xrefOut = getXrefWriter(fa, path)) {
      analyzerGuru.populateDocument(doc, file, path, fa, xrefOut);
    } catch (Exception e) {
      LOGGER.log(
          Level.INFO,
          "Skipped file ''{0}'' because the analyzer didn''t " + "understand it.",
          path);
      LOGGER.log(Level.FINE, "Exception from analyzer " + fa.getClass().getName(), e);
      cleanupResources(doc);
      return;
    }

    try {
      writer.addDocument(doc);
    } catch (Throwable t) {
      cleanupResources(doc);
      throw t;
    }

    setDirty();
    for (IndexChangedListener listener : listeners) {
      listener.fileAdded(path, fa.getClass().getSimpleName());
    }
  }
Example #20
  /**
   * Check if a file is local to the current project. If we don't have projects, check if the file
   * is in the source root.
   *
   * @param path the path to a file
   * @return true if the file is local to the current repository
   */
  private boolean isLocal(String path) {
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    String srcRoot = env.getSourceRootPath();

    boolean local = false;

    if (path.startsWith(srcRoot)) {
      if (env.hasProjects()) {
        String relPath = path.substring(srcRoot.length());
        if (project.equals(Project.getProject(relPath))) {
          // File is under the current project, so it's local.
          local = true;
        }
      } else {
        // File is under source root, and we don't have projects, so
        // consider it local.
        local = true;
      }
    }

    return local;
  }
Example #21
  /**
   * Get an IndexReader for the index database where a given file is stored.
   *
   * @param path the file to get the database for
   * @return The index database where the file should be located or null if it cannot be located.
   */
  public static IndexReader getIndexReader(String path) {
    IndexReader ret = null;

    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    File indexDir = new File(env.getDataRootFile(), "index");

    if (env.hasProjects()) {
      Project p = Project.getProject(path);
      if (p == null) {
        return null;
      }
      indexDir = new File(indexDir, p.getPath());
    }
    try {
      FSDirectory fdir = FSDirectory.open(indexDir, NoLockFactory.getNoLockFactory());
      if (indexDir.exists() && DirectoryReader.indexExists(fdir)) {
        ret = DirectoryReader.open(fdir);
      }
    } catch (Exception ex) {
      log.log(Level.SEVERE, "Failed to open index: {0}", indexDir.getAbsolutePath());
      log.log(Level.FINE, "Stack Trace: ", ex);
    }
    return ret;
  }
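Assumed caller code for getIndexReader(): the path is a hypothetical source-root-relative file, and the caller owns (and must close) the returned reader.

import java.io.IOException;
import org.apache.lucene.index.IndexReader;

public class IndexReaderSketch {
  static void printDocCount() throws IOException {
    IndexReader reader = IndexDatabase.getIndexReader("/projectA/main.c");
    if (reader == null) {
      return; // no index (or no matching project) for this path
    }
    try {
      System.out.println("documents in index: " + reader.numDocs());
    } finally {
      reader.close();
    }
  }
}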
Example #22
 /**
  * List all files in some of the index databases
  *
  * @param subFiles Subdirectories for the various projects to list the files for (or null or an
  *     empty list to dump all projects)
  * @throws IOException if an error occurs
  */
 public static void listAllFiles(List<String> subFiles) throws IOException {
   RuntimeEnvironment env = RuntimeEnvironment.getInstance();
   if (env.hasProjects()) {
     if (subFiles == null || subFiles.isEmpty()) {
       for (Project project : env.getProjects()) {
         IndexDatabase db = new IndexDatabase(project);
         db.listFiles();
       }
     } else {
       for (String path : subFiles) {
         Project project = Project.getProject(path);
         if (project == null) {
           LOGGER.log(Level.WARNING, "Could not find a project for \"{0}\"", path);
         } else {
           IndexDatabase db = new IndexDatabase(project);
           db.listFiles();
         }
       }
     }
   } else {
     IndexDatabase db = new IndexDatabase();
     db.listFiles();
   }
 }
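An assumed driver call for listAllFiles(), placed alongside IndexDatabase: passing null (or an empty list) dumps the files of every project, as documented above.

public class ListAllFilesSketch {
  static void dumpEverything() throws java.io.IOException {
    IndexDatabase.listAllFiles(null); // null means "all projects"
  }
}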
Example #23
 /**
  * By default the indexer will traverse all directories in the project. If you add directories
  * with this function, the update will only process the specified directories.
  *
  * @param dir The directory to scan
  * @return <code>true</code> if the directory was added, false otherwise
  */
 @SuppressWarnings("PMD.UseStringBufferForStringAppends")
 public boolean addDirectory(String dir) {
   String directory = dir;
   if (directory.startsWith("\\")) {
     directory = directory.replace('\\', '/');
   } else if (directory.charAt(0) != '/') {
     directory = "/" + directory;
   }
   File file = new File(RuntimeEnvironment.getInstance().getSourceRootFile(), directory);
   if (file.exists()) {
     directories.add(directory);
     return true;
   }
   return false;
 }
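Assumed usage of addDirectory(), placed in the same package as IndexDatabase: the leading '/' is supplied by the method itself when missing, and the (hypothetical) directory must exist under the configured source root.

public class AddDirectorySketch {
  static void restrictUpdateScope() throws java.io.IOException {
    IndexDatabase db = new IndexDatabase(); // project-less configuration assumed
    if (!db.addDirectory("projectA/src")) { // stored internally as "/projectA/src"
      System.err.println("projectA/src does not exist under the source root");
    }
  }
}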
Example #24
  /**
   * Check if I should accept the path containing a symlink
   *
   * @param absolutePath the path with a symlink to check
   * @param canonicalPath the canonical path to the file
   * @return true if the file should be accepted, false otherwise
   * @throws IOException if the canonical path of an allowed symlink cannot be resolved
   */
  private boolean acceptSymlink(String absolutePath, String canonicalPath) throws IOException {
    // Always accept local symlinks
    if (isLocal(canonicalPath)) {
      return true;
    }

    for (String allowedSymlink : RuntimeEnvironment.getInstance().getAllowedSymlinks()) {
      if (absolutePath.startsWith(allowedSymlink)) {
        String allowedTarget = new File(allowedSymlink).getCanonicalPath();
        if (canonicalPath.startsWith(allowedTarget)
            && absolutePath
                .substring(allowedSymlink.length())
                .equals(canonicalPath.substring(allowedTarget.length()))) {
          return true;
        }
      }
    }
    return false;
  }
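The suffix comparison performed above, shown in isolation with made-up paths: a symlinked path is accepted when it lies under an allowed symlink and resolves to the same file relative to that symlink's canonical target.

public class SymlinkCheckSketch {
  public static void main(String[] args) {
    String allowedSymlink = "/src/link";            // configured allowed symlink
    String allowedTarget = "/data/real";            // its canonical path
    String absolutePath = "/src/link/sub/file.c";   // path as seen under the source root
    String canonicalPath = "/data/real/sub/file.c"; // path after resolving the symlink

    boolean accepted = absolutePath.startsWith(allowedSymlink)
        && canonicalPath.startsWith(allowedTarget)
        && absolutePath.substring(allowedSymlink.length())
            .equals(canonicalPath.substring(allowedTarget.length()));
    System.out.println(accepted); // true: same file relative to the allowed symlink
  }
}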
Example #25
 /**
  * Dump the configuration as an HTML table.
  *
  * @param out destination for the HTML output
  * @throws IOException if an error happens while writing to {@code out}
  * @throws HistoryException if the history guru cannot be accessed
  */
 @SuppressWarnings("boxing")
 public static void dumpConfiguration(Appendable out) throws IOException, HistoryException {
   out.append("<table border=\"1\" width=\"100%\">");
   out.append("<tr><th>Variable</th><th>Value</th></tr>");
   RuntimeEnvironment env = RuntimeEnvironment.getInstance();
   printTableRow(out, "Source root", env.getSourceRootPath());
   printTableRow(out, "Data root", env.getDataRootPath());
   printTableRow(out, "CTags", env.getCtags());
   printTableRow(out, "Bug page", env.getBugPage());
   printTableRow(out, "Bug pattern", env.getBugPattern());
   printTableRow(out, "User page", env.getUserPage());
   printTableRow(out, "User page suffix", env.getUserPageSuffix());
   printTableRow(out, "Review page", env.getReviewPage());
   printTableRow(out, "Review pattern", env.getReviewPattern());
   printTableRow(out, "Using projects", env.hasProjects());
   out.append("<tr><td>Ignored files</td><td>");
   printUnorderedList(out, env.getIgnoredNames().getItems());
   out.append("</td></tr>");
   printTableRow(out, "Index word limit", env.getIndexWordLimit());
   printTableRow(out, "Allow leading wildcard in search", env.isAllowLeadingWildcard());
   printTableRow(out, "History cache", HistoryGuru.getInstance().getCacheInfo());
   out.append("</table>");
 }
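A usage sketch for dumpConfiguration(), assumed to live in the same class: any Appendable works, so a StringBuilder can capture the HTML table for embedding in a page.

  static String configurationAsHtml() throws IOException, HistoryException {
    StringBuilder html = new StringBuilder(); // any Appendable is accepted
    dumpConfiguration(html);
    return html.toString();
  }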
Example #26
 /**
  * Get the current runtime environment.
  *
  * @return the runtime env.
  * @see RuntimeEnvironment#getInstance()
  * @see RuntimeEnvironment#register()
  */
 public RuntimeEnvironment getEnv() {
   if (env == null) {
     env = RuntimeEnvironment.getInstance().register();
   }
   return env;
 }
Example #27
  private void storeHistory(ConnectionResource conn, History history, Repository repository)
      throws SQLException {

    Integer reposId = null;
    Map<String, Integer> authors = null;
    Map<String, Integer> files = null;
    Map<String, Integer> directories = null;
    PreparedStatement addChangeset = null;
    PreparedStatement addDirchange = null;
    PreparedStatement addFilechange = null;
    PreparedStatement addFilemove = null;
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();

    // return immediately when there is nothing to do
    List<HistoryEntry> entries = history.getHistoryEntries();
    if (entries.isEmpty()) {
      return;
    }

    for (int i = 0; ; i++) {
      try {
        if (reposId == null) {
          reposId = getRepositoryId(conn, repository);
          conn.commit();
        }

        if (authors == null) {
          authors = getAuthors(conn, history, reposId);
          conn.commit();
        }

        if (directories == null || files == null) {
          Map<String, Integer> dirs = new HashMap<String, Integer>();
          Map<String, Integer> fls = new HashMap<String, Integer>();
          getFilesAndDirectories(conn, history, reposId, dirs, fls);
          conn.commit();
          directories = dirs;
          files = fls;
        }

        if (addChangeset == null) {
          addChangeset = conn.getStatement(ADD_CHANGESET);
        }

        if (addDirchange == null) {
          addDirchange = conn.getStatement(ADD_DIRCHANGE);
        }

        if (addFilechange == null) {
          addFilechange = conn.getStatement(ADD_FILECHANGE);
        }

        if (addFilemove == null) {
          addFilemove = conn.getStatement(ADD_FILEMOVE);
        }

        // Success! Break out of the loop.
        break;

      } catch (SQLException sqle) {
        handleSQLException(sqle, i);
        conn.rollback();
      }
    }

    addChangeset.setInt(1, reposId);

    // getHistoryEntries() returns the entries in reverse chronological
    // order, but we want to insert them in chronological order so that
    // their auto-generated identity column can be used as a chronological
    // ordering column. Otherwise, incremental updates will make the
    // identity column unusable for chronological ordering. Therefore
    // we walk the list backwards.
    for (ListIterator<HistoryEntry> it = entries.listIterator(entries.size()); it.hasPrevious(); ) {
      HistoryEntry entry = it.previous();
      retry:
      for (int i = 0; ; i++) {
        try {
          addChangeset.setString(2, entry.getRevision());
          addChangeset.setInt(3, authors.get(entry.getAuthor()));
          addChangeset.setTimestamp(4, new Timestamp(entry.getDate().getTime()));
          String msg = entry.getMessage();
          // Truncate the message if it can't fit in a VARCHAR
          // (bug #11663).
          if (msg.length() > MAX_MESSAGE_LENGTH) {
            msg = truncate(msg, MAX_MESSAGE_LENGTH);
          }
          addChangeset.setString(5, msg);
          int changesetId = nextChangesetId.getAndIncrement();
          addChangeset.setInt(6, changesetId);
          addChangeset.executeUpdate();

          // Add one row for each file in FILECHANGES, and one row
          // for each path element of the directories in DIRCHANGES.
          Set<String> addedDirs = new HashSet<String>();
          addDirchange.setInt(1, changesetId);
          addFilechange.setInt(1, changesetId);
          for (String file : entry.getFiles()) {
            // Skip files that the history has flagged as ignored.
            String repodir = "";
            try {
              repodir = env.getPathRelativeToSourceRoot(new File(repository.getDirectoryName()), 0);
            } catch (IOException ex) {
              Logger.getLogger(JDBCHistoryCache.class.getName()).log(Level.SEVERE, null, ex);
            }

            String fullPath = toUnixPath(file);
            if (!history.isIgnored(file.substring(repodir.length() + 1))) {
              int fileId = files.get(fullPath);
              addFilechange.setInt(2, fileId);
              addFilechange.executeUpdate();
            }
            String[] pathElts = splitPath(fullPath);
            for (int j = 0; j < pathElts.length; j++) {
              String dir = unsplitPath(pathElts, j);
              // Only add to DIRCHANGES if we haven't already
              // added this dir/changeset combination.
              if (!addedDirs.contains(dir)) {
                addDirchange.setInt(2, directories.get(dir));
                addDirchange.executeUpdate();
                addedDirs.add(dir);
              }
            }
          }

          conn.commit();

          // Successfully added the entry. Break out of retry loop.
          break retry;

        } catch (SQLException sqle) {
          handleSQLException(sqle, i);
          conn.rollback();
        }
      }
    }

    /*
     * Special handling for certain files - this is mainly for files which
     * have been renamed in Mercurial repository.
     * This ensures that their complete history (follow) will be saved.
     */
    for (String filename : history.getIgnoredFiles()) {
      String file_path = repository.getDirectoryName() + File.separatorChar + filename;
      File file = new File(file_path);
      String repo_path = file_path.substring(env.getSourceRootPath().length());
      History hist;
      try {
        hist = repository.getHistory(file);
      } catch (HistoryException ex) {
        Logger.getLogger(JDBCHistoryCache.class.getName()).log(Level.SEVERE, null, ex);
        continue;
      }

      int fileId = files.get(repo_path);
      for (HistoryEntry entry : hist.getHistoryEntries()) {
        retry:
        for (int i = 0; ; i++) {
          try {
            int changesetId = getIdForRevision(entry.getRevision());

            /*
             * If the file appears in this changeset under its current
             * name, record it as a regular file change. Otherwise the
             * changeset touched the file under one of its earlier
             * names, so record it in the file-move table so that the
             * complete history can still be found when getting the
             * history of a directory.
             */
            if (entry.getFiles().contains(repo_path)) {
              addFilechange.setInt(1, changesetId);
              addFilechange.setInt(2, fileId);
              addFilechange.executeUpdate();
            } else {
              addFilemove.setInt(1, changesetId);
              addFilemove.setInt(2, fileId);
              addFilemove.executeUpdate();
            }

            conn.commit();
            break retry;
          } catch (SQLException sqle) {
            handleSQLException(sqle, i);
            conn.rollback();
          }
        }
      }
    }
  }
Example #28
  /**
   * Update the content of this index database
   *
   * @throws IOException if an error occurs
   * @throws HistoryException if an error occurs when accessing the history
   */
  public void update() throws IOException, HistoryException {
    synchronized (lock) {
      if (running) {
        throw new IOException("Indexer already running!");
      }
      running = true;
      interrupted = false;
    }

    String ctgs = RuntimeEnvironment.getInstance().getCtags();
    if (ctgs != null) {
      ctags = new Ctags();
      ctags.setBinary(ctgs);
    }
    if (ctags == null) {
      log.severe("Unable to run ctags! searching definitions will not work!");
    }

    if (ctags != null) {
      String filename = RuntimeEnvironment.getInstance().getCTagsExtraOptionsFile();
      if (filename != null) {
        ctags.setCTagsExtraOptionsFile(filename);
      }
    }

    try {
      Analyzer analyzer = AnalyzerGuru.getAnalyzer();
      IndexWriterConfig iwc = new IndexWriterConfig(SearchEngine.LUCENE_VERSION, analyzer);
      iwc.setOpenMode(OpenMode.CREATE_OR_APPEND);
      // iwc.setRAMBufferSizeMB(256.0);  //TODO check what is the sweet spot
      writer = new IndexWriter(indexDirectory, iwc);
      writer.commit(); // to make sure index exists on the disk
      // writer.setMaxFieldLength(RuntimeEnvironment.getInstance().getIndexWordLimit());

      if (directories.isEmpty()) {
        if (project == null) {
          directories.add("");
        } else {
          directories.add(project.getPath());
        }
      }

      for (String dir : directories) {
        File sourceRoot;
        if ("".equals(dir)) {
          sourceRoot = RuntimeEnvironment.getInstance().getSourceRootFile();
        } else {
          sourceRoot = new File(RuntimeEnvironment.getInstance().getSourceRootFile(), dir);
        }

        HistoryGuru.getInstance().ensureHistoryCacheExists(sourceRoot);

        String startuid = Util.path2uid(dir, "");
        IndexReader reader = DirectoryReader.open(indexDirectory); // open existing index
        Terms terms = null;
        int numDocs = reader.numDocs();
        if (numDocs > 0) {
          Fields uFields = MultiFields.getFields(reader); // reader.getTermVectors(0);
          terms = uFields.terms(QueryBuilder.U);
        }

        try {
          if (numDocs > 0) {
            uidIter = terms.iterator(null);
            TermsEnum.SeekStatus stat = uidIter.seekCeil(new BytesRef(startuid), true); // init uid
            if (stat == TermsEnum.SeekStatus.END || stat == TermsEnum.SeekStatus.NOT_FOUND) {
              uidIter = null;
            }
          }
          // TODO below should be optional, since it traverses the tree once more to get total
          // count! :(
          int file_cnt = 0;
          if (RuntimeEnvironment.getInstance().isPrintProgress()) {
            log.log(Level.INFO, "Counting files in {0} ...", dir);
            file_cnt = indexDown(sourceRoot, dir, true, 0, 0);
            if (log.isLoggable(Level.INFO)) {
              log.log(
                  Level.INFO, "Need to process: {0} files for {1}", new Object[] {file_cnt, dir});
            }
          }

          indexDown(sourceRoot, dir, false, 0, file_cnt);

          while (uidIter != null
              && uidIter.term() != null
              && uidIter.term().utf8ToString().startsWith(startuid)) {
            removeFile();
            uidIter.next();
          }
        } finally {
          reader.close();
        }
      }
    } finally {
      if (writer != null) {
        try {
          writer.prepareCommit();
          writer.commit();
          writer.close();
        } catch (IOException e) {
          log.log(Level.WARNING, "An error occurred while closing writer", e);
        }
      }

      if (ctags != null) {
        try {
          ctags.close();
        } catch (IOException e) {
          log.log(Level.WARNING, "An error occurred while closing ctags process", e);
        }
      }

      synchronized (lock) {
        running = false;
      }
    }

    if (!isInterrupted() && isDirty()) {
      if (RuntimeEnvironment.getInstance().isOptimizeDatabase()) {
        optimize();
      }
      createSpellingSuggestions();
      RuntimeEnvironment env = RuntimeEnvironment.getInstance();
      File timestamp = new File(env.getDataRootFile(), "timestamp");
      if (timestamp.exists()) {
        if (!timestamp.setLastModified(System.currentTimeMillis())) {
          log.log(
              Level.WARNING,
              "Failed to set last modified time on ''{0}'', used for timestamping the index database.",
              timestamp.getAbsolutePath());
        }
      } else {
        if (!timestamp.createNewFile()) {
          log.log(
              Level.WARNING,
              "Failed to create file ''{0}'', used for timestamping the index database.",
              timestamp.getAbsolutePath());
        }
      }
    }
  }
Example #29
 /**
  * Look up the file {@link #getPath()} relative to the crossfile (xref) directory of the opengrok
  * data directory. The compressed file is tried first, by appending the file extension ".gz" to
  * the filename. If that fails, or if an uncompressed version of the file is newer than its
  * compressed version, the uncompressed file is used.
  *
  * @return {@code null} if not found, the file otherwise.
  */
 public File findDataFile() {
   return checkFile(
       new File(getEnv().getDataRootPath() + Prefix.XREF_P), path, env.isCompressXref());
 }
Example #30
  /**
   * Generate indexes recursively.
   *
   * @param dir the root indexDirectory to generate indexes for
   * @param parent the parent directory path, relative to the source root
   * @param count_only if true, just traverse the source root and count files
   * @param cur_count current file count during the traversal of the tree
   * @param est_total estimated total number of files to process
   * @return the updated file count
   */
  private int indexDown(File dir, String parent, boolean count_only, int cur_count, int est_total)
      throws IOException {
    int lcur_count = cur_count;
    if (isInterrupted()) {
      return lcur_count;
    }

    if (!accept(dir)) {
      return lcur_count;
    }

    File[] files = dir.listFiles();
    if (files == null) {
      log.log(Level.SEVERE, "Failed to get file listing for: {0}", dir.getAbsolutePath());
      return lcur_count;
    }
    Arrays.sort(
        files,
        new Comparator<File>() {
          @Override
          public int compare(File p1, File p2) {
            return p1.getName().compareTo(p2.getName());
          }
        });

    for (File file : files) {
      if (accept(dir, file)) {
        String path = parent + '/' + file.getName();

        if (file.isDirectory()) {
          lcur_count = indexDown(file, path, count_only, lcur_count, est_total);
        } else {
          lcur_count++;
          if (count_only) {
            continue;
          }

          if (RuntimeEnvironment.getInstance().isPrintProgress()
              && est_total > 0
              && log.isLoggable(Level.INFO)) {
            log.log(
                Level.INFO,
                "Progress: {0} ({1}%)",
                new Object[] {lcur_count, (lcur_count * 100.0f / est_total)});
          }

          if (uidIter != null) {
            String uid =
                Util.path2uid(
                    path,
                    DateTools.timeToString(
                        file.lastModified(),
                        DateTools.Resolution.MILLISECOND)); // construct uid for doc
            BytesRef buid = new BytesRef(uid);
            while (uidIter.term() != null
                && uidIter.term().compareTo(emptyBR) != 0
                && uidIter.term().compareTo(buid) < 0) {
              removeFile();
              uidIter.next();
            }

            if (uidIter.term() != null && uidIter.term().bytesEquals(buid)) {
              uidIter.next(); // keep matching docs
              continue;
            }
          }
          try {
            addFile(file, path);
          } catch (Exception e) {
            log.log(Level.WARNING, "Failed to add file " + file.getAbsolutePath(), e);
          }
        }
      }
    }

    return lcur_count;
  }