Example #1
  @Override
  public void setDirectoryName(String directoryName) {
    super.setDirectoryName(directoryName);

    if (isWorking()) {
      // set to true if we manage to find the root directory
      Boolean rootFound = Boolean.FALSE;

      List<String> cmd = new ArrayList<String>();
      cmd.add(this.cmd);
      cmd.add("info");
      cmd.add("--xml");
      File directory = new File(getDirectoryName());

      Executor executor = new Executor(cmd, directory);
      if (executor.exec() == 0) {
        try {
          DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
          DocumentBuilder builder = factory.newDocumentBuilder();
          Document document = builder.parse(executor.getOutputStream());

          String url = getValue(document.getElementsByTagName("url").item(0));
          if (url == null) {
            OpenGrokLogger.getLogger()
                .warning(
                    "svn info did not contain an URL for ["
                        + directoryName
                        + "]. Assuming remote repository.");
            setRemote(true);
          } else {
            if (!url.startsWith("file")) {
              setRemote(true);
            }
          }
          String root = getValue(document.getElementsByTagName("root").item(0));
          if (url != null && root != null) {
            reposPath = url.substring(root.length());
            rootFound = Boolean.TRUE;
          }
        } catch (SAXException saxe) {
          OpenGrokLogger.getLogger().log(Level.WARNING, "Parser error parsing svn output", saxe);
        } catch (ParserConfigurationException pce) {
          OpenGrokLogger.getLogger()
              .log(Level.WARNING, "Parser configuration error parsing svn output", pce);
        } catch (IOException ioe) {
          OpenGrokLogger.getLogger()
              .log(Level.WARNING, "IOException reading from svn process", ioe);
        }
      } else {
        OpenGrokLogger.getLogger()
            .warning(
                "Failed to execute svn info for [" + directoryName + "]. Repository disabled.");
      }

      setWorking(rootFound);
    }
  }
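
A minimal, self-contained sketch of the parsing step above. The XML is an abridged, hypothetical sample of svn info --xml output, and getTextContent() stands in for the snippet's getValue() helper; it shows how reposPath and the remote flag fall out of the <url> and <root> elements.

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;

public class SvnInfoSketch {
  public static void main(String[] args) throws Exception {
    // Abridged, made-up sample of what `svn info --xml` prints.
    String xml =
        "<info><entry>"
            + "<url>file:///var/svn/repo/trunk/project</url>"
            + "<repository><root>file:///var/svn/repo</root></repository>"
            + "</entry></info>";
    Document document =
        DocumentBuilderFactory.newInstance()
            .newDocumentBuilder()
            .parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
    String url = document.getElementsByTagName("url").item(0).getTextContent();
    String root = document.getElementsByTagName("root").item(0).getTextContent();
    // A URL that does not start with "file" would mark the repository as remote;
    // the repository path is the URL with the repository root stripped off.
    System.out.println("remote: " + !url.startsWith("file"));
    System.out.println("reposPath: " + url.substring(root.length())); // /trunk/project
  }
}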
Example #2
  /** @param directory Directory where we list tags */
  @Override
  protected void buildTagList(File directory) {
    this.tagList = new TreeSet<TagEntry>();
    ArrayList<String> argv = new ArrayList<String>();
    ensureCommand(CMD_PROPERTY_KEY, CMD_FALLBACK);
    argv.add(cmd);
    argv.add("tags");
    ProcessBuilder pb = new ProcessBuilder(argv);
    pb.directory(directory);
    Process process = null;
    BufferedReader in = null;

    try {
      process = pb.start();
      in = new BufferedReader(new InputStreamReader(process.getInputStream()));
      String line;
      while ((line = in.readLine()) != null) {
        String[] parts = line.split(" +");
        if (parts.length < 2) {
          throw new HistoryException("Tag line contains fewer than 2 columns: " + line);
        }
        // Grrr, how to parse tags with spaces inside?
        // This solution will lose multiple spaces ;-/
        String tag = parts[0];
        for (int i = 1; i < parts.length - 1; ++i) {
          tag += " " + parts[i];
        }
        TagEntry tagEntry = new BazaarTagEntry(Integer.parseInt(parts[parts.length - 1]), tag);
        // Bazaar lists multiple tags on more lines. We need to merge those into single TagEntry
        TagEntry higher = this.tagList.ceiling(tagEntry);
        if (higher != null && higher.equals(tagEntry)) {
          // Found in the tree, merge tags
          this.tagList.remove(higher);
          tagEntry.setTags(higher.getTags() + ", " + tag);
        }
        this.tagList.add(tagEntry);
      }
    } catch (IOException e) {
      OpenGrokLogger.getLogger().log(Level.WARNING, "Failed to read tag list: {0}", e.getMessage());
      this.tagList = null;
    } catch (HistoryException e) {
      OpenGrokLogger.getLogger()
          .log(Level.WARNING, "Failed to parse tag list: {0}", e.getMessage());
      this.tagList = null;
    }

    IOUtils.close(in);
    if (process != null) {
      try {
        process.exitValue();
      } catch (IllegalThreadStateException e) {
        // the process is still running??? just kill it..
        process.destroy();
      }
    }
  }
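
To make the splitting and re-joining above concrete, here is a small standalone sketch run against a hypothetical "bzr tags" output line. A tag name containing single spaces survives, while runs of spaces collapse, exactly as the comment in the snippet warns.

public class TagLineSketch {
  public static void main(String[] args) {
    // Hypothetical output line: tag name, padding, revision number.
    String line = "release 1.0          42";
    String[] parts = line.split(" +");
    // Re-join everything but the last column as the tag name.
    StringBuilder tag = new StringBuilder(parts[0]);
    for (int i = 1; i < parts.length - 1; i++) {
      tag.append(' ').append(parts[i]);
    }
    int revision = Integer.parseInt(parts[parts.length - 1]);
    System.out.println(tag + " -> revision " + revision); // release 1.0 -> revision 42
  }
}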
Example #3
 /**
  * Silently dump a file to the given destination. All {@link IOException}s get caught and
  * logged, but not re-thrown.
  *
  * @param out dump destination
  * @param file file to dump.
  * @param compressed if {@code true} the denoted file is assumed to be gzipped.
  * @return {@code true} on success (everything read and written).
  * @throws NullPointerException if a parameter is {@code null}.
  */
 public static boolean dump(Writer out, File file, boolean compressed) {
   if (!file.exists()) {
     return false;
   }
   FileInputStream fis = null;
   GZIPInputStream gis = null;
   Reader in = null;
   try {
     if (compressed) {
       fis = new FileInputStream(file);
       gis = new GZIPInputStream(fis);
       in = new InputStreamReader(gis);
     } else {
       in = new FileReader(file);
     }
     dump(out, in);
     return true;
   } catch (IOException e) {
     OpenGrokLogger.getLogger()
         .log(Level.WARNING, "An error occurred while piping file " + file + ": ", e);
   } finally {
     IOUtils.close(in);
     IOUtils.close(gis);
     IOUtils.close(fis);
   }
   return false;
 }
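
A hypothetical caller of the method above. It assumes the method is reachable as a static member of a utility class, called Util here purely for illustration, and decides the compressed flag from the file extension.

import java.io.File;
import java.io.StringWriter;

public class DumpSketch {
  public static void main(String[] args) {
    // Hypothetical cache file; any readable path would do.
    File cached = new File("/var/opengrok/data/historycache/foo.gz");
    StringWriter sink = new StringWriter();
    // Util.dump(...) refers to the method shown above (the class name is assumed).
    boolean ok = Util.dump(sink, cached, cached.getName().endsWith(".gz"));
    System.out.println(ok ? sink.toString() : "dump failed or file missing");
  }
}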
Example #4
  /**
   * Annotate the specified file/revision.
   *
   * @param file file to annotate
   * @param revision revision to annotate
   * @return file annotation
   */
  @Override
  public Annotation annotate(File file, String revision) throws IOException {
    List<String> cmd = new ArrayList<String>();
    ensureCommand(CMD_PROPERTY_KEY, CMD_FALLBACK);
    cmd.add(this.cmd);
    cmd.add("blame");
    cmd.add("--all");
    cmd.add("--long");
    if (revision != null) {
      cmd.add("-r");
      cmd.add(revision);
    }
    cmd.add(file.getName());

    Executor exec = new Executor(cmd, file.getParentFile());
    int status = exec.exec();

    if (status != 0) {
      OpenGrokLogger.getLogger()
          .log(
              Level.WARNING,
              "Failed to get annotations for: \"{0}\" Exit code: {1}",
              new Object[] {file.getAbsolutePath(), String.valueOf(status)});
    }

    return parseAnnotation(exec.getOutputReader(), file.getName());
  }
Example #5
  /**
   * Get the history after a specified revision.
   *
   * <p>The default implementation first fetches the full history and then throws away the oldest
   * revisions. This is not efficient, so subclasses should override it in order to get good
   * performance. Once every subclass has implemented a more efficient method, the default
   * implementation should be removed and this method made abstract.
   *
   * @param file the file to get the history for
   * @param sinceRevision the revision right before the first one to return, or {@code null} to
   *     return the full history
   * @return partial history for file
   * @throws HistoryException on error accessing the history
   */
  History getHistory(File file, String sinceRevision) throws HistoryException {

    // If we want an incremental history update and get here, warn that
    // it may be slow.
    if (sinceRevision != null) {
      Logger logger = OpenGrokLogger.getLogger();
      logger.log(
          Level.WARNING,
          "Incremental history retrieval is not implemented for {0}.",
          getClass().getSimpleName());
      logger.log(Level.WARNING, "Falling back to slower full history retrieval.");
    }

    History history = getHistory(file);

    if (sinceRevision == null) {
      return history;
    }

    List<HistoryEntry> partial = new ArrayList<>();
    for (HistoryEntry entry : history.getHistoryEntries()) {
      partial.add(entry);
      if (sinceRevision.equals(entry.getRevision())) {
        // Found revision right before the first one to return.
        break;
      }
    }

    removeAndVerifyOldestChangeset(partial, sinceRevision);
    history.setHistoryEntries(partial);
    return history;
  }
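
Viewed from a hypothetical caller (assuming the method is visible to it and that repo is an instance of the enclosing repository class, written as Repository here), the returned partial history holds only the entries committed after sinceRevision:

  // Hypothetical helper, built only on the calls already visible above:
  // print the revisions that were committed after sinceRevision.
  static void printRevisionsSince(Repository repo, File file, String sinceRevision)
      throws HistoryException {
    History partial = repo.getHistory(file, sinceRevision);
    for (HistoryEntry entry : partial.getHistoryEntries()) {
      System.out.println(entry.getRevision());
    }
  }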
Example #6
  /**
   * Create a history log cache for all files in this repository. {@code getHistory()} is used to
   * fetch the history for the entire repository. If {@code hasHistoryForDirectories()} returns
   * {@code false}, this method is a no-op.
   *
   * @param cache the cache instance in which to store the history log
   * @param sinceRevision if non-null, incrementally update the cache with all revisions after the
   *     specified revision; otherwise, create the full history starting with the initial revision
   * @throws HistoryException on error
   */
  final void createCache(HistoryCache cache, String sinceRevision) throws HistoryException {
    if (!isWorking()) {
      return;
    }

    // If we don't have a directory parser, we can't create the cache
    // this way. Just give up and return.
    if (!hasHistoryForDirectories()) {
      Logger.getLogger(getClass().getName())
          .log(
              Level.INFO,
              "Skipping creation of history cache for {0}, since retrieval "
                  + "of history for directories is not implemented for this "
                  + "repository type.",
              getDirectoryName());
      return;
    }

    File directory = new File(getDirectoryName());

    History history;
    try {
      history = getHistory(directory, sinceRevision);
    } catch (HistoryException he) {
      if (sinceRevision == null) {
        // Failed to get full history, so fail.
        throw he;
      }
      // Failed to get partial history. This may have been caused
      // by changes in the revision numbers since the last update
      // (bug #14724) so we'll try to regenerate the cache from
      // scratch instead.
      OpenGrokLogger.getLogger()
          .log(
              Level.INFO,
              "Failed to get partial history. Attempting to "
                  + "recreate the history cache from scratch.",
              he);
      history = null;
    }

    if (sinceRevision != null && history == null) {
      // Failed to get partial history, now get full history instead.
      history = getHistory(directory);
      // Got full history successfully. Clear the history cache so that
      // we can recreate it from scratch.
      cache.clear(this);
    }

    // We need to refresh list of tags for incremental reindex.
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    if (env.isTagsEnabled() && this.hasFileBasedTags()) {
      this.buildTagList(new File(this.directoryName));
    }

    if (history != null) {
      cache.store(history, this);
    }
  }
Example #7
  @Override
  public Annotation annotate(File file, String revision) throws IOException {
    SAXParserFactory factory = SAXParserFactory.newInstance();
    SAXParser saxParser = null;
    try {
      saxParser = factory.newSAXParser();
    } catch (Exception ex) {
      throw new IOException("Failed to create SAX parser", ex);
    }

    ArrayList<String> argv = new ArrayList<String>();
    ensureCommand(CMD_PROPERTY_KEY, CMD_FALLBACK);
    argv.add(cmd);
    argv.add("annotate");
    argv.add("--trust-server-cert");
    argv.add("--non-interactive");
    argv.add("--xml");
    if (revision != null) {
      argv.add("-r");
      argv.add(revision);
    }
    argv.add(escapeFileName(file.getName()));
    ProcessBuilder pb = new ProcessBuilder(argv);
    pb.directory(file.getParentFile());
    Process process = null;
    BufferedInputStream in = null;
    Annotation ret = null;
    try {
      process = pb.start();
      in = new BufferedInputStream(process.getInputStream());

      AnnotateHandler handler = new AnnotateHandler(file.getName());
      try {
        saxParser.parse(in, handler);
        ret = handler.annotation;
      } catch (Exception e) {
        OpenGrokLogger.getLogger()
            .log(Level.SEVERE, "An error occurred while parsing the xml output", e);
      }
    } finally {
      IOUtils.close(in);
      if (process != null) {
        try {
          process.exitValue();
        } catch (IllegalThreadStateException e) {
          // the process is still running??? just kill it..
          process.destroy();
        }
      }
    }
    return ret;
  }
Example #8
  /**
   * Handle an {@code SQLException}. If the exception indicates that the operation may succeed if
   * it's retried and the number of attempts hasn't exceeded the limit defined by {@link
   * #MAX_RETRIES}, ignore it and let the caller retry the operation. Otherwise, re-throw the
   * exception.
   *
   * @param sqle the exception to handle
   * @param attemptNo the attempt number, first attempt is 0
   * @throws SQLException if the operation shouldn't be retried
   */
  private static void handleSQLException(SQLException sqle, int attemptNo) throws SQLException {
    boolean isTransient = false;
    for (Throwable cause : sqle) {
      if (cause instanceof SQLTransientException) {
        isTransient = true;
        break;
      }
    }

    if (isTransient && attemptNo < MAX_RETRIES) {
      Logger logger = OpenGrokLogger.getLogger();
      logger.info("Transient database failure detected. Retrying.");
      logger.log(Level.FINE, "Transient database failure details:", sqle);
    } else {
      throw sqle;
    }
  }
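
The javadoc above implies a retry loop on the caller's side. A sketch of that pattern might look like the following, where storeEntry() is a made-up name for whatever database operation is being retried:

  // Hypothetical caller pattern: keep retrying until the operation succeeds
  // or handleSQLException() decides the failure is not transient and re-throws.
  private void storeWithRetry(Connection conn, HistoryEntry entry) throws SQLException {
    for (int attempt = 0; ; attempt++) {
      try {
        storeEntry(conn, entry); // hypothetical database operation
        return; // success, stop retrying
      } catch (SQLException sqle) {
        handleSQLException(sqle, attempt);
      }
    }
  }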
Example #9
  @Override
  public InputStream getHistoryGet(String parent, String basename, String rev) {
    InputStream ret = null;

    File directory = new File(directoryName);

    Process process = null;
    try {
      String filename =
          (new File(parent, basename)).getCanonicalPath().substring(directoryName.length() + 1);
      ensureCommand(CMD_PROPERTY_KEY, CMD_FALLBACK);
      String[] argv = {cmd, "cat", "-r", rev, filename};
      process = Runtime.getRuntime().exec(argv, null, directory);

      ByteArrayOutputStream out = new ByteArrayOutputStream();
      byte[] buffer = new byte[32 * 1024];
      InputStream in = process.getInputStream();
      int len;

      while ((len = in.read(buffer)) != -1) {
        if (len > 0) {
          out.write(buffer, 0, len);
        }
      }

      ret = new ByteArrayInputStream(out.toByteArray());
    } catch (Exception exp) {
      OpenGrokLogger.getLogger()
          .log(Level.SEVERE, "Failed to get history: " + exp.getClass().toString(), exp);
    } finally {
      // Clean up zombie-processes...
      if (process != null) {
        try {
          process.exitValue();
        } catch (IllegalThreadStateException exp) {
          // the process is still running??? just kill it..
          process.destroy();
        }
      }
    }

    return ret;
  }
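
A hypothetical caller of the method above, copying a historical revision of a file to standard output; the parent path, file name and revision are made up, and repo stands for an instance of the repository class the method belongs to.

  // Hypothetical usage: fetch "Makefile" as of revision "42" and print it.
  void printHistoricalRevision(Repository repo) throws IOException {
    InputStream in = repo.getHistoryGet("/src/project", "Makefile", "42");
    if (in == null) {
      return; // the method above returns null when the command failed
    }
    byte[] buffer = new byte[8 * 1024];
    int len;
    while ((len = in.read(buffer)) != -1) {
      System.out.write(buffer, 0, len);
    }
    System.out.flush();
  }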
Example #10
 protected Annotation parseAnnotation(Reader input, String fileName) throws IOException {
   BufferedReader in = new BufferedReader(input);
   Annotation ret = new Annotation(fileName);
   String line = "";
   int lineno = 0;
   Matcher matcher = BLAME_PATTERN.matcher(line);
   while ((line = in.readLine()) != null) {
     ++lineno;
     matcher.reset(line);
     if (matcher.find()) {
       String rev = matcher.group(1);
       String author = matcher.group(2).trim();
       ret.addLine(rev, author, true);
     } else {
       OpenGrokLogger.getLogger()
           .log(
               Level.SEVERE,
               "Error: did not find annotation in line {0}: [{1}]",
               new Object[] {String.valueOf(lineno), line});
     }
   }
   return ret;
 }
Example #11
  /**
   * Writes matching History log entries from 'in' to 'out' or to 'hits'.
   *
   * @param in the history to fetch entries from
   * @param path path to the file
   * @param out writer to which matched context is written
   * @param hits list of hits to which matches are added
   * @param wcontext web context - the beginning of the URL
   * @return {@code true} if at least one line matched
   */
  private boolean getHistoryContext(
      History in, String path, Writer out, List<Hit> hits, String wcontext) {
    if ((out == null) == (hits == null)) {
      // There should be exactly one destination for the output. If
      // none or both are specified, it's a bug.
      throw new IllegalArgumentException("Exactly one of out and hits should be non-null");
    }

    if (m == null) {
      return false;
    }

    int matchedLines = 0;
    Iterator<HistoryEntry> it = in.getHistoryEntries().iterator();
    try {
      HistoryEntry he = null;
      HistoryEntry nhe = null;
      String nrev = null;
      while ((it.hasNext() || (nhe != null)) && matchedLines < 10) {
        // nhe is the lookahead revision
        if (nhe == null) {
          he = it.next();
        } else {
          he = nhe;
        }
        String line = he.getLine();
        String rev = he.getRevision();
        // This prefetch mechanism exists because of the diff link generation;
        // we currently generate the diff to the previous revision.
        if (it.hasNext()) {
          nhe = it.next();
        } else {
          nhe = null;
        }
        if (nhe == null) {
          nrev = null;
        } else {
          nrev = nhe.getRevision();
        }
        tokens.reInit(line);
        String token;
        int matchState;
        int start = -1;
        while ((token = tokens.next()) != null) {
          for (int i = 0; i < m.length; i++) {
            matchState = m[i].match(token);
            if (matchState == LineMatcher.MATCHED) {
              if (start < 0) {
                start = tokens.getMatchStart();
              }
              int end = tokens.getMatchEnd();
              if (out == null) {
                StringBuilder sb = new StringBuilder();
                writeMatch(sb, line, start, end, true, path, wcontext, nrev, rev);
                hits.add(new Hit(path, sb.toString(), "", false, false));
              } else {
                writeMatch(out, line, start, end, false, path, wcontext, nrev, rev);
              }
              matchedLines++;
              break;
            } else if (matchState == LineMatcher.WAIT) {
              if (start < 0) {
                start = tokens.getMatchStart();
              }
            } else {
              start = -1;
            }
          }
        }
      }
    } catch (Exception e) {
      OpenGrokLogger.getLogger().log(Level.WARNING, "Could not get history context for " + path, e);
    }
    return matchedLines > 0;
  }
Example #12
  /**
   * @param in reader over the file to be matched
   * @param out writer to which the matching context is written
   * @param urlPrefix prefix used when building links into the file
   * @param morePrefix prefix used to link to the more... page
   * @param path path of the file
   * @param tags definitions to highlight in the file
   * @param limit should the number of matching lines be limited?
   * @param hits list of hits to which matches are added
   * @return did it get any matching context?
   */
  public boolean getContext(
      Reader in,
      Writer out,
      String urlPrefix,
      String morePrefix,
      String path,
      Definitions tags,
      boolean limit,
      List<Hit> hits) {
    alt = !alt;
    if (m == null) {
      return false;
    }
    boolean anything = false;
    TreeMap<Integer, String[]> matchingTags = null;
    if (tags != null) {
      matchingTags = new TreeMap<Integer, String[]>();
      try {
        for (Definitions.Tag tag : tags.getTags()) {
          for (int i = 0; i < m.length; i++) {
            if (m[i].match(tag.symbol) == LineMatcher.MATCHED) {
              /*
               * desc[0] is the symbol
               * desc[1] is the line number
               * desc[2] is the type
               * desc[3] is the matching line
               */
              String[] desc = {
                tag.symbol, Integer.toString(tag.line), tag.type, tag.text,
              };
              if (in == null) {
                if (out == null) {
                  Hit hit =
                      new Hit(
                          path,
                          Util.htmlize(desc[3]).replaceAll(desc[0], "<em>" + desc[0] + "</em>"),
                          desc[1],
                          false,
                          alt);
                  hits.add(hit);
                  anything = true;
                } else {
                  out.write("<a class=\"s\" href=\"");
                  out.write(Util.URIEncodePath(urlPrefix));
                  out.write(Util.URIEncodePath(path));
                  out.write("#");
                  out.write(desc[1]);
                  out.write("\"><span class=\"l\">");
                  out.write(desc[1]);
                  out.write("</span> ");
                  out.write(Util.htmlize(desc[3]).replaceAll(desc[0], "<em>" + desc[0] + "</em>"));
                  out.write("</a> <i> ");
                  out.write(desc[2]);
                  out.write(" </i><br/>");
                  anything = true;
                }
              } else {
                matchingTags.put(tag.line, desc);
              }
              break;
            }
          }
        }
      } catch (IOException e) {
        if (hits != null) {
          // @todo verify why we ignore all exceptions?
          OpenGrokLogger.getLogger().log(Level.WARNING, "Could not get context for " + path, e);
        }
      }
    }
    // When the caller only wants the matching tags, a null reader is passed in.
    if (in == null) {
      return anything;
    }
    int charsRead = 0;
    boolean truncated = false;

    boolean lim = limit;
    if (!RuntimeEnvironment.getInstance().isQuickContextScan()) {
      lim = false;
    }

    if (lim) {
      try {
        charsRead = in.read(buffer);
        if (charsRead == MAXFILEREAD) {
          // we probably only read parts of the file, so set the
          // truncated flag to enable the [all...] link that
          // requests all matches
          truncated = true;
          // truncate to last line read (don't look more than 100
          // characters back)
          for (int i = charsRead - 1; i > charsRead - 100; i--) {
            if (buffer[i] == '\n') {
              charsRead = i;
              break;
            }
          }
        }
      } catch (IOException e) {
        OpenGrokLogger.getLogger().log(Level.WARNING, "An error occurred while reading data", e);
        return anything;
      }
      if (charsRead == 0) {
        return anything;
      }

      tokens.reInit(
          buffer, charsRead, out, Util.URIEncodePath(urlPrefix + path) + "#", matchingTags);
    } else {
      tokens.reInit(in, out, Util.URIEncodePath(urlPrefix + path) + "#", matchingTags);
    }

    if (hits != null) {
      tokens.setAlt(alt);
      tokens.setHitList(hits);
      tokens.setFilename(path);
    }

    try {
      String token;
      int matchState = LineMatcher.NOT_MATCHED;
      int matchedLines = 0;
      while ((token = tokens.yylex()) != null && (!lim || matchedLines < 10)) {
        for (int i = 0; i < m.length; i++) {
          matchState = m[i].match(token);
          if (matchState == LineMatcher.MATCHED) {
            tokens.printContext(urlPrefix);
            matchedLines++;
            // out.write("<br> <i>Matched " + token + " maxlines = " + matchedLines + "</i><br>");
            break;
          } else if (matchState == LineMatcher.WAIT) {
            tokens.holdOn();
          } else {
            tokens.neverMind();
          }
        }
      }
      anything = matchedLines > 0;
      tokens.dumpRest();
      if (lim && (truncated || matchedLines == 10) && out != null) {
        out.write(
            "&nbsp; &nbsp; [<a href=\""
                + Util.URIEncodePath(morePrefix + path)
                + "?"
                + queryAsURI
                + "\">all</a>...]");
      }
    } catch (IOException e) {
      OpenGrokLogger.getLogger().log(Level.WARNING, "Could not get context for " + path, e);
    } finally {
      if (in != null) {
        try {
          in.close();
        } catch (IOException e) {
          OpenGrokLogger.getLogger().log(Level.WARNING, "An error occurred while closing stream", e);
        }
      }
      if (out != null) {
        try {
          out.flush();
        } catch (IOException e) {
          OpenGrokLogger.getLogger()
              .log(Level.WARNING, "An error occurred while flushing stream", e);
        }
      }
    }
    return anything;
  }