Example #1
  /**
   * Extracts entries from the archive.
   *
   * @param ctx query context
   * @return text entries
   * @throws QueryException query exception
   */
  private TokenList extract(final QueryContext ctx) throws QueryException {
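    // evaluate the first argument and check that it is a Base64 item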
    final B64 archive = (B64) checkType(checkItem(expr[0], ctx), AtomType.B64);
    TokenSet hs = null;
    if (expr.length > 1) {
      // filter result to specified entries
      hs = new TokenSet();
      final Iter names = ctx.iter(expr[1]);
      for (Item en; (en = names.next()) != null; ) {
        hs.add(checkElmStr(en).string(info));
      }
    }

    final TokenList tl = new TokenList();
    final ArchiveIn in = ArchiveIn.get(archive.input(info), info);
    try {
      while (in.more()) {
        final ZipEntry ze = in.entry();
        if (ze.isDirectory()) continue;
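        // keep the entry if no filter was given, or if its name occurs in the filter set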
        if (hs == null || hs.delete(token(ze.getName())) != 0) tl.add(in.read());
      }
    } catch (final IOException ex) {
      Util.debug(ex);
      ARCH_FAIL.thrw(info, ex);
    } finally {
      in.close();
    }
    return tl;
  }
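
For comparison, here is a minimal sketch of the same filtering pattern using only JDK classes (a hypothetical ZipExtractSketch; BaseX's B64, ArchiveIn and TokenSet types are replaced by a byte array, ZipInputStream and a mutable Set<String>):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

final class ZipExtractSketch {
  /** Returns the contents of all non-directory entries, optionally filtered by name. */
  static List<byte[]> extract(final byte[] zip, final Set<String> names) throws IOException {
    final List<byte[]> contents = new ArrayList<>();
    try (ZipInputStream in = new ZipInputStream(new ByteArrayInputStream(zip))) {
      for (ZipEntry ze; (ze = in.getNextEntry()) != null; ) {
        // skip directories, as in the original code
        if (ze.isDirectory()) continue;
        // no filter: keep everything; otherwise keep entries whose names occur in the filter
        if (names == null || names.remove(ze.getName())) contents.add(in.readAllBytes());
      }
    }
    return contents;
  }
}

Like hs.delete above, Set.remove reports whether the name was among the requested entries; the filter set is consumed as a side effect.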
Example #2
 /**
  * Caches and returns all unique tokens specified in a query.
  *
  * @param list token list
  * @return token set
  */
 private TokenSet unique(final TokenList list) {
   // cache all query tokens in a set (duplicates are removed)
   final TokenSet ts = new TokenSet();
   switch (mode) {
     case ALL:
     case ANY:
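        // ALL/ANY: add the query tokens as they are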
       for (final byte[] t : list) ts.add(t);
       break;
     case ALL_WORDS:
     case ANY_WORD:
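        // ALL_WORDS/ANY_WORD: tokenize each entry and add the single words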
       final FTLexer l = new FTLexer(ftt.opt);
       for (final byte[] t : list) {
         l.init(t);
         while (l.hasNext()) ts.add(l.nextToken());
       }
       break;
     case PHRASE:
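        // PHRASE: join all tokens into a single, space-separated phrase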
       final TokenBuilder tb = new TokenBuilder();
       for (final byte[] t : list) tb.add(t).add(' ');
       ts.add(tb.trim().finish());
   }
   return ts;
 }
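
For comparison, a minimal sketch of the deduplication idea with plain JDK collections (a hypothetical UniqueTokensSketch; the ALL_WORDS/ANY_WORD branch is omitted because it depends on BaseX's FTLexer):

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

final class UniqueTokensSketch {
  /** ALL/ANY: every distinct token is kept once (duplicates are dropped by the set). */
  static Set<String> uniqueTokens(final List<String> tokens) {
    return new LinkedHashSet<>(tokens);
  }

  /** PHRASE: all tokens are joined into a single space-separated phrase. */
  static Set<String> uniquePhrase(final List<String> tokens) {
    final Set<String> set = new LinkedHashSet<>();
    set.add(String.join(" ", tokens).trim());
    return set;
  }
}

A LinkedHashSet keeps the insertion order while dropping duplicates, mirroring the duplicate removal performed by the TokenSet.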