@Override
public String toString() {
  final TokenBuilder tb = new TokenBuilder().add(var.toString()).add(' ').add(ASSIGN);
  tb.add(' ').add(expr.toString());
  if (coll != null) tb.add(' ').add(COLLATION).add(" \"").add(coll.uri()).add('"');
  return tb.toString();
}
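// Illustrative rendering of the clause above, assuming ASSIGN expands to ":=" and
// COLLATION to "collation" (hypothetical variable and value):
//   $x := 'value' collation "http://www.w3.org/2005/xpath-functions/collation/codepoint"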
@Override
public String toString() {
  final int es = expr.length;
  // the last entry of expr is rendered as the function, the preceding entries as its arguments
  final TokenBuilder tb = new TokenBuilder(expr[es - 1].toString()).add('(');
  for (int e = 0; e < es - 1; e++) {
    tb.add(expr[e].toString());
    if (e < es - 2) tb.add(", ");
  }
  return tb.add(')').toString();
}
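// Illustrative example: if expr holds the arguments followed by the function item,
// e.g. [$a, $b, $f] (hypothetical operands), the method above yields "$f($a, $b)".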
@Override public String toString() { final TokenBuilder tb = new TokenBuilder("map { "); boolean key = true; for (final Expr e : expr) { tb.add(key ? tb.size() > 6 ? ", " : "" : ":=").add(e.toString()); key ^= true; } return tb.add(" }").toString(); }
/**
 * Caches and returns all unique tokens specified in a query.
 *
 * @param list token list
 * @return token set
 */
private TokenSet unique(final TokenList list) {
  // cache all query tokens in a set (duplicates are removed)
  final TokenSet ts = new TokenSet();
  switch (mode) {
    case ALL:
    case ANY:
      // add the tokens as they were specified
      for (final byte[] t : list) ts.add(t);
      break;
    case ALL_WORDS:
    case ANY_WORD:
      // tokenize each entry and add the single words
      final FTLexer l = new FTLexer(ftt.opt);
      for (final byte[] t : list) {
        l.init(t);
        while (l.hasNext()) ts.add(l.nextToken());
      }
      break;
    case PHRASE:
      // join all tokens to a single phrase
      final TokenBuilder tb = new TokenBuilder();
      for (final byte[] t : list) tb.add(t).add(' ');
      ts.add(tb.trim().finish());
  }
  return ts;
}
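// Sketch of the three branches above for the hypothetical token list { "red", "red roses" },
// assuming FTLexer splits tokens at whitespace:
//   ALL / ANY:             { "red", "red roses" }   tokens are cached as specified
//   ALL_WORDS / ANY_WORD:  { "red", "roses" }       each token is split into single words
//   PHRASE:                { "red red roses" }      all tokens are joined to one phrase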