/** * Returns a formatted number. * * @param info input info * @param number number to be formatted * @param picture picture * @return string representation * @throws QueryException query exception */ public byte[] format(final InputInfo info, final ANum number, final byte[] picture) throws QueryException { // find pattern separator and sub-patterns final TokenList tl = new TokenList(); byte[] pic = picture; // "A picture-string consists either of a sub-picture, or of two sub-pictures separated by // a pattern-separator-sign" final int i = indexOf(pic, pattern); if (i == -1) { tl.add(pic); } else { tl.add(substring(pic, 0, i)); pic = substring(pic, i + cl(pic, i)); // "A picture-string must not contain more than one pattern-separator-sign" if (contains(pic, pattern)) throw PICNUM_X.get(info, picture); tl.add(pic); } final byte[][] patterns = tl.finish(); // check and analyze patterns if (!checkSyntax(patterns)) throw PICNUM_X.get(info, picture); final Picture[] pics = analyze(patterns); // return formatted string return format(number, pics, info); }
/** * Extracts entries from the archive. * * @param ctx query context * @return text entries * @throws QueryException query exception */ private TokenList extract(final QueryContext ctx) throws QueryException { final B64 archive = (B64) checkType(checkItem(expr[0], ctx), AtomType.B64); TokenSet hs = null; if (expr.length > 1) { // filter result to specified entries hs = new TokenSet(); final Iter names = ctx.iter(expr[1]); for (Item en; (en = names.next()) != null; ) { hs.add(checkElmStr(en).string(info)); } } final TokenList tl = new TokenList(); final ArchiveIn in = ArchiveIn.get(archive.input(info), info); try { while (in.more()) { final ZipEntry ze = in.entry(); if (ze.isDirectory()) continue; if (hs == null || hs.delete(token(ze.getName())) != 0) tl.add(in.read()); } } catch (final IOException ex) { Util.debug(ex); ARCH_FAIL.thrw(info, ex); } finally { in.close(); } return tl; }
/** * Returns all tokens of the query. * * @param qc query context * @return token list * @throws QueryException query exception */ private TokenList tokens(final QueryContext qc) throws QueryException { final TokenList tl = new TokenList(); final Iter ir = qc.iter(query); for (byte[] qu; (qu = nextToken(ir)) != null; ) { // skip empty tokens if not all results are needed if (qu.length != 0 || mode == FTMode.ALL || mode == FTMode.ALL_WORDS) tl.add(qu); } return tl; }
/**
 * Evaluates the tokenize function.
 * @param ctx query context
 * @return function result
 * @throws QueryException query exception
 */
private Value tokenize(final QueryContext ctx) throws QueryException {
  final byte[] value = checkEStr(expr[0], ctx);
  final Pattern regex = pattern(expr[1], expr.length == 3 ? expr[2] : null, ctx);
  // a pattern that matches the empty string would produce an endless token sequence
  if(regex.matcher("").matches()) REGROUP.thrw(info);

  final TokenList list = new TokenList();
  final String input = string(value);
  if(!input.isEmpty()) {
    int start = 0;
    final Matcher matcher = regex.matcher(input);
    // add the substring preceding each match; the last token follows the final match
    while(matcher.find()) {
      list.add(input.substring(start, matcher.start()));
      start = matcher.end();
    }
    list.add(input.substring(start));
  }
  return StrSeq.get(list);
}