@Override
protected void indexDelete(final int pre, final int size) {
  final boolean textI = meta.textindex, attrI = meta.attrindex;
  if (textI || attrI) {
    // collect all keys and ids
    indexBegin();
    final int l = pre + size;
    for (int p = pre; p < l; ++p) {
      final int k = kind(p);
      // consider nodes which are attribute, text, comment, or proc. instruction
      final boolean text = k == TEXT || k == COMM || k == PI;
      if (textI && text || attrI && k == ATTR) {
        final byte[] key = text(p, text);
        if (key.length <= meta.maxlen) {
          final TokenObjMap<IntList> m = text ? txtBuffer : atvBuffer;
          IntList ids = m.get(key);
          if (ids == null) {
            ids = new IntList(1);
            m.put(key, ids);
          }
          ids.add(id(p));
        }
      }
    }
    indexDelete();
  }
}
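// A minimal standalone sketch (not BaseX code) of the buffering pattern above: each text
// key is mapped to the list of node ids that carry it, so all of them can later be dropped
// from the index in one pass. JDK types only; all names are illustrative assumptions.
final class DeleteBufferSketch {
  /** Collects node ids per text key. */
  static java.util.Map<String, java.util.List<Integer>> collect(
      final java.util.Map<Integer, String> nodes) {
    final java.util.Map<String, java.util.List<Integer>> buffer = new java.util.HashMap<>();
    for (final java.util.Map.Entry<Integer, String> node : nodes.entrySet()) {
      // create the id list for this key on first use, then append the node id
      buffer.computeIfAbsent(node.getValue(), k -> new java.util.ArrayList<>()).add(node.getKey());
    }
    return buffer;
  }
}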
@Override
protected long index(final int pre, final int id, final byte[] value, final int kind) {
  final DataAccess store;
  final TokenObjMap<IntList> map;
  if (kind == ATTR) {
    store = values;
    map = meta.attrindex ? atvBuffer : null;
  } else {
    store = texts;
    // don't index document names
    map = meta.textindex && kind != DOC ? txtBuffer : null;
  }

  // add text to map to index later
  if (meta.updindex && map != null && value.length <= meta.maxlen) {
    IntList ids = map.get(value);
    if (ids == null) {
      ids = new IntList(1);
      map.put(value, ids);
    }
    ids.add(id);
  }

  // add text to text file
  // inline integer value...
  final long v = toSimpleInt(value);
  if (v != Integer.MIN_VALUE) return v | IO.OFFNUM;

  // store text
  final long off = store.length();
  final byte[] val = COMP.get().pack(value);
  store.writeToken(off, val);
  return val == value ? off : off | IO.OFFCOMP;
}
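// A small, self-contained illustration (not BaseX code) of the two storage paths above:
// small integer values are inlined into the returned reference and tagged with a flag bit,
// all other values are appended to a store and referenced by position. The flag constant,
// class and method names are hypothetical.
final class InlineRefSketch {
  /** Flag bit marking an inlined integer rather than a store position (assumed layout). */
  private static final long INLINE = 0x4000000000000000L;
  private final java.util.List<byte[]> store = new java.util.ArrayList<>();

  /** Returns a reference: the inlined integer, or the position of the stored value. */
  long index(final byte[] value) {
    final long v = toSimpleInt(value);
    if (v != Long.MIN_VALUE) return v | INLINE;
    store.add(value);
    return store.size() - 1;
  }

  /** Parses a short non-negative integer, or returns Long.MIN_VALUE (stand-in for toSimpleInt). */
  private static long toSimpleInt(final byte[] value) {
    if (value.length == 0 || value.length > 9) return Long.MIN_VALUE;
    long r = 0;
    for (final byte b : value) {
      if (b < '0' || b > '9') return Long.MIN_VALUE;
      r = r * 10 + b - '0';
    }
    return r;
  }
}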
/**
 * Returns the node for the specified term, creating it if it does not exist yet.
 *
 * @param term term
 * @return node
 */
private ThesNode node(final byte[] term) {
  ThesNode node = nodes.get(term);
  if (node == null) {
    node = new ThesNode();
    node.term = term;
    nodes.put(term, node);
  }
  return node;
}
/**
 * Returns a compiled regular expression pattern.
 *
 * @param pattern input pattern
 * @param modifier modifier item
 * @param ctx query context
 * @return compiled pattern
 * @throws QueryException query exception
 */
private Pattern pattern(final Expr pattern, final Expr modifier, final QueryContext ctx)
    throws QueryException {
  final byte[] pat = checkStr(pattern, ctx);
  final byte[] mod = modifier != null ? checkStr(modifier, ctx) : null;
  // cache compiled patterns under a combined "pattern + modifier" key
  final TokenBuilder tb = new TokenBuilder(pat);
  if (mod != null) tb.add(0).add(mod);
  final byte[] key = tb.finish();
  Pattern p = patterns.get(key);
  if (p == null) {
    p = RegExParser.parse(pat, mod, ctx.sc.xquery3(), info);
    patterns.add(key, p);
  }
  return p;
}
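// A standalone sketch (not the BaseX implementation) of the caching idea above: compiled
// regular expressions are cached under a combined "pattern + modifier" key, separated by a
// character that cannot occur in either part. Class and field names are hypothetical.
final class PatternCacheSketch {
  private final java.util.Map<String, java.util.regex.Pattern> cache = new java.util.HashMap<>();

  /** Returns a cached pattern, compiling and caching it on first use. */
  java.util.regex.Pattern pattern(final String regex, final String modifier) {
    final String key = regex + '\0' + (modifier == null ? "" : modifier);
    return cache.computeIfAbsent(key, k -> java.util.regex.Pattern.compile(regex,
        modifier != null && modifier.indexOf('i') >= 0
            ? java.util.regex.Pattern.CASE_INSENSITIVE : 0));
  }
}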
/**
 * Returns the documentation for the specified parameter, or {@code null}.
 *
 * @param doc documentation
 * @param name parameter name
 * @return documentation of the specified parameter, or {@code null} if none is found
 */
public static byte[] doc(final TokenObjMap<TokenList> doc, final byte[] name) {
  final TokenList params = doc != null ? doc.get(DOC_PARAM) : null;
  if (params != null) {
    for (final byte[] param : params) {
      final int vl = param.length;
      // skip the leading '$' of the parameter name
      final int s = startsWith(param, '$') ? 1 : 0;
      for (int v = s; v < vl; v++) {
        // the first whitespace separates the name from its description
        if (!ws(param[v])) continue;
        if (!eq(substring(param, s, v), name)) break;
        return trim(substring(param, v + 1, vl));
      }
    }
  }
  return null;
}
/**
 * Updates an archive.
 *
 * @param ctx query context
 * @return updated archive
 * @throws QueryException query exception
 */
private B64 update(final QueryContext ctx) throws QueryException {
  final B64 archive = (B64) checkType(checkItem(expr[0], ctx), AtomType.B64);
  // entries to be updated
  final TokenObjMap<Item[]> hm = new TokenObjMap<Item[]>();
  final Iter entr = ctx.iter(expr[1]);
  final Iter cont = ctx.iter(expr[2]);
  int e = 0;
  int c = 0;
  Item en, cn;
  while (true) {
    en = entr.next();
    cn = cont.next();
    if (en == null || cn == null) break;
    hm.add(checkElmStr(en).string(info), new Item[] { en, cn });
    e++;
    c++;
  }
  // count remaining entries
  if (cn != null) do c++; while (cont.next() != null);
  if (en != null) do e++; while (entr.next() != null);
  if (e != c) ARCH_DIFF.thrw(info, e, c);

  final ArchiveIn in = ArchiveIn.get(archive.input(info), info);
  final ArchiveOut out = ArchiveOut.get(in.format(), info);
  try {
    if (in instanceof GZIPIn)
      ARCH_MODIFY.thrw(info, in.format().toUpperCase(Locale.ENGLISH));
    // delete entries to be updated
    while (in.more()) if (!hm.contains(token(in.entry().getName()))) out.write(in);
    // add new and updated entries
    for (final byte[] h : hm) {
      if (h == null) continue;
      final Item[] it = hm.get(h);
      add(it[0], it[1], out, ZipEntry.DEFLATED);
    }
  } catch (final IOException ex) {
    Util.debug(ex);
    ARCH_FAIL.thrw(info, ex);
  } finally {
    in.close();
    out.close();
  }
  return new B64(out.toArray());
}
/**
 * Finds a thesaurus term.
 *
 * @param ii input info
 * @param list result list
 * @param token token
 * @throws QueryException query exception
 */
void find(final InputInfo ii, final TokenList list, final byte[] token) throws QueryException {
  if (nodes.isEmpty()) init(ii);
  find(list, nodes.get(token), 1);
}
/**
 * Creates a comment sub-element.
 *
 * @param tags map with tags
 * @param parent parent element
 */
final void comment(final TokenObjMap<TokenList> tags, final FElem parent) {
  for (final byte[] tag : tags) {
    for (final byte[] name : tags.get(tag)) add(name, elem(tag, parent));
  }
}