@Override
public Item item(final QueryContext qc, final InputInfo ii) throws QueryException {
  final Data data = checkData(qc);
  final String path = path(1, qc);
  final Item item = toItem(exprs[2], qc);
  final Options opts = toOptions(3, Q_OPTIONS, new Options(), qc);

  final Updates updates = qc.resources.updates();
  final IntList docs = data.resources.docs(path);
  int d = 0;

  // delete binary resources
  final IOFile bin = data.meta.binary(path);
  if (bin == null || bin.isDir()) throw BXDB_REPLACE_X.get(info, path);

  if (item instanceof Bin) {
    updates.add(new DBStore(data, path, item, info), qc);
  } else {
    if (bin.exists()) updates.add(new DBDelete(data, path, info), qc);
    final NewInput input = checkInput(item, token(path));
    if (docs.isEmpty() || docs.get(0) == 0) {
      // no replacement of first document (because of TableDiskAccess#insert, used > 0, pre = 0)
      updates.add(new DBAdd(data, input, opts, qc, info), qc);
    } else {
      updates.add(new ReplaceDoc(docs.get(0), data, input, opts, qc, info), qc);
      d = 1;
    }
  }

  // delete old documents
  final int ds = docs.size();
  for (; d < ds; d++) updates.add(new DeleteNode(docs.get(d), data, info), qc);
  return null;
}
/**
 * Constructor.
 * @param pr pre values
 * @param ps positions
 */
private FTCache(final IntList pr, final IntList ps) {
  final int s = pr.size();
  final double[] v = new double[s];
  for (int i = 0; i < s; i++) v[i] = (long) pr.get(i) << 32 | ps.get(i);
  order = Array.createOrder(v, true);
  pre = pr;
  pos = ps;
}
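/*
 * Illustrative sketch (not BaseX code): the constructor above builds one sort key per full-text
 * hit by packing the pre value into the upper 32 bits and the position into the lower 32 bits,
 * so a single ordering sorts hits by pre first and position second. The standalone example below
 * demonstrates the same packing idea directly on plain arrays; all names are hypothetical.
 */
import java.util.Arrays;

final class PackedOrderSketch {
  public static void main(final String... args) {
    // (pre, pos) hits in arbitrary order
    final int[] pres = { 5, 2, 5, 2 }, positions = { 7, 3, 1, 9 };
    final int n = pres.length;
    final long[] keys = new long[n];
    // pack pre into the upper and pos into the lower 32 bits of one comparable key
    for (int i = 0; i < n; i++) keys[i] = (long) pres[i] << 32 | (positions[i] & 0xFFFFFFFFL);
    Arrays.sort(keys);
    // decode: hits now come out ordered by pre first, position second
    for (final long key : keys) System.out.println("pre=" + (key >> 32) + ", pos=" + (int) key);
  }
}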
@Override
public void execute(final GUI gui) {
  final DialogExport dialog = new DialogExport(gui);
  if (!dialog.ok()) return;

  final IOFile root = new IOFile(dialog.path());

  // check if existing files will be overwritten
  if (root.exists()) {
    IO file = null;
    boolean overwrite = false;
    final Data d = gui.context.data();
    final IntList il = d.resources.docs();
    final int is = il.size();
    for (int i = 0; i < is; i++) {
      file = root.merge(Token.string(d.text(il.get(i), true)));
      if (file.exists()) {
        if (overwrite) {
          // more than one file will be overwritten; stop checking the remaining ones
          file = null;
          break;
        }
        overwrite = true;
      }
    }
    if (overwrite) {
      // show message for overwriting files or directories
      final String msg = file == null ? FILES_REPLACE_X : FILE_EXISTS_X;
      if (file == null) file = root;
      if (!BaseXDialog.confirm(gui, Util.info(msg, file))) return;
    }
  }
  DialogProgress.execute(gui, new Export(root.path()));
}
@Override
public IndexIterator iter(final IndexToken token) {
  final int id = values.id(token.get());
  if (id == 0) return IndexIterator.EMPTY;

  final int len = lenList.get(id);
  final int[] ids = idsList.get(id), pres;
  if (data.meta.updindex) {
    final IntList tmp = new IntList();
    for (int i = 0; i < len; ++i) tmp.add(data.pre(ids[i]));
    pres = tmp.sort().finish();
  } else {
    pres = ids;
  }

  return new IndexIterator() {
    int p;
    @Override
    public boolean more() { return p < len; }
    @Override
    public int pre() { return pres[p++]; }
    @Override
    public int size() { return len; }
  };
}
/** Finishes the index creation. */
void finish() {
  if (reorder == null) return;
  for (int i = 1; i < reorder.size(); i++) {
    if (reorder.get(i)) Arrays.sort(idsList.get(i), 0, lenList.get(i));
  }
  reorder = null;
}
@Override
public int size() {
  // returns the actual number of indexed entries
  // (iterate over the length list; the original loop bound "c < s" never ran, as s starts at 0)
  int s = 0;
  final int ll = lenList.size();
  for (int c = 1; c < ll; c++) {
    if (lenList.get(c) > 0) s++;
  }
  return s;
}
/**
 * Algorithm of Tarjan for computing the strongly connected components of a graph.
 * @param v current node
 * @throws QueryException if a variable directly calls itself
 */
private void tarjan(final int v) throws QueryException {
  final int ixv = 2 * v, llv = ixv + 1, idx = next++;
  while (list.size() <= llv) list.add(-1);
  list.set(ixv, idx);
  list.set(llv, idx);

  stack.push(v);

  for (final int w : adjacentTo(v)) {
    final int ixw = 2 * w, llw = ixw + 1;
    if (list.size() <= ixw || list.get(ixw) < 0) {
      // successor w has not yet been visited; recurse on it
      tarjan(w);
      list.set(llv, Math.min(list.get(llv), list.get(llw)));
    } else if (stack.contains(w)) {
      // successor w is in stack S and hence in the current SCC
      list.set(llv, Math.min(list.get(llv), list.get(ixw)));
    }
  }

  // if v is a root node, pop the stack and generate an SCC
  if (list.get(llv) == list.get(ixv)) {
    int w;
    Scope[] out = null;
    do {
      w = stack.pop();
      final Scope scp = scopes.get(w);
      out = out == null ? new Scope[] { scp } : Array.add(out, scp);
    } while (w != v);
    result.add(out);
  }
}
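/*
 * Illustrative sketch (not BaseX code): the method above interleaves discovery index and lowlink
 * values in a single IntList (slots 2*v and 2*v+1) and collects Scope arrays. The standalone
 * class below shows the same Tarjan algorithm on a plain adjacency-list graph; all names are
 * hypothetical.
 */
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;

final class TarjanSketch {
  private final int[][] graph;          // adjacency lists, nodes 0..n-1
  private final int[] index, lowlink;   // discovery index and lowlink per node
  private final boolean[] onStack;
  private final Deque<Integer> stack = new ArrayDeque<>();
  private final List<List<Integer>> sccs = new ArrayList<>();
  private int next;

  TarjanSketch(final int[][] graph) {
    this.graph = graph;
    final int n = graph.length;
    index = new int[n];
    lowlink = new int[n];
    onStack = new boolean[n];
    Arrays.fill(index, -1);
    for (int v = 0; v < n; v++) if (index[v] < 0) visit(v);
  }

  private void visit(final int v) {
    index[v] = lowlink[v] = next++;
    stack.push(v);
    onStack[v] = true;
    for (final int w : graph[v]) {
      if (index[w] < 0) {
        // successor not yet visited: recurse and propagate its lowlink
        visit(w);
        lowlink[v] = Math.min(lowlink[v], lowlink[w]);
      } else if (onStack[w]) {
        // successor is on the stack, hence part of the current SCC
        lowlink[v] = Math.min(lowlink[v], index[w]);
      }
    }
    // v is the root of an SCC: pop all of its members
    if (lowlink[v] == index[v]) {
      final List<Integer> scc = new ArrayList<>();
      int w;
      do {
        w = stack.pop();
        onStack[w] = false;
        scc.add(w);
      } while (w != v);
      sccs.add(scc);
    }
  }

  public static void main(final String... args) {
    // 0 -> 1 -> 2 -> 0 form one component; 3 points into the cycle but is its own SCC
    final int[][] g = { { 1 }, { 2 }, { 0 }, { 0 } };
    System.out.println(new TarjanSketch(g).sccs); // [[2, 1, 0], [3]]
  }
}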
/**
 * Removes values from the index.
 * @param key key
 * @param vals sorted values
 */
void delete(final byte[] key, final int... vals) {
  final int id = values.id(key), vl = vals.length, l = lenList.get(id), s = l - vl;
  final int[] ids = idsList.get(id);
  for (int i = 0, n = 0, v = 0; i < l; i++) {
    if (v == vl || ids[i] != vals[v]) ids[n++] = ids[i];
    else v++;
  }
  lenList.set(id, s);
  if (s == 0) idsList.set(id, null);
}
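/*
 * Illustrative sketch (not BaseX code): delete() above walks the sorted id array once and keeps
 * only entries that do not occur in the (likewise sorted) deletion list. Below is a standalone
 * version of that single-pass filter, with hypothetical names.
 */
final class SortedRemoveSketch {
  /** Removes the sorted values vals from the sorted prefix ids[0..len-1]; returns the new length. */
  static int remove(final int[] ids, final int len, final int... vals) {
    int n = 0, v = 0;
    for (int i = 0; i < len; i++) {
      if (v == vals.length || ids[i] != vals[v]) ids[n++] = ids[i];
      else v++;
    }
    return n;
  }

  public static void main(final String... args) {
    final int[] ids = { 1, 3, 4, 7, 9 };
    final int n = remove(ids, ids.length, 3, 7);
    System.out.println(n + " entries remain"); // 3 entries remain: 1, 4, 9
  }
}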
@Override
public byte[] info(final MainOptions options) {
  final TokenBuilder tb = new TokenBuilder();
  tb.add(LI_STRUCTURE).add(HASH).add(NL);
  tb.add(LI_NAMES).add(data.meta.names(type)).add(NL);

  final IndexStats stats = new IndexStats(options.get(MainOptions.MAXSTAT));
  final int s = values.size();
  for (int p = 1; p <= s; p++) {
    final int oc = lenList.get(p);
    if (oc > 0 && stats.adding(oc)) stats.add(values.key(p), oc);
  }
  stats.print(tb);
  return tb.finish();
}
/**
 * Lists resources of the specified database.
 * @return success flag
 * @throws IOException I/O exception
 */
private boolean listDB() throws IOException {
  final String db = args[0];
  final String path = args[1] != null ? args[1] : "";
  if (!Databases.validName(db)) return error(NAME_INVALID_X, db);

  final Table table = new Table();
  table.description = RESOURCES;
  table.header.add(INPUT_PATH);
  table.header.add(TYPE);
  table.header.add(MimeTypes.CONTENT_TYPE);
  table.header.add(SIZE);

  try {
    // add xml documents
    final Data data = Open.open(db, context);
    final Resources res = data.resources;
    final IntList il = res.docs(path);
    final int ds = il.size();
    for (int i = 0; i < ds; i++) {
      final int pre = il.get(i);
      final TokenList tl = new TokenList(3);
      final byte[] file = data.text(pre, true);
      tl.add(file);
      tl.add(DataText.M_XML);
      tl.add(MimeTypes.APP_XML);
      tl.add(data.size(pre, Data.DOC));
      table.contents.add(tl);
    }
    // add binary resources
    for (final byte[] file : res.binaries(path)) {
      final String f = string(file);
      final TokenList tl = new TokenList(3);
      tl.add(file);
      tl.add(DataText.M_RAW);
      tl.add(MimeTypes.get(f));
      tl.add(data.meta.binary(f).length());
      table.contents.add(tl);
    }
    Close.close(data, context);
  } catch (final IOException ex) {
    return error(Util.message(ex));
  }
  out.println(table.sort().finish());
  return true;
}
/**
 * Adds values to the index.
 * @param key key to be indexed
 * @param vals sorted values
 */
void add(final byte[] key, final int... vals) {
  // token index: add values. otherwise, reference existing values
  final int id = type == IndexType.TOKEN ? values.put(key) : values.id(key), vl = vals.length;
  // updatable index: if required, resize existing arrays
  while (idsList.size() < id + 1) idsList.add(null);
  if (lenList.size() < id + 1) lenList.set(id, 0);

  final int len = lenList.get(id), size = len + vl;
  int[] ids = idsList.get(id);
  if (ids == null) {
    ids = vals;
  } else {
    if (ids.length < size) ids = Arrays.copyOf(ids, Array.newSize(size));
    System.arraycopy(vals, 0, ids, len, vl);
    if (ids[len - 1] > vals[0]) {
      if (reorder == null) reorder = new BoolList(values.size());
      reorder.set(id, true);
    }
  }
  idsList.set(id, ids);
  lenList.set(id, size);
}
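/*
 * Illustrative sketch (not BaseX code): add() above appends new ids to the bucket of a key and
 * only flags the bucket as unsorted when the appended block breaks the ascending order; the
 * actual sort is deferred to finish() (shown earlier). The standalone example below demonstrates
 * that append-then-sort-lazily pattern for a single bucket; all names are hypothetical.
 */
import java.util.Arrays;

final class LazySortedBucketSketch {
  private int[] ids = new int[0];
  private int len;
  private boolean unsorted;

  /** Appends a block of already sorted values. */
  void add(final int... vals) {
    if (ids.length < len + vals.length) ids = Arrays.copyOf(ids, Math.max(len + vals.length, len * 2));
    // remember whether the new block starts below the last stored value
    if (len > 0 && vals.length > 0 && ids[len - 1] > vals[0]) unsorted = true;
    System.arraycopy(vals, 0, ids, len, vals.length);
    len += vals.length;
  }

  /** Sorts the bucket once, after all additions. */
  int[] finish() {
    if (unsorted) Arrays.sort(ids, 0, len);
    unsorted = false;
    return Arrays.copyOf(ids, len);
  }

  public static void main(final String... args) {
    final LazySortedBucketSketch bucket = new LazySortedBucketSketch();
    bucket.add(2, 5, 9);
    bucket.add(1, 7); // starts below 9: bucket flagged as unsorted
    System.out.println(Arrays.toString(bucket.finish())); // [1, 2, 5, 7, 9]
  }
}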
/**
 * Returns a string representation of the index structure.
 * @param all include database contents in the representation. During updates, database lookups
 *   must be avoided, as the data structures will be inconsistent.
 * @return string
 */
public String toString(final boolean all) {
  final TokenBuilder tb = new TokenBuilder();
  tb.addExt(type).add(" INDEX, '").add(data.meta.name).add("':\n");
  final int s = lenList.size();
  for (int m = 1; m < s; m++) {
    final int len = lenList.get(m);
    if (len == 0) continue;

    final int[] ids = idsList.get(m);
    tb.add(" ").addInt(m);
    if (all) tb.add(", key: \"").add(data.text(data.pre(ids[0]), type == IndexType.TEXT)).add('"');
    tb.add(", ids");
    if (all) tb.add("/pres");
    tb.add(": ");
    for (int n = 0; n < len; n++) {
      if (n != 0) tb.add(",");
      tb.addInt(ids[n]);
      if (all) tb.add('/').addInt(data.pre(ids[n]));
    }
    tb.add("\n");
  }
  return tb.toString();
}
/**
 * Inserts a data instance at the specified pre value. Note that the specified data instance
 * must differ from this instance.
 * @param ipre value at which to insert new data
 * @param ipar parent pre value of node
 * @param clip data clip
 */
public final void insert(final int ipre, final int ipar, final DataClip clip) {
  meta.update();

  // update value and document indexes
  if (meta.updindex) indexBegin();
  resources.insert(ipre, clip);

  final int dsize = clip.size();
  final int buf = Math.min(dsize, IO.BLOCKSIZE >> IO.NODEPOWER);
  // resize buffer to cache more entries
  buffer(buf);

  // find all namespaces in scope to avoid duplicate declarations
  final TokenMap nsScope = nspaces.scope(ipar, this);

  // loop through all entries
  final IntList preStack = new IntList();
  final NSNode nsRoot = nspaces.current();
  final HashSet<NSNode> newNodes = new HashSet<NSNode>();
  final IntList flagPres = new IntList();

  // indicates if database only contains a dummy node
  final Data data = clip.data;
  int c = 0;
  for (int dpre = clip.start; dpre < clip.end; ++dpre, ++c) {
    if (c != 0 && c % buf == 0) insert(ipre + c - buf);

    final int pre = ipre + c;
    final int dkind = data.kind(dpre);
    final int dpar = data.parent(dpre, dkind);
    // ipar < 0 if document nodes on top level are added
    final int dis = dpar >= 0 ? dpre - dpar : ipar >= 0 ? pre - ipar : 0;
    final int par = dis == 0 ? -1 : pre - dis;
    if (c == 0) nspaces.root(par, this);

    while (!preStack.isEmpty() && preStack.peek() > par) nspaces.close(preStack.pop());

    switch (dkind) {
      case DOC:
        // add document
        nspaces.prepare();
        final int s = data.size(dpre, dkind);
        doc(pre, s, data.text(dpre, true));
        meta.ndocs++;
        preStack.push(pre);
        break;
      case ELEM:
        // add element
        nspaces.prepare();
        boolean ne = false;
        if (data.nsFlag(dpre)) {
          final Atts at = data.ns(dpre);
          for (int a = 0; a < at.size(); ++a) {
            // check if prefix has been declared / is part of current ns scope
            final byte[] old = nsScope.get(at.name(a));
            if (old == null || !eq(old, at.value(a))) {
              // we have to keep track of all new NSNodes that are added
              // to the namespace structure, as their pre values must not
              // be updated. I.e. if an NSNode N with pre value 3 existed
              // prior to inserting and two new nodes are inserted at
              // location pre == 3 we have to make sure N and only N gets
              // updated.
              newNodes.add(nspaces.add(at.name(a), at.value(a), pre));
              ne = true;
            }
          }
        }
        byte[] nm = data.name(dpre, dkind);
        elem(dis, tagindex.index(nm, null, false), data.attSize(dpre, dkind),
            data.size(dpre, dkind), nspaces.uri(nm, true), ne);
        preStack.push(pre);
        break;
      case TEXT:
      case COMM:
      case PI:
        // add text
        text(pre, dis, data.text(dpre, true), dkind);
        break;
      case ATTR:
        // add attribute
        nm = data.name(dpre, dkind);
        // check if prefix is already in nsScope or not
        final byte[] attPref = prefix(nm);
        // check if prefix of attribute has already been declared, otherwise
        // add declaration to parent node
        if (data.nsFlag(dpre) && nsScope.get(attPref) == null) {
          nspaces.add(par, preStack.isEmpty() ? -1 : preStack.peek(), attPref,
              data.nspaces.uri(data.uri(dpre, dkind)), this);
          // save pre value to set ns flag later for this node. can't be done
          // here as direct table access would interfere with the buffer
          flagPres.add(par);
        }
        attr(pre, dis, atnindex.index(nm, null, false), data.text(dpre, false),
            nspaces.uri(nm, false), false);
        break;
    }
  }
  // finalize and update namespace structure
  while (!preStack.isEmpty()) nspaces.close(preStack.pop());
  nspaces.root(nsRoot);

  if (bp != 0) insert(ipre + c - 1 - (c - 1) % buf);
  // reset buffer to old size
  buffer(1);

  // set ns flags
  for (int f = 0; f < flagPres.size(); f++) {
    final int fl = flagPres.get(f);
    table.write2(fl, 1, name(fl) | 1 << 15);
  }

  // increase size of ancestors
  int p = ipar;
  while (p >= 0) {
    final int k = kind(p);
    size(p, k, size(p, k) + dsize);
    p = parent(p, k);
  }

  if (meta.updindex) {
    // add the entries to the ID -> PRE mapping:
    idmap.insert(ipre, id(ipre), dsize);
    indexEnd();
  }

  if (!cache) updateDist(ipre + dsize, dsize);

  // propagate PRE value shifts to namespaces
  if (ipar != -1) nspaces.insert(ipre, dsize, newNodes);
}
@Override
public int costs(final IndexToken it) {
  return lenList.get(values.id(it.get()));
}
/**
 * Constructor.
 * @param args command-line arguments
 * @throws IOException I/O exception
 */
public BaseX(final String... args) throws IOException {
  super(args);

  // create session to show optional login request
  session();

  console = true;
  try {
    // loop through all commands
    final StringBuilder bind = new StringBuilder();
    SerializerOptions sopts = null;
    boolean v = false, qi = false, qp = false;
    final int os = ops.size();
    for (int o = 0; o < os; o++) {
      final int c = ops.get(o);
      String val = vals.get(o);

      if (c == 'b') {
        // set/add variable binding
        if (bind.length() != 0) bind.append(',');
        // commas are escaped by a second comma
        val = bind.append(val.replaceAll(",", ",,")).toString();
        execute(new Set(MainOptions.BINDINGS, val), false);
      } else if (c == 'c') {
        // evaluate commands
        final IO io = IO.get(val);
        String base = ".";
        if (io.exists() && !io.isDir()) {
          val = io.string();
          base = io.path();
        }
        execute(new Set(MainOptions.QUERYPATH, base), false);
        execute(val);
        execute(new Set(MainOptions.QUERYPATH, ""), false);
        console = false;
      } else if (c == 'D') {
        // hidden option: show/hide dot query graph
        execute(new Set(MainOptions.DOTPLAN, null), false);
      } else if (c == 'i') {
        // open database or create main memory representation
        execute(new Set(MainOptions.MAINMEM, true), false);
        execute(new Check(val), verbose);
        execute(new Set(MainOptions.MAINMEM, false), false);
      } else if (c == 'I') {
        // set/add variable binding
        if (bind.length() != 0) bind.append(',');
        // commas are escaped by a second comma
        val = bind.append("=").append(val.replaceAll(",", ",,")).toString();
        execute(new Set(MainOptions.BINDINGS, val), false);
      } else if (c == 'o') {
        // change output stream
        if (out != System.out) out.close();
        out = new PrintOutput(val);
        session().setOutputStream(out);
      } else if (c == 'q') {
        // evaluate query
        execute(new XQuery(val), verbose);
        console = false;
      } else if (c == 'Q') {
        // evaluate file contents or string as query
        final IO io = IO.get(val);
        String base = ".";
        if (io.exists() && !io.isDir()) {
          val = io.string();
          base = io.path();
        }
        execute(new Set(MainOptions.QUERYPATH, base), false);
        execute(new XQuery(val), verbose);
        execute(new Set(MainOptions.QUERYPATH, ""), false);
        console = false;
      } else if (c == 'r') {
        // parse number of runs
        execute(new Set(MainOptions.RUNS, Strings.toInt(val)), false);
      } else if (c == 'R') {
        // toggle query evaluation
        execute(new Set(MainOptions.RUNQUERY, null), false);
      } else if (c == 's') {
        // set/add serialization parameter
        if (sopts == null) sopts = new SerializerOptions();
        final String[] kv = val.split("=", 2);
        sopts.assign(kv[0], kv.length > 1 ? kv[1] : "");
        execute(new Set(MainOptions.SERIALIZER, sopts), false);
      } else if (c == 't') {
        // run tests
        execute(new Test(val), verbose);
        console = false;
      } else if (c == 'u') {
        // (de)activate write-back for updates
        execute(new Set(MainOptions.WRITEBACK, null), false);
      } else if (c == 'v') {
        // show/hide verbose mode
        v ^= true;
      } else if (c == 'V') {
        // show/hide query info
        qi ^= true;
        execute(new Set(MainOptions.QUERYINFO, null), false);
      } else if (c == 'w') {
        // toggle chopping of whitespaces
        execute(new Set(MainOptions.CHOP, null), false);
      } else if (c == 'x') {
        // show/hide xml query plan
        execute(new Set(MainOptions.XMLPLAN, null), false);
        qp ^= true;
      } else if (c == 'X') {
        // show query plan before/after query compilation
        execute(new Set(MainOptions.COMPPLAN, null), false);
      } else if (c == 'z') {
        // toggle result serialization
        execute(new Set(MainOptions.SERIALIZE, null), false);
      }
      verbose = qi || qp || v;
    }
    if (console) console();
  } finally {
    quit();
  }
}