@Override
public void execute(final GUI gui) {
  final DialogExport dialog = new DialogExport(gui);
  if (!dialog.ok()) return;

  final IOFile root = new IOFile(dialog.path());
  // check if existing files will be overwritten
  if (root.exists()) {
    IO file = null;
    boolean overwrite = false;
    final Data d = gui.context.data();
    final IntList il = d.resources.docs();
    final int is = il.size();
    for (int i = 0; i < is; i++) {
      file = root.merge(Token.string(d.text(il.get(i), true)));
      if (file.exists()) {
        if (overwrite) {
          // more than one file will be overwritten; skip remaining tests
          file = null;
          break;
        }
        overwrite = true;
      }
    }
    if (overwrite) {
      // show message for overwriting files or directories
      final String msg = file == null ? FILES_REPLACE_X : FILE_EXISTS_X;
      if (file == null) file = root;
      if (!BaseXDialog.confirm(gui, Util.info(msg, file))) return;
    }
  }
  DialogProgress.execute(gui, new Export(root.path()));
}
@Override
public Item item(final QueryContext qc, final InputInfo ii) throws QueryException {
  final Data data = checkData(qc);
  final String path = path(1, qc);
  final Item item = toItem(exprs[2], qc);
  final Options opts = toOptions(3, Q_OPTIONS, new Options(), qc);

  final Updates updates = qc.resources.updates();
  final IntList docs = data.resources.docs(path);
  int d = 0;

  // resolve binary resource; reject invalid target paths
  final IOFile bin = data.meta.binary(path);
  if (bin == null || bin.isDir()) throw BXDB_REPLACE_X.get(info, path);

  if (item instanceof Bin) {
    // store new binary resource
    updates.add(new DBStore(data, path, item, info), qc);
  } else {
    // delete existing binary resource before adding an XML document
    if (bin.exists()) updates.add(new DBDelete(data, path, info), qc);
    final NewInput input = checkInput(item, token(path));
    if (docs.isEmpty() || docs.get(0) == 0) {
      // no replacement of first document (because of TableDiskAccess#insert, used > 0, pre = 0)
      updates.add(new DBAdd(data, input, opts, qc, info), qc);
    } else {
      updates.add(new ReplaceDoc(docs.get(0), data, input, opts, qc, info), qc);
      d = 1;
    }
  }
  // delete old documents
  final int ds = docs.size();
  for (; d < ds; d++) updates.add(new DeleteNode(docs.get(d), data, info), qc);
  return null;
}
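/*
 * Illustration only (not part of the sources above), assuming the item() method shown before
 * this sketch implements BaseX's db:replace XQuery function. It replaces (or adds) a resource
 * through the command API; the database name 'demo' and path 'doc.xml' are made up.
 */
import org.basex.core.Context;
import org.basex.core.cmd.CreateDB;
import org.basex.core.cmd.DropDB;
import org.basex.core.cmd.XQuery;

public final class ReplaceDemo {
  public static void main(final String[] args) throws Exception {
    final Context context = new Context();
    try {
      new CreateDB("demo", "<root/>").execute(context);
      // replaces the document at path 'doc.xml', or adds it if it does not exist yet
      new XQuery("db:replace('demo', 'doc.xml', <new/>)").execute(context);
      System.out.println(new XQuery("db:open('demo', 'doc.xml')").execute(context));
    } finally {
      new DropDB("demo").execute(context);
      context.close();
    }
  }
}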
/**
 * Constructor.
 *
 * @param pr pre values
 * @param ps positions
 */
private FTCache(final IntList pr, final IntList ps) {
  final int s = pr.size();
  final double[] v = new double[s];
  // pack pre value (high 32 bits) and position (low 32 bits) into a single sortable key
  for (int i = 0; i < s; i++) v[i] = (long) pr.get(i) << 32 | ps.get(i);
  order = Array.createOrder(v, true);
  pre = pr;
  pos = ps;
}
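/*
 * Illustration only: why the constructor above packs each pre value into the high 32 bits and
 * the position into the low 32 bits. Sorting the packed keys orders entries by pre value first
 * and by position second. The class name is made up for this sketch.
 */
import java.util.Arrays;

public final class PackDemo {
  public static void main(final String[] args) {
    final int[] pres = { 7, 3, 3 }, poss = { 1, 9, 2 };
    final long[] keys = new long[pres.length];
    for (int i = 0; i < keys.length; i++) keys[i] = (long) pres[i] << 32 | poss[i];
    Arrays.sort(keys);
    for (final long key : keys) {
      // high word: pre value, low word: position
      System.out.println((key >>> 32) + "/" + (key & 0xFFFFFFFFL));
    }
    // prints 3/2, 3/9, 7/1
  }
}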
/**
 * Lists resources of the specified database.
 *
 * @return success flag
 * @throws IOException I/O exception
 */
private boolean listDB() throws IOException {
  final String db = args[0];
  final String path = args[1] != null ? args[1] : "";
  if (!Databases.validName(db)) return error(NAME_INVALID_X, db);

  final Table table = new Table();
  table.description = RESOURCES;
  table.header.add(INPUT_PATH);
  table.header.add(TYPE);
  table.header.add(MimeTypes.CONTENT_TYPE);
  table.header.add(SIZE);

  try {
    // add XML documents
    final Data data = Open.open(db, context);
    final Resources res = data.resources;
    final IntList il = res.docs(path);
    final int ds = il.size();
    for (int i = 0; i < ds; i++) {
      final int pre = il.get(i);
      final TokenList tl = new TokenList(3);
      final byte[] file = data.text(pre, true);
      tl.add(file);
      tl.add(DataText.M_XML);
      tl.add(MimeTypes.APP_XML);
      tl.add(data.size(pre, Data.DOC));
      table.contents.add(tl);
    }
    // add binary resources
    for (final byte[] file : res.binaries(path)) {
      final String f = string(file);
      final TokenList tl = new TokenList(3);
      tl.add(file);
      tl.add(DataText.M_RAW);
      tl.add(MimeTypes.get(f));
      tl.add(data.meta.binary(f).length());
      table.contents.add(tl);
    }
    Close.close(data, context);
  } catch (final IOException ex) {
    return error(Util.message(ex));
  }
  out.println(table.sort().finish());
  return true;
}
/**
 * Adds values to the index.
 *
 * @param key key to be indexed
 * @param vals sorted values
 */
void add(final byte[] key, final int... vals) {
  // token index: add values. otherwise, reference existing values
  final int id = type == IndexType.TOKEN ? values.put(key) : values.id(key), vl = vals.length;
  // updatable index: if required, resize existing arrays
  while (idsList.size() < id + 1) idsList.add(null);
  if (lenList.size() < id + 1) lenList.set(id, 0);

  final int len = lenList.get(id), size = len + vl;
  int[] ids = idsList.get(id);
  if (ids == null) {
    ids = vals;
  } else {
    if (ids.length < size) ids = Arrays.copyOf(ids, Array.newSize(size));
    System.arraycopy(vals, 0, ids, len, vl);
    if (ids[len - 1] > vals[0]) {
      // appended ids break the sort order: flag this key for later reordering
      if (reorder == null) reorder = new BoolList(values.size());
      reorder.set(id, true);
    }
  }
  idsList.set(id, ids);
  lenList.set(id, size);
}
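/*
 * Illustration only: the else branch above appends a sorted block of new ids behind the
 * existing ids and flags the key for later reordering if the first new id is smaller than the
 * last existing one. A stand-alone version of that check (names are made up):
 */
import java.util.Arrays;

public final class AppendDemo {
  public static void main(final String[] args) {
    int[] ids = { 2, 5, 9 };        // existing, sorted ids
    final int[] vals = { 4, 11 };   // new, sorted ids to append
    final int len = ids.length, size = len + vals.length;
    ids = Arrays.copyOf(ids, size);
    System.arraycopy(vals, 0, ids, len, vals.length);
    // the combined array is no longer sorted, since 9 > 4
    final boolean reorder = ids[len - 1] > vals[0];
    System.out.println(Arrays.toString(ids) + ", reorder: " + reorder);
  }
}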
/**
 * Returns a string representation of the index structure.
 *
 * @param all include database contents in the representation. During updates, database lookups
 *   must be avoided, as the data structures will be inconsistent.
 * @return string
 */
public String toString(final boolean all) {
  final TokenBuilder tb = new TokenBuilder();
  tb.addExt(type).add(" INDEX, '").add(data.meta.name).add("':\n");
  final int s = lenList.size();
  for (int m = 1; m < s; m++) {
    final int len = lenList.get(m);
    if (len == 0) continue;
    final int[] ids = idsList.get(m);
    tb.add(" ").addInt(m);
    if (all) tb.add(", key: \"").add(data.text(data.pre(ids[0]), type == IndexType.TEXT)).add('"');
    tb.add(", ids");
    if (all) tb.add("/pres");
    tb.add(": ");
    for (int n = 0; n < len; n++) {
      if (n != 0) tb.add(",");
      tb.addInt(ids[n]);
      if (all) tb.add('/').addInt(data.pre(ids[n]));
    }
    tb.add("\n");
  }
  return tb.toString();
}
/**
 * Optimizes the structures of a database.
 *
 * @param data data
 * @param enforceText enforce creation or deletion of text index
 * @param enforceAttr enforce creation or deletion of attribute index
 * @param enforceToken enforce creation or deletion of token index
 * @param enforceFt enforce creation or deletion of full-text index
 * @param cmd calling command instance (may be {@code null})
 * @throws IOException I/O exception during index rebuild
 */
public static void optimize(final Data data, final boolean enforceText, final boolean enforceAttr,
    final boolean enforceToken, final boolean enforceFt, final Optimize cmd) throws IOException {

  // initialize structural indexes
  final MetaData md = data.meta;
  if (!md.uptodate) {
    data.paths.init();
    data.elemNames.init();
    data.attrNames.init();
    md.dirty = true;

    final IntList pars = new IntList(), elms = new IntList();
    int n = 0;
    for (int pre = 0; pre < md.size; ++pre) {
      final byte kind = (byte) data.kind(pre);
      final int par = data.parent(pre, kind);
      while (!pars.isEmpty() && pars.peek() > par) {
        pars.pop();
        elms.pop();
      }
      final int level = pars.size();
      if (kind == Data.DOC) {
        data.paths.put(0, Data.DOC, level);
        pars.push(pre);
        elms.push(0);
        ++n;
      } else if (kind == Data.ELEM) {
        final int id = data.nameId(pre);
        data.elemNames.index(data.elemNames.key(id), null, true);
        data.paths.put(id, Data.ELEM, level);
        pars.push(pre);
        elms.push(id);
      } else if (kind == Data.ATTR) {
        final int id = data.nameId(pre);
        final byte[] val = data.text(pre, false);
        data.attrNames.index(data.attrNames.key(id), val, true);
        data.paths.put(id, Data.ATTR, level, val, md);
      } else {
        final byte[] val = data.text(pre, true);
        if (kind == Data.TEXT && level > 1) data.elemNames.index(elms.peek(), val);
        data.paths.put(0, kind, level, val, md);
      }
      if (cmd != null) cmd.pre = pre;
    }
    md.ndocs = n;
    md.uptodate = true;
  }

  // rebuild value indexes
  optimize(IndexType.TEXT, data, md.createtext, md.textindex, enforceText, cmd);
  optimize(IndexType.ATTRIBUTE, data, md.createattr, md.attrindex, enforceAttr, cmd);
  optimize(IndexType.TOKEN, data, md.createtoken, md.tokenindex, enforceToken, cmd);
  optimize(IndexType.FULLTEXT, data, md.createft, md.ftindex, enforceFt, cmd);
}
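/*
 * Illustration only: the structural pass above derives each node's level from a stack of
 * ancestor pre values, popping ancestors whose pre value exceeds the current node's parent.
 * The tiny pre/parent table below (document node, root element, a child, a grandchild, and a
 * second child of the root element) reproduces that stack discipline in isolation; all names
 * are made up for this sketch.
 */
import java.util.ArrayDeque;
import java.util.Deque;

public final class LevelDemo {
  public static void main(final String[] args) {
    final int[] parents = { -1, 0, 1, 2, 1 };
    final Deque<Integer> pars = new ArrayDeque<>();
    for (int pre = 0; pre < parents.length; pre++) {
      final int par = parents[pre];
      // discard ancestors that do not enclose the current node
      while (!pars.isEmpty() && pars.peek() > par) pars.pop();
      System.out.println("pre " + pre + ": level " + pars.size());
      pars.push(pre);
    }
    // prints levels 0, 1, 2, 3, 2
  }
}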