/**
 * Returns all tokens of the query.
 *
 * @param qc query context
 * @return token list
 * @throws QueryException query exception
 */
private TokenList tokens(final QueryContext qc) throws QueryException {
  final TokenList tl = new TokenList();
  final Iter ir = qc.iter(query);
  for (byte[] qu; (qu = nextToken(ir)) != null; ) {
    // skip empty tokens if not all results are needed
    if (qu.length != 0 || mode == FTMode.ALL || mode == FTMode.ALL_WORDS) tl.add(qu);
  }
  return tl;
}
/**
 * Lists the table contents.
 *
 * @param table table reference
 * @param root root node
 * @param header table header
 * @param skip number of columns to skip
 */
static void list(final Table table, final FElem root, final QNm header, final int skip) {
  for (final TokenList list : table.contents) {
    final FElem el = new FElem(header);
    // don't show last attribute (input path)
    final int ll = list.size() - skip;
    for (int l = 1; l < ll; l++) {
      el.add(new QNm(lc(table.header.get(l))), list.get(l));
    }
    el.add(list.get(0));
    root.add(el);
  }
}
/**
 * Recursively collects relevant thesaurus terms.
 *
 * @param list result list
 * @param node input node
 * @param level current level
 */
private void find(final TokenList list, final ThesNode node, final long level) {
  if (level > max || node == null) return;
  for (int n = 0; n < node.size; ++n) {
    if (rel.length == 0 || eq(node.rs[n], rel)) {
      final byte[] term = node.nodes[n].term;
      if (!list.contains(term)) {
        list.add(term);
        find(list, node.nodes[n], level + 1);
      }
    }
  }
}
/**
 * Creates an XQuery representation for the specified table query.
 *
 * @param filter filter terms
 * @param cols filter columns
 * @param elem element flag
 * @param name name of root element
 * @param root root flag
 * @return query
 */
public static String findTable(final StringList filter, final TokenList cols,
    final BoolList elem, final byte[] name, final boolean root) {

  final TokenBuilder tb = new TokenBuilder();
  final int is = filter.size();
  for (int i = 0; i < is; ++i) {
    final String[] spl = split(filter.get(i));
    for (final String s : spl) {
      final byte[] term = trim(replace(token(s), '"', ' '));
      if (term.length == 0) continue;
      tb.add('[');
      final boolean elm = elem.get(i);
      tb.add(elm ? ".//" : "@");
      tb.add("*:");
      tb.add(cols.get(i));
      if (term[0] == '<' || term[0] == '>') {
        tb.add(term[0]);
        tb.addLong(calcNum(substring(term, 1)));
      } else {
        tb.add(" contains text \"");
        tb.add(term);
        tb.add('"');
      }
      tb.add(']');
    }
  }
  return tb.isEmpty() ? "/" :
    (root ? "/" : "") + Axis.DESCORSELF + "::*:" + string(name) + tb;
}
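// Illustrative sketch only (not part of the original source): assuming Axis.DESCORSELF
// renders as "descendant-or-self", a call with filter = ["XML"], cols = ["title"],
// elem = [false], name = "book" and root = true would build roughly
//   /descendant-or-self::*:book[@*:title contains text "XML"]
// whereas a numeric term such as ">2000" on an element column "year" (elem = true) would
// be turned into a comparison predicate along the lines of [.//*:year>2000] via calcNum.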
/**
 * Lists resources of the specified database.
 *
 * @return success flag
 * @throws IOException I/O exception
 */
private boolean listDB() throws IOException {
  final String db = args[0];
  final String path = args[1] != null ? args[1] : "";
  if (!Databases.validName(db)) return error(NAME_INVALID_X, db);

  final Table table = new Table();
  table.description = RESOURCES;
  table.header.add(INPUT_PATH);
  table.header.add(TYPE);
  table.header.add(MimeTypes.CONTENT_TYPE);
  table.header.add(SIZE);

  try {
    // add xml documents
    final Data data = Open.open(db, context);
    final Resources res = data.resources;
    final IntList il = res.docs(path);
    final int ds = il.size();
    for (int i = 0; i < ds; i++) {
      final int pre = il.get(i);
      final TokenList tl = new TokenList(3);
      final byte[] file = data.text(pre, true);
      tl.add(file);
      tl.add(DataText.M_XML);
      tl.add(MimeTypes.APP_XML);
      tl.add(data.size(pre, Data.DOC));
      table.contents.add(tl);
    }
    // add binary resources
    for (final byte[] file : res.binaries(path)) {
      final String f = string(file);
      final TokenList tl = new TokenList(3);
      tl.add(file);
      tl.add(DataText.M_RAW);
      tl.add(MimeTypes.get(f));
      tl.add(data.meta.binary(f).length());
      table.contents.add(tl);
    }
    Close.close(data, context);
  } catch (final IOException ex) {
    return error(Util.message(ex));
  }
  out.println(table.sort().finish());
  return true;
}
/**
 * Lists all databases.
 *
 * @return success flag
 * @throws IOException I/O exception
 */
private boolean list() throws IOException {
  final Table table = new Table();
  table.description = DATABASES_X;

  final boolean create = context.user.has(Perm.CREATE);
  table.header.add(T_NAME);
  table.header.add(RESOURCES);
  table.header.add(SIZE);
  if (create) table.header.add(INPUT_PATH);

  for (final String name : context.databases.listDBs()) {
    String file = null;
    long size = 0;
    int docs = 0;
    final MetaData meta = new MetaData(name, context);
    try {
      meta.read();
      size = meta.dbsize();
      docs = meta.ndocs;
      if (context.perm(Perm.READ, meta)) file = meta.original;
    } catch (final IOException ex) {
      file = ERROR;
    }
    // count number of raw files
    final IOFile dir = new IOFile(mprop.dbpath(name), M_RAW);
    final int bin = dir.descendants().size();
    // create entry
    if (file != null) {
      final TokenList tl = new TokenList(4);
      tl.add(name);
      tl.add(docs + bin);
      tl.add(size);
      if (create) tl.add(file);
      table.contents.add(tl);
    }
  }
  out.println(table.sort().finish());
  return true;
}
/**
 * Creates a value container for the specified strings.
 *
 * @param strings strings
 * @return string sequence
 */
protected static Value strings(final String... strings) {
  final TokenList tl = new TokenList(strings.length);
  for (final String s : strings) tl.add(s);
  return StrSeq.get(tl.finish());
}
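// Illustrative usage (not part of the original source): strings("a", "b", "c") is expected
// to return a Value holding the three xs:string items "a", "b" and "c", ready to be bound
// or compared as a single sequence in tests.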