/** Connect to searchd server and run current search query. */
public SphinxResult Query(String query, String index, String comment) throws SphinxException
{
    myAssert(_reqs == null || _reqs.size() == 0,
        "AddQuery() and Query() can not be combined; use RunQueries() instead");

    AddQuery(query, index, comment);
    SphinxResult[] results = RunQueries();
    _reqs = new ArrayList(); /* just in case it failed too early */
    if (results == null || results.length < 1)
        return null; /* probably a network error; the error message should already be filled */

    SphinxResult res = results[0];
    if (res == null)
        return null;
    _warning = res.warning;
    _error = res.error;
    if (res.getStatus() == SEARCHD_ERROR)
        return null;
    return res;
}
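/*
 * Usage sketch (illustrative only, not part of the original client): run a
 * single query and print basic match info. Assumes a SphinxClient with
 * default connection settings; "test1" is a hypothetical index name.
 */
private static void queryExample() throws SphinxException {
    SphinxClient cl = new SphinxClient();
    SphinxResult res = cl.Query("hello world", "test1", "");
    if (res == null) {
        System.err.println("query failed: " + cl.GetLastError());
        return;
    }
    System.out.println("found " + res.totalFound + " matches in " + res.time + " sec");
    for (int i = 0; i < res.matches.length; i++)
        System.out.println("doc=" + res.matches[i].docId + ", weight=" + res.matches[i].weight);
}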
/** Run all previously added search queries. */
public SphinxResult[] RunQueries() throws SphinxException
{
    if (_reqs == null || _reqs.size() < 1) {
        _error = "no queries defined, issue AddQuery() first";
        return null;
    }

    /* build the mega-request */
    int nreqs = _reqs.size();
    ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
    try {
        DataOutputStream req = new DataOutputStream(reqBuf);
        req.writeInt(0); /* 0 marks this as a client (not an agent) request */
        req.writeInt(nreqs);
        for (int i = 0; i < nreqs; i++)
            req.write((byte[]) _reqs.get(i));
        req.flush();
    } catch (Exception e) {
        _error = "internal error: failed to build request: " + e;
        return null;
    }

    DataInputStream in = _DoRequest(SEARCHD_COMMAND_SEARCH, VER_COMMAND_SEARCH, reqBuf);
    if (in == null)
        return null;

    SphinxResult[] results = new SphinxResult[nreqs];
    _reqs = new ArrayList();
    try {
        for (int ires = 0; ires < nreqs; ires++) {
            SphinxResult res = new SphinxResult();
            results[ires] = res;

            int status = in.readInt();
            res.setStatus(status);
            if (status != SEARCHD_OK) {
                String message = readNetUTF8(in);
                if (status == SEARCHD_WARNING) {
                    res.warning = message;
                } else {
                    res.error = message;
                    continue;
                }
            }

            /* read fields */
            int nfields = in.readInt();
            res.fields = new String[nfields];
            for (int i = 0; i < nfields; i++)
                res.fields[i] = readNetUTF8(in);

            /* read attrs */
            int nattrs = in.readInt();
            res.attrTypes = new int[nattrs];
            res.attrNames = new String[nattrs];
            for (int i = 0; i < nattrs; i++) {
                res.attrNames[i] = readNetUTF8(in);
                res.attrTypes[i] = in.readInt();
            }

            /* read match count and the 64-bit document ID flag */
            int count = in.readInt();
            int id64 = in.readInt();
            res.matches = new SphinxMatch[count];
            for (int matchesNo = 0; matchesNo < count; matchesNo++) {
                SphinxMatch docInfo = new SphinxMatch(
                    (id64 == 0) ? readDword(in) : in.readLong(),
                    in.readInt());

                /* read per-match attribute values */
                for (int attrNumber = 0; attrNumber < res.attrTypes.length; attrNumber++) {
                    int type = res.attrTypes[attrNumber];

                    /* handle bigints */
                    if (type == SPH_ATTR_BIGINT) {
                        docInfo.attrValues.add(attrNumber, new Long(in.readLong()));
                        continue;
                    }

                    /* handle floats */
                    if (type == SPH_ATTR_FLOAT) {
                        docInfo.attrValues.add(attrNumber, new Float(in.readFloat()));
                        continue;
                    }

                    /* handle strings */
                    if (type == SPH_ATTR_STRING) {
                        docInfo.attrValues.add(attrNumber, readNetUTF8(in));
                        continue;
                    }

                    /* handle everything else as unsigned ints */
                    long val = readDword(in);
                    if (type == SPH_ATTR_MULTI) {
                        long[] vals = new long[(int) val];
                        for (int k = 0; k < val; k++)
                            vals[k] = readDword(in);
                        docInfo.attrValues.add(attrNumber, vals);
                    } else if (type == SPH_ATTR_MULTI64) {
                        val = val / 2; /* count is in dwords; 64-bit entries take two each */
                        long[] vals = new long[(int) val];
                        for (int k = 0; k < val; k++)
                            vals[k] = in.readLong();
                        docInfo.attrValues.add(attrNumber, vals);
                    } else {
                        docInfo.attrValues.add(attrNumber, new Long(val));
                    }
                }
                res.matches[matchesNo] = docInfo;
            }

            res.total = in.readInt();
            res.totalFound = in.readInt();
            res.time = in.readInt() / 1000.0f;

            res.words = new SphinxWordInfo[in.readInt()];
            for (int i = 0; i < res.words.length; i++)
                res.words[i] = new SphinxWordInfo(readNetUTF8(in), readDword(in), readDword(in));
        }
        return results;
    } catch (IOException e) {
        _error = "incomplete reply";
        return null;
    }
}
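/*
 * Batch usage sketch (illustrative only, not part of the original client):
 * queue several queries with AddQuery() and fire them in a single network
 * round-trip with RunQueries(). Assumes default connection settings;
 * "test1" is a hypothetical index name. Note that a null return means the
 * whole batch failed, while per-query failures are reported in each
 * result's status and error fields.
 */
private static void runQueriesExample() throws SphinxException {
    SphinxClient cl = new SphinxClient();
    cl.AddQuery("first query", "test1", "");
    cl.AddQuery("second query", "test1", "");
    SphinxResult[] results = cl.RunQueries();
    if (results == null) {
        System.err.println("batch failed: " + cl.GetLastError());
        return;
    }
    for (int i = 0; i < results.length; i++) {
        SphinxResult res = results[i];
        if (res.getStatus() == SEARCHD_ERROR)
            System.err.println("query " + i + " failed: " + res.error);
        else
            System.out.println("query " + i + ": " + res.totalFound + " found");
    }
}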