/**
 * Connect to searchd server and run the current (single) search query.
 *
 * <p>Convenience wrapper around {@link #AddQuery} + {@link #RunQueries} for the
 * common one-query case. Must not be mixed with prior AddQuery() calls.
 *
 * @param query   query string
 * @param index   index name(s) to search
 * @param comment per-query comment (shows up in searchd query log)
 * @return the single query result, or null on network/searchd error
 *         (check GetLastError()/GetLastWarning() for details)
 * @throws SphinxException if queries were already queued via AddQuery()
 */
public SphinxResult Query(String query, String index, String comment) throws SphinxException {
    myAssert(
        _reqs == null || _reqs.size() == 0,
        "AddQuery() and Query() can not be combined; use RunQueries() instead");

    AddQuery(query, index, comment);
    SphinxResult[] results = RunQueries();
    _reqs = new ArrayList(); /* just in case it failed too early */
    if (results == null || results.length < 1)
        return null; /* probably network error; error message should be already filled */

    SphinxResult res = results[0];
    /* BUGFIX: null-check BEFORE dereferencing res; the original read res.warning
     * and res.error first, making the subsequent null guard dead code and
     * risking an NPE if results[0] was ever null. */
    if (res == null) return null;

    /* propagate per-result diagnostics to the client-level accessors */
    _warning = res.warning;
    _error = res.error;
    if (res.getStatus() == SEARCHD_ERROR) return null;
    return res;
}
/**
 * Run all previously added search queries in one network round-trip.
 *
 * <p>Concatenates all requests queued by AddQuery() into a single searchd
 * SEARCH command, sends it, and parses one SphinxResult per query from the
 * reply stream. The pending-request list is reset afterwards.
 *
 * @return one result per queued query, or null on build/network error
 *         (error message is placed in _error; per-query errors/warnings go
 *         into each SphinxResult)
 * @throws SphinxException on assertion failures inside helpers
 */
public SphinxResult[] RunQueries() throws SphinxException {
    if (_reqs == null || _reqs.size() < 1) {
        _error = "no queries defined, issue AddQuery() first";
        return null;
    }

    /* build the mega-request: header (client flag + query count) followed by
     * each pre-serialized per-query request blob, back to back */
    int nreqs = _reqs.size();
    ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
    try {
        DataOutputStream req = new DataOutputStream(reqBuf);
        /* its a client (0 = client, not an agent/master) — TODO confirm against searchd protocol */
        req.writeInt(0);
        req.writeInt(nreqs);
        for (int i = 0; i < nreqs; i++) req.write((byte[]) _reqs.get(i));
        req.flush();
    } catch (Exception e) {
        _error = "internal error: failed to build request: " + e;
        return null;
    }

    DataInputStream in = _DoRequest(SEARCHD_COMMAND_SEARCH, VER_COMMAND_SEARCH, reqBuf);
    if (in == null) return null;

    SphinxResult[] results = new SphinxResult[nreqs];
    _reqs = new ArrayList(); /* queue consumed; ready for the next batch */
    try {
        /* parse one result set per query, in the order they were queued */
        for (int ires = 0; ires < nreqs; ires++) {
            SphinxResult res = new SphinxResult();
            results[ires] = res;

            /* per-query status; a warning still carries a full result set,
             * an error terminates parsing of this result only */
            int status = in.readInt();
            res.setStatus(status);
            if (status != SEARCHD_OK) {
                String message = readNetUTF8(in);
                if (status == SEARCHD_WARNING) {
                    res.warning = message;
                } else {
                    res.error = message;
                    continue;
                }
            }

            /* read fields */
            int nfields = in.readInt();
            res.fields = new String[nfields];
            int pos = 0; /* NOTE(review): unused local, kept as-is */
            for (int i = 0; i < nfields; i++) res.fields[i] = readNetUTF8(in);

            /* read attrs (name + type id pairs) */
            int nattrs = in.readInt();
            res.attrTypes = new int[nattrs];
            res.attrNames = new String[nattrs];
            for (int i = 0; i < nattrs; i++) {
                String AttrName = readNetUTF8(in);
                int AttrType = in.readInt();
                res.attrNames[i] = AttrName;
                res.attrTypes[i] = AttrType;
            }

            /* read match count; id64 flag selects 32-bit vs 64-bit document ids */
            int count = in.readInt();
            int id64 = in.readInt();
            res.matches = new SphinxMatch[count];
            for (int matchesNo = 0; matchesNo < count; matchesNo++) {
                SphinxMatch docInfo;
                /* doc id (width per id64 flag) followed by match weight */
                docInfo = new SphinxMatch((id64 == 0) ? readDword(in) : in.readLong(), in.readInt());

                /* read matches: decode each attribute value by its declared type */
                for (int attrNumber = 0; attrNumber < res.attrTypes.length; attrNumber++) {
                    String attrName = res.attrNames[attrNumber];
                    int type = res.attrTypes[attrNumber];

                    /* handle bigints */
                    if (type == SPH_ATTR_BIGINT) {
                        docInfo.attrValues.add(attrNumber, new Long(in.readLong()));
                        continue;
                    }

                    /* handle floats */
                    if (type == SPH_ATTR_FLOAT) {
                        docInfo.attrValues.add(attrNumber, new Float(in.readFloat()));
                        continue;
                    }

                    /* handle strings */
                    if (type == SPH_ATTR_STRING) {
                        String s = readNetUTF8(in);
                        docInfo.attrValues.add(attrNumber, s);
                        continue;
                    }

                    /* handle everything else as unsigned ints
                     * (readDword returns the unsigned 32-bit value widened to long) */
                    long val = readDword(in);
                    if (type == SPH_ATTR_MULTI) {
                        /* multi-value attr: val is the element count */
                        long[] vals = new long[(int) val];
                        for (int k = 0; k < val; k++) vals[k] = readDword(in);
                        docInfo.attrValues.add(attrNumber, vals);
                    } else if (type == SPH_ATTR_MULTI64) {
                        /* 64-bit MVA: wire count is in dwords, each value spans two,
                         * hence the halving */
                        val = val / 2;
                        long[] vals = new long[(int) val];
                        for (int k = 0; k < val; k++) vals[k] = in.readLong();
                        docInfo.attrValues.add(attrNumber, vals);
                    } else {
                        docInfo.attrValues.add(attrNumber, new Long(val));
                    }
                }
                res.matches[matchesNo] = docInfo;
            }

            /* result-set footer: totals, elapsed time (ms on the wire → seconds),
             * and per-word statistics */
            res.total = in.readInt();
            res.totalFound = in.readInt();
            res.time = in.readInt() / 1000.0f;
            res.words = new SphinxWordInfo[in.readInt()];
            for (int i = 0; i < res.words.length; i++)
                res.words[i] = new SphinxWordInfo(readNetUTF8(in), readDword(in), readDword(in));
        }
        return results;
    } catch (IOException e) {
        /* truncated reply mid-parse; partial results are discarded */
        _error = "incomplete reply";
        return null;
    }
}
/**
 * Add a new query with the current client settings to the pending search batch.
 *
 * <p>Serializes the query plus all currently configured options (mode, ranker,
 * sorting, weights, filters, group-by, geo anchor, overrides, select list)
 * into the searchd wire format and appends the blob to the internal request
 * list consumed by RunQueries(). Field order below mirrors the protocol and
 * must not be changed.
 *
 * @param query   query string
 * @param index   index name(s) to search
 * @param comment per-query comment (shows up in searchd query log)
 * @return the 0-based index of this query within the batch, or -1 on failure
 * @throws SphinxException via myAssert on serialization errors
 */
public int AddQuery(String query, String index, String comment) throws SphinxException {
    ByteArrayOutputStream req = new ByteArrayOutputStream();

    /* build request */
    try {
        DataOutputStream out = new DataOutputStream(req);
        out.writeInt(_offset);
        out.writeInt(_limit);
        out.writeInt(_mode);
        out.writeInt(_ranker);
        /* ranker expression only accompanies the expression-based ranker */
        if (_ranker == SPH_RANK_EXPR) {
            writeNetUTF8(out, _rankexpr);
        }
        out.writeInt(_sort);
        writeNetUTF8(out, _sortby);
        writeNetUTF8(out, query);

        /* legacy per-field weight list (0 entries when unset) */
        int weightLen = _weights != null ? _weights.length : 0;
        out.writeInt(weightLen);
        if (_weights != null) {
            for (int i = 0; i < _weights.length; i++) out.writeInt(_weights[i]);
        }

        writeNetUTF8(out, index);
        /* presumably the id64 range marker — TODO confirm against searchd protocol */
        out.writeInt(0);
        out.writeInt(_minId);
        out.writeInt(_maxId);

        /* filters: count followed by the pre-serialized filter blob */
        out.writeInt(_filterCount);
        out.write(_rawFilters.toByteArray());

        /* group-by, max matches, sort-by-group flag */
        out.writeInt(_groupFunc);
        writeNetUTF8(out, _groupBy);
        out.writeInt(_maxMatches);
        writeNetUTF8(out, _groupSort);
        out.writeInt(_cutoff);
        out.writeInt(_retrycount);
        out.writeInt(_retrydelay);
        writeNetUTF8(out, _groupDistinct);

        /* anchor point: flag 0 when no geo anchor configured, else flag 1
         * followed by attr names and lat/long in radians — TODO confirm units */
        if (_latitudeAttr == null
            || _latitudeAttr.length() == 0
            || _longitudeAttr == null
            || _longitudeAttr.length() == 0) {
            out.writeInt(0);
        } else {
            out.writeInt(1);
            writeNetUTF8(out, _latitudeAttr);
            writeNetUTF8(out, _longitudeAttr);
            out.writeFloat(_latitude);
            out.writeFloat(_longitude);
        }

        /* per-index weights */
        out.writeInt(_indexWeights.size());
        for (Iterator e = _indexWeights.keySet().iterator(); e.hasNext(); ) {
            String indexName = (String) e.next();
            Integer weight = (Integer) _indexWeights.get(indexName);
            writeNetUTF8(out, indexName);
            out.writeInt(weight.intValue());
        }

        /* max query time */
        out.writeInt(_maxQueryTime);

        /* per-field weights */
        out.writeInt(_fieldWeights.size());
        for (Iterator e = _fieldWeights.keySet().iterator(); e.hasNext(); ) {
            String field = (String) e.next();
            Integer weight = (Integer) _fieldWeights.get(field);
            writeNetUTF8(out, field);
            out.writeInt(weight.intValue());
        }

        /* comment */
        writeNetUTF8(out, comment);

        /* overrides: per-attribute value overrides keyed by document id;
         * value encoding depends on the declared attribute type */
        out.writeInt(_overrideTypes.size());
        for (Iterator e = _overrideTypes.keySet().iterator(); e.hasNext(); ) {
            String attr = (String) e.next();
            Integer type = (Integer) _overrideTypes.get(attr);
            Map values = (Map) _overrideValues.get(attr);
            writeNetUTF8(out, attr);
            out.writeInt(type.intValue());
            out.writeInt(values.size());
            for (Iterator e2 = values.keySet().iterator(); e2.hasNext(); ) {
                Long id = (Long) e2.next();
                out.writeLong(id.longValue());
                switch (type.intValue()) {
                    case SPH_ATTR_FLOAT:
                        out.writeFloat(((Float) values.get(id)).floatValue());
                        break;
                    case SPH_ATTR_BIGINT:
                        out.writeLong(((Long) values.get(id)).longValue());
                        break;
                    default:
                        out.writeInt(((Integer) values.get(id)).intValue());
                        break;
                }
            }
        }

        /* select-list */
        writeNetUTF8(out, _select);

        /* done! append the serialized blob and report its batch position */
        out.flush();
        int qIndex = _reqs.size();
        _reqs.add(qIndex, req.toByteArray());
        return qIndex;
    } catch (Exception e) {
        myAssert(false, "error in AddQuery(): " + e + ": " + e.getMessage());
    } finally {
        /* NOTE(review): ByteArrayOutputStream.close() is a no-op, so closing the
         * filter streams here is harmless; presumably it just flushes the
         * DataOutputStream wrapper — confirm before removing */
        try {
            _filters.close();
            _rawFilters.close();
        } catch (IOException e) {
            myAssert(false, "error in AddQuery(): " + e + ": " + e.getMessage());
        }
    }
    return -1;
}