Example #1
0
  /**
   * Connect to searchd server and update given attributes on given documents in given indexes.
   * Sample code that will set group_id=123 where id=1 and group_id=456 where id=3:
   *
   * <pre>
   * String[] attrs = new String[1];
   *
   * attrs[0] = "group_id";
   * long[][] values = new long[2][2];
   *
   * values[0] = new long[2]; values[0][0] = 1; values[0][1] = 123;
   * values[1] = new long[2]; values[1][0] = 3; values[1][1] = 456;
   *
   * int res = cl.UpdateAttributes ( "test1", attrs, values );
   * </pre>
   *
   * @param index index name(s) to update; might be distributed
   * @param attrs names of the attributes to update
   * @param values updates; each long[] entry must contain the document ID in its first element,
   *     followed by the new attribute values
   * @param ignorenonexistent whether to silently ignore non-existent columns in the update request
   * @return amount of actually found and updated documents (might be 0) on success, -1 on failure
   * @throws SphinxException on invalid parameters
   */
  public int UpdateAttributes(
      String index, String[] attrs, long[][] values, boolean ignorenonexistent)
      throws SphinxException {
    /* validate arguments */
    myAssert(index != null && index.length() > 0, "no index name provided");
    myAssert(attrs != null && attrs.length > 0, "no attribute names provided");
    myAssert(values != null && values.length > 0, "no update entries provided");
    for (int entry = 0; entry < values.length; entry++) {
      myAssert(values[entry] != null, "update entry #" + entry + " is null");
      myAssert(
          values[entry].length == 1 + attrs.length,
          "update entry #" + entry + " has wrong length");
    }

    /* serialize the request */
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buf);
    try {
      writeNetUTF8(out, index);

      out.writeInt(attrs.length);
      out.writeInt(ignorenonexistent ? 1 : 0);
      for (int i = 0; i < attrs.length; i++) {
        writeNetUTF8(out, attrs[i]);
        out.writeInt(0); // plain (non-MVA) attribute
      }

      out.writeInt(values.length);
      for (int i = 0; i < values.length; i++) {
        long[] update = values[i];
        out.writeLong(update[0]); /* docid goes out as a 64-bit value */
        /* values go out as 32-bit; FIXME! what happens when they are over 2^31? */
        for (int j = 1; j < update.length; j++) out.writeInt((int) update[j]);
      }

      out.flush();

    } catch (Exception e) {
      _error = "internal error: failed to build request: " + e;
      return -1;
    }

    /* run the request and parse the reply */
    DataInputStream in = _DoRequest(SEARCHD_COMMAND_UPDATE, VER_COMMAND_UPDATE, buf);
    if (in == null) return -1;

    try {
      return in.readInt();
    } catch (Exception e) {
      _error = "incomplete reply";
      return -1;
    }
  }
Example #2
0
  /**
   * Connect to searchd server, and generate keyword list for a given query.
   *
   * @return null on failure; on success, an array of Maps with misc per-keyword info
   */
  public Map[] BuildKeywords(String query, String index, boolean hits) throws SphinxException {
    /* serialize the request */
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buf);
    try {
      writeNetUTF8(out, query);
      writeNetUTF8(out, index);
      out.writeInt(hits ? 1 : 0);

    } catch (Exception e) {
      _error = "internal error: failed to build request: " + e;
      return null;
    }

    /* run the request */
    DataInputStream in = _DoRequest(SEARCHD_COMMAND_KEYWORDS, VER_COMMAND_KEYWORDS, buf);
    if (in == null) return null;

    /* parse the reply */
    try {
      int wordCount = in.readInt();
      Map[] words = new Map[wordCount];

      for (int i = 0; i < wordCount; i++) {
        Map info = new LinkedHashMap();
        info.put("tokenized", readNetUTF8(in));
        info.put("normalized", readNetUTF8(in));
        if (hits) {
          /* per-keyword statistics are only sent back when requested */
          info.put("docs", new Long(readDword(in)));
          info.put("hits", new Long(readDword(in)));
        }
        words[i] = info;
      }
      return words;

    } catch (Exception e) {
      _error = "incomplete reply";
      return null;
    }
  }
Example #3
0
 /**
  * Set float range filter. Only match records whose attribute value lies between min and max
  * (inclusive).
  *
  * @throws SphinxException if min is greater than max
  */
 public void SetFilterFloatRange(String attribute, float min, float max, boolean exclude)
     throws SphinxException {
   myAssert(min <= max, "min must be less or equal to max");
   try {
     /* append the serialized filter to the pending filter stream */
     writeNetUTF8(_filters, attribute);
     _filters.writeInt(SPH_FILTER_FLOATRANGE);
     _filters.writeFloat(min);
     _filters.writeFloat(max);
     _filters.writeInt(exclude ? 1 : 0);
   } catch (Exception e) {
     /* surface any unexpected I/O failure through myAssert */
     myAssert(false, "IOException: " + e.getMessage());
   }
   _filterCount++;
 }
Example #4
0
  /** Set values filter. Only match records whose attribute value is in the given set. */
  public void SetFilter(String attribute, long[] values, boolean exclude) throws SphinxException {
    myAssert(values != null && values.length > 0, "values array must not be null or empty");
    myAssert(
        attribute != null && attribute.length() > 0, "attribute name must not be null or empty");

    try {
      /* append the serialized filter to the pending filter stream */
      writeNetUTF8(_filters, attribute);
      _filters.writeInt(SPH_FILTER_VALUES);
      _filters.writeInt(values.length);
      for (int i = 0; i < values.length; i++) {
        _filters.writeLong(values[i]);
      }
      _filters.writeInt(exclude ? 1 : 0);

    } catch (Exception e) {
      /* surface any unexpected I/O failure through myAssert */
      myAssert(false, "IOException: " + e.getMessage());
    }
    _filterCount++;
  }
Example #5
0
  /**
   * Add new query with current settings to current search request.
   *
   * <p>Serializes all per-query client state (offset/limit, match/rank/sort settings, weights,
   * filters, group-by, geo anchor, per-index and per-field weights, overrides, select list) into a
   * byte buffer and appends it to the pending request list; presumably the batch is executed later
   * by a RunQueries-style call — not visible in this chunk, TODO confirm.
   *
   * @param query the full-text query string
   * @param index index name(s) to search
   * @param comment a comment string attached to this query
   * @return 0-based position of this query within the current batch, or -1 on failure
   * @throws SphinxException via myAssert() when request serialization fails
   */
  public int AddQuery(String query, String index, String comment) throws SphinxException {
    ByteArrayOutputStream req = new ByteArrayOutputStream();

    /* build request */
    try {
      DataOutputStream out = new DataOutputStream(req);
      out.writeInt(_offset);
      out.writeInt(_limit);
      out.writeInt(_mode);
      out.writeInt(_ranker);
      if (_ranker == SPH_RANK_EXPR) {
        /* the expression ranker carries its ranking expression inline */
        writeNetUTF8(out, _rankexpr);
      }
      out.writeInt(_sort);
      writeNetUTF8(out, _sortby);
      writeNetUTF8(out, query);
      int weightLen = _weights != null ? _weights.length : 0;

      /* weight list; length 0 when no weights were set */
      out.writeInt(weightLen);
      if (_weights != null) {
        for (int i = 0; i < _weights.length; i++) out.writeInt(_weights[i]);
      }

      writeNetUTF8(out, index);
      out.writeInt(0); // NOTE(review): presumably an id-range marker; confirm against protocol
      out.writeInt(_minId);
      out.writeInt(_maxId);

      /* filters: count, then the bytes pre-serialized by the SetFilter* calls */
      out.writeInt(_filterCount);
      out.write(_rawFilters.toByteArray());

      /* group-by, max matches, sort-by-group flag */
      out.writeInt(_groupFunc);
      writeNetUTF8(out, _groupBy);
      out.writeInt(_maxMatches);
      writeNetUTF8(out, _groupSort);

      out.writeInt(_cutoff);
      out.writeInt(_retrycount);
      out.writeInt(_retrydelay);

      writeNetUTF8(out, _groupDistinct);

      /* anchor point: flag 0 when no geo anchor is configured, else flag 1 plus the anchor data */
      if (_latitudeAttr == null
          || _latitudeAttr.length() == 0
          || _longitudeAttr == null
          || _longitudeAttr.length() == 0) {
        out.writeInt(0);
      } else {
        out.writeInt(1);
        writeNetUTF8(out, _latitudeAttr);
        writeNetUTF8(out, _longitudeAttr);
        out.writeFloat(_latitude);
        out.writeFloat(_longitude);
      }

      /* per-index weights */
      out.writeInt(_indexWeights.size());
      for (Iterator e = _indexWeights.keySet().iterator(); e.hasNext(); ) {
        String indexName = (String) e.next();
        Integer weight = (Integer) _indexWeights.get(indexName);
        writeNetUTF8(out, indexName);
        out.writeInt(weight.intValue());
      }

      /* max query time */
      out.writeInt(_maxQueryTime);

      /* per-field weights */
      out.writeInt(_fieldWeights.size());
      for (Iterator e = _fieldWeights.keySet().iterator(); e.hasNext(); ) {
        String field = (String) e.next();
        Integer weight = (Integer) _fieldWeights.get(field);
        writeNetUTF8(out, field);
        out.writeInt(weight.intValue());
      }

      /* comment */
      writeNetUTF8(out, comment);

      /* overrides: per-attribute maps of document id -> overriding value */
      out.writeInt(_overrideTypes.size());
      for (Iterator e = _overrideTypes.keySet().iterator(); e.hasNext(); ) {
        String attr = (String) e.next();
        Integer type = (Integer) _overrideTypes.get(attr);
        Map values = (Map) _overrideValues.get(attr);

        writeNetUTF8(out, attr);
        out.writeInt(type.intValue());
        out.writeInt(values.size());

        for (Iterator e2 = values.keySet().iterator(); e2.hasNext(); ) {
          Long id = (Long) e2.next();
          out.writeLong(id.longValue());
          /* the value's wire width depends on the declared attribute type */
          switch (type.intValue()) {
            case SPH_ATTR_FLOAT:
              out.writeFloat(((Float) values.get(id)).floatValue());
              break;
            case SPH_ATTR_BIGINT:
              out.writeLong(((Long) values.get(id)).longValue());
              break;
            default:
              out.writeInt(((Integer) values.get(id)).intValue());
              break;
          }
        }
      }

      /* select-list */
      writeNetUTF8(out, _select);

      /* done! append the serialized query to the batch and return its position */
      out.flush();
      int qIndex = _reqs.size();
      _reqs.add(qIndex, req.toByteArray());
      return qIndex;

    } catch (Exception e) {
      myAssert(false, "error in AddQuery(): " + e + ": " + e.getMessage());

    } finally {
      /* NOTE(review): the filter streams are closed on every call; this relies on close() being
         harmless for these in-memory streams — confirm they are reset elsewhere before reuse */
      try {
        _filters.close();
        _rawFilters.close();
      } catch (IOException e) {
        myAssert(false, "error in AddQuery(): " + e + ": " + e.getMessage());
      }
    }
    return -1;
  }
Example #6
0
  /**
   * Connect to searchd server and generate excerpts (snippets) from given documents.
   *
   * @param docs documents to build snippets from
   * @param index index to process the documents against
   * @param words words to highlight
   * @param opts maps String keys to String or Integer values (see the documentation for complete
   *     keys list); missing keys are filled in with defaults
   * @return null on failure, array of snippets on success.
   * @throws SphinxException on invalid parameters
   */
  public String[] BuildExcerpts(String[] docs, String index, String words, Map opts)
      throws SphinxException {
    myAssert(docs != null && docs.length > 0, "BuildExcerpts: Have no documents to process");
    myAssert(
        index != null && index.length() > 0, "BuildExcerpts: Have no index to process documents");
    myAssert(words != null && words.length > 0, "BuildExcerpts: Have no words to highlight");
    if (opts == null) opts = new LinkedHashMap();

    /* fill in defaults for any option the caller did not set */
    Object[][] defaults = {
      {"before_match", "<b>"},
      {"after_match", "</b>"},
      {"chunk_separator", "..."},
      {"html_strip_mode", "index"},
      {"limit", new Integer(256)},
      {"limit_passages", new Integer(0)},
      {"limit_words", new Integer(0)},
      {"around", new Integer(5)},
      {"start_passage_id", new Integer(1)},
      {"exact_phrase", new Integer(0)},
      {"single_passage", new Integer(0)},
      {"use_boundaries", new Integer(0)},
      {"weight_order", new Integer(0)},
      {"load_files", new Integer(0)},
      {"allow_empty", new Integer(0)},
      {"query_mode", new Integer(0)},
      {"force_all_words", new Integer(0)}
    };
    for (int i = 0; i < defaults.length; i++) {
      if (!opts.containsKey(defaults[i][0])) opts.put(defaults[i][0], defaults[i][1]);
    }

    /* serialize the request */
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buf);
    try {
      out.writeInt(0); // mode; this client always sends 0

      /* pack the boolean options into a bitmask; bit 0 (remove_spaces) is always set */
      String[] flagKeys = {
        "exact_phrase", "single_passage", "use_boundaries", "weight_order",
        "query_mode", "force_all_words", "load_files", "allow_empty"
      };
      int flags = 1; /* remove_spaces */
      for (int i = 0; i < flagKeys.length; i++) {
        if (((Integer) opts.get(flagKeys[i])).intValue() != 0) flags |= (2 << i);
      }
      out.writeInt(flags);
      writeNetUTF8(out, index);
      writeNetUTF8(out, words);

      /* send the string and integer options */
      writeNetUTF8(out, (String) opts.get("before_match"));
      writeNetUTF8(out, (String) opts.get("after_match"));
      writeNetUTF8(out, (String) opts.get("chunk_separator"));
      out.writeInt(((Integer) opts.get("limit")).intValue());
      out.writeInt(((Integer) opts.get("around")).intValue());

      out.writeInt(((Integer) opts.get("limit_passages")).intValue());
      out.writeInt(((Integer) opts.get("limit_words")).intValue());
      out.writeInt(((Integer) opts.get("start_passage_id")).intValue());
      writeNetUTF8(out, (String) opts.get("html_strip_mode"));

      /* send the documents themselves */
      out.writeInt(docs.length);
      for (int i = 0; i < docs.length; i++) writeNetUTF8(out, docs[i]);

      out.flush();

    } catch (Exception e) {
      _error = "internal error: failed to build request: " + e;
      return null;
    }

    DataInputStream in = _DoRequest(SEARCHD_COMMAND_EXCERPT, VER_COMMAND_EXCERPT, buf);
    if (in == null) return null;

    /* the reply carries exactly one snippet per input document */
    try {
      String[] res = new String[docs.length];
      for (int i = 0; i < docs.length; i++) res[i] = readNetUTF8(in);
      return res;

    } catch (Exception e) {
      _error = "incomplete reply";
      return null;
    }
  }