Example #1
  /**
   * Extracts the _version_ field from a JSON query response, handling the three response
   * shapes this test sees: a single "doc", a bare "docs" list, or a full search "response"
   * wrapping a "docs" list. Returns null if no document was found.
   */
  private static Long getVer(SolrQueryRequest req) throws Exception {
    String response = JQ(req);
    Map rsp = (Map) ObjectBuilder.fromJSON(response);
    Map doc = null;
    if (rsp.containsKey("doc")) {
      // a single document under "doc" (e.g. realtime get by id)
      doc = (Map) rsp.get("doc");
    } else if (rsp.containsKey("docs")) {
      // a bare list of documents under "docs"
      List lst = (List) rsp.get("docs");
      if (lst.size() > 0) {
        doc = (Map) lst.get(0);
      }
    } else if (rsp.containsKey("response")) {
      // a standard search response: "response" -> "docs"
      Map responseMap = (Map) rsp.get("response");
      List lst = (List) responseMap.get("docs");
      if (lst.size() > 0) {
        doc = (Map) lst.get(0);
      }
    }

    if (doc == null) return null;

    return (Long) doc.get("_version_");
  }
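
A hedged usage sketch (not part of the original excerpt): a helper like this is typically called against a realtime-get request to read back a document's _version_ after an update. The handler path "/get", the dynamic field name "name_s", and the SolrTestCaseJ4 helpers assertU/adoc/commit/req are assumptions about the surrounding test class; the real tests may differ.

  // Sketch only: index, read version, update, and check that the version increased.
  @Test
  public void testVersionRoundTrip() throws Exception {
    assertU(adoc("id", "1", "name_s", "first")); // index a document (field name is hypothetical)
    assertU(commit());

    Long v1 = getVer(req("qt", "/get", "id", "1")); // read back its _version_ via realtime get
    assertNotNull(v1);

    assertU(adoc("id", "1", "name_s", "second")); // update the same document
    assertU(commit());

    Long v2 = getVer(req("qt", "/get", "id", "1"));
    assertTrue(v2 > v1); // the version must increase on update
  }
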
Example #2
  @Test
  public void testRandomJoin() throws Exception {
    int indexIter = 50 * RANDOM_MULTIPLIER;
    int queryIter = 50 * RANDOM_MULTIPLIER;

    // Groups of fields that have some chance of matching; used to increase test
    // effectiveness by avoiding empty result sets much of the time.
    String[][] compat =
        new String[][] {
          {"small_s", "small2_s", "small2_ss", "small3_ss"},
          {"small_i", "small2_i", "small2_is", "small3_is"}
        };

    while (--indexIter >= 0) {
      int indexSize = random().nextInt(20 * RANDOM_MULTIPLIER);

      // field types for random document generation; value ranges scale with the index
      // size so joined fields have a reasonable chance of sharing values
      List<FldType> types = new ArrayList<>();
      types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4)));
      types.add(new FldType("score_f", ONE_ONE, new FVal(1, 100))); // field used to score
      types.add(
          new FldType("small_s", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1)));
      types.add(
          new FldType("small2_s", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1)));
      types.add(
          new FldType("small2_ss", ZERO_TWO, new SVal('a', (char) ('c' + indexSize / 3), 1, 1)));
      types.add(new FldType("small3_ss", new IRange(0, 25), new SVal('A', 'z', 1, 1)));
      types.add(new FldType("small_i", ZERO_ONE, new IRange(0, 5 + indexSize / 3)));
      types.add(new FldType("small2_i", ZERO_ONE, new IRange(0, 5 + indexSize / 3)));
      types.add(new FldType("small2_is", ZERO_TWO, new IRange(0, 5 + indexSize / 3)));
      types.add(new FldType("small3_is", new IRange(0, 25), new IRange(0, 100)));

      clearIndex();
      // index the random docs and keep an in-memory model for computing expected results
      Map<Comparable, Doc> model = indexDocs(types, null, indexSize);
      // cache of model-side join maps, keyed by "fromField/toField"
      Map<String, Map<Comparable, Set<Comparable>>> pivots = new HashMap<>();

      for (int qiter = 0; qiter < queryIter; qiter++) {
        String fromField;
        String toField;
        if (random().nextInt(100) < 5) {
          // pick random fields 5% of the time
          fromField = types.get(random().nextInt(types.size())).fname;
          // pick the same field 50% of the time we pick a random field (since other fields won't
          // match anything)
          toField =
              (random().nextInt(100) < 50)
                  ? fromField
                  : types.get(random().nextInt(types.size())).fname;
        } else {
          // otherwise, pick compatible fields that have a chance of matching indexed tokens
          String[] group = compat[random().nextInt(compat.length)];
          fromField = group[random().nextInt(group.length)];
          toField = group[random().nextInt(group.length)];
        }

        // reuse (or lazily build) the model-side join map for this from/to field pair
        Map<Comparable, Set<Comparable>> pivot = pivots.get(fromField + "/" + toField);
        if (pivot == null) {
          pivot = createJoinMap(model, fromField, toField);
          pivots.put(fromField + "/" + toField, pivot);
        }

        // compute the expected result from the in-memory model
        Collection<Doc> fromDocs = model.values();
        Set<Comparable> docs = join(fromDocs, pivot);
        List<Doc> docList = new ArrayList<>(docs.size());
        for (Comparable id : docs) docList.add(model.get(id));
        // sort by internal docid to match the order Solr returns when no sort is specified
        Collections.sort(docList, createComparator("_docid_", true, false, false, false));
        List<Object> sortedDocs = new ArrayList<>();
        for (Doc doc : docList) {
          if (sortedDocs.size() >= 10) break; // only the first page (default rows=10) is compared
          sortedDocs.add(doc.toObject(h.getCore().getLatestSchema()));
        }

        // expected "response" section to match against Solr's JSON output
        Map<String, Object> resultSet = new LinkedHashMap<>();
        resultSet.put("numFound", docList.size());
        resultSet.put("start", 0);
        resultSet.put("docs", sortedDocs);

        // todo: use different join queries for better coverage

        // occasionally add fromIndex=collection1: it names the same core, so the expected
        // results are unchanged but the cross-core join code path gets exercised
        SolrQueryRequest req =
            req(
                "wt", "json",
                "indent", "true",
                "echoParams", "all",
                "q",
                "{!join from="
                    + fromField
                    + " to="
                    + toField
                    + (random().nextInt(4) == 0 ? " fromIndex=collection1" : "")
                    + "}*:*");

        // run the real query and verify Solr's "response" section matches the model's expectation
        String strResponse = h.query(req);

        Object realResponse = ObjectBuilder.fromJSON(strResponse);
        String err = JSONTestUtil.matchObj("/response", realResponse, resultSet);
        if (err != null) {
          log.error(
              "JOIN MISMATCH: "
                  + err
                  + "\n\trequest="
                  + req
                  + "\n\tresult="
                  + strResponse
                  + "\n\texpected="
                  + JSONUtil.toJSON(resultSet)
                  + "\n\tmodel="
                  + JSONUtil.toJSON(model));

          // re-execute the request... good for putting a breakpoint here for debugging
          String rsp = h.query(req);

          fail(err);
        }
      }
    }
  }
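
The test above depends on two model-side helpers, createJoinMap and join, that are not included in this excerpt. Below is a minimal sketch of what they might look like, inferred only from how they are called; it assumes the test framework's Doc type exposes a public id field and a getValues(field) method returning that doc's values, and the real implementations may differ.

  // Sketch only: for each model doc id, build the set of doc ids it joins to.
  // Doc B is joined from doc A when some value of A's fromField appears in B's toField.
  private static Map<Comparable, Set<Comparable>> createJoinMap(
      Map<Comparable, Doc> model, String fromField, String toField) {
    // invert toField: value -> ids of docs containing that value (assumes Doc.getValues)
    Map<Comparable, Set<Comparable>> valueToIds = new HashMap<>();
    for (Map.Entry<Comparable, Doc> e : model.entrySet()) {
      List<Comparable> vals = e.getValue().getValues(toField);
      if (vals == null) continue;
      for (Comparable v : vals) {
        valueToIds.computeIfAbsent(v, k -> new HashSet<>()).add(e.getKey());
      }
    }

    // map each "from" doc id to the union of ids its fromField values point at
    Map<Comparable, Set<Comparable>> idToIds = new HashMap<>();
    for (Map.Entry<Comparable, Doc> e : model.entrySet()) {
      List<Comparable> vals = e.getValue().getValues(fromField);
      if (vals == null) continue;
      for (Comparable v : vals) {
        Set<Comparable> toIds = valueToIds.get(v);
        if (toIds == null) continue;
        idToIds.computeIfAbsent(e.getKey(), k -> new HashSet<>()).addAll(toIds);
      }
    }
    return idToIds;
  }

  // Sketch only: the model-side join unions the mapped id sets of the input docs.
  private static Set<Comparable> join(
      Collection<Doc> fromDocs, Map<Comparable, Set<Comparable>> joinMap) {
    Set<Comparable> ids = new HashSet<>();
    for (Doc doc : fromDocs) {
      Set<Comparable> mapped = joinMap.get(doc.id); // assumes a public id field on Doc
      if (mapped != null) ids.addAll(mapped);
    }
    return ids;
  }
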