// TODO: randomize
  public IndexSearcher setUp(Random random, Similarity similarity, int numDocs) throws IOException {
    // Build a payload-enabled index with three parallel fields per document and hand back a
    // searcher configured with the requested similarity. The opened reader is kept in 'reader'.
    Directory dir = new MockDirectoryWrapper(random, new RAMDirectory());
    IndexWriterConfig conf =
        new IndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer())
            .setSimilarity(similarity);
    IndexWriter writer = new IndexWriter(dir, conf);
    for (int docId = 0; docId < numDocs; docId++) {
      String english = English.intToEnglish(docId);
      Document doc = new Document();
      doc.add(new Field(FIELD, english, Field.Store.YES, Field.Index.ANALYZED));
      doc.add(
          new Field(
              MULTI_FIELD, english + "  " + english, Field.Store.YES, Field.Index.ANALYZED));
      doc.add(new Field(NO_PAYLOAD_FIELD, english, Field.Store.YES, Field.Index.ANALYZED));
      writer.addDocument(doc);
    }
    reader = IndexReader.open(writer, true);
    writer.close();

    IndexSearcher searcher = LuceneTestCase.newSearcher(reader);
    searcher.setSimilarity(similarity);
    return searcher;
  }
  /**
   * Basic test using Index &amp; Realtime Get with internal versioning. This test ensures routing
   * works correctly across versions.
   */
  public void testInternalVersion() throws Exception {
    createIndex("test");
    // Half the time exercise custom routing keys; a null key falls back to default routing.
    final boolean routing = randomBoolean();
    int numDocs = randomIntBetween(10, 20);
    for (int i = 0; i < numDocs; i++) {
      String routingKey = routing ? randomRealisticUnicodeOfLength(10) : null;
      String id = Integer.toString(i);
      // First index of a fresh id must report "created"; internal versioning starts at 1.
      assertThat(
          id,
          client()
              .prepareIndex("test", "type1", id)
              .setRouting(routingKey)
              .setSource("field1", English.intToEnglish(i))
              .get()
              .isCreated(),
          is(true));
      // Realtime GET pinned to version 1 must find the doc via the same routing key.
      GetResponse get =
          client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(1).get();
      assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
      assertThat(get.getVersion(), equalTo(1L));
      // Reindexing the same id must bump the internal version to 2.
      client()
          .prepareIndex("test", "type1", id)
          .setRouting(routingKey)
          .setSource("field1", English.intToEnglish(i))
          .execute()
          .actionGet();
      get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(2).get();
      assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
      assertThat(get.getVersion(), equalTo(2L));
    }

    assertVersionCreated(compatibilityVersion(), "test");
  }
  /** Upgrades a single node to the current version */
  public void testIndexUpgradeSingleNode() throws Exception {
    // Keep all shards off the new-version nodes to start with.
    assertAcked(
        prepareCreate("test")
            .setSettings(
                Settings.builder()
                    .put(
                        "index.routing.allocation.exclude._name",
                        backwardsCluster().newNodePattern())
                    .put(indexSettings())));
    ensureYellow();
    int numDocs = randomIntBetween(100, 150);
    IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
    for (int i = 0; i < numDocs; i++) {
      docs[i] =
          client()
              .prepareIndex("test", "type1", String.valueOf(i))
              .setSource(
                  "field1",
                  English.intToEnglish(i),
                  "num_int",
                  randomInt(),
                  "num_double",
                  randomDouble());
    }

    indexRandom(true, docs);
    assertAllShardsOnNodes("test", backwardsCluster().backwardsNodePattern());
    // Freeze allocation so the node upgrade itself (not rebalancing) is what we observe.
    disableAllocation("test");
    backwardsCluster().allowOnAllNodes("test");
    SearchResponse countResponse = client().prepareSearch().setSize(0).get();
    assertHitCount(countResponse, numDocs);
    backwardsCluster().upgradeOneNode();
    ensureYellow();
    // Sometimes overwrite all docs (same ids) while the cluster is mixed-version.
    if (randomBoolean()) {
      for (int i = 0; i < numDocs; i++) {
        docs[i] =
            client()
                .prepareIndex("test", "type1", String.valueOf(i))
                .setSource(
                    "field1",
                    English.intToEnglish(i),
                    "num_int",
                    randomInt(),
                    "num_double",
                    randomDouble());
      }
      indexRandom(true, docs);
    }
    enableAllocation("test");
    ensureYellow();
    // Repeat count/sort checks a few times to catch unstable shard states after the upgrade.
    final int numIters = randomIntBetween(1, 20);
    for (int i = 0; i < numIters; i++) {
      assertHitCount(client().prepareSearch().setSize(0).get(), numDocs);
      assertSimpleSort("num_double", "num_int");
    }
    assertVersionCreated(compatibilityVersion(), "test");
  }
  @Test
  public void testMultipleRescores() throws Exception {
    // Each rescorer REPLACEs a single matching doc's score with a large constant, so the final
    // ordering makes it obvious which rescorers actually took effect.
    int numDocs = indexRandomNumbers("keyword", 1);
    QueryRescorer eightIsGreat =
        RescoreBuilder.queryRescorer(
                QueryBuilders.functionScoreQuery(
                        QueryBuilders.termQuery("field1", English.intToEnglish(8)))
                    .boostMode(CombineFunction.REPLACE)
                    .add(ScoreFunctionBuilders.scriptFunction("1000.0f")))
            .setScoreMode("total");
    QueryRescorer sevenIsBetter =
        RescoreBuilder.queryRescorer(
                QueryBuilders.functionScoreQuery(
                        QueryBuilders.termQuery("field1", English.intToEnglish(7)))
                    .boostMode(CombineFunction.REPLACE)
                    .add(ScoreFunctionBuilders.scriptFunction("10000.0f")))
            .setScoreMode("total");

    // First set the rescore window large enough that both rescores take effect
    SearchRequestBuilder request = client().prepareSearch().setRescoreWindow(numDocs);
    request.addRescorer(eightIsGreat).addRescorer(sevenIsBetter);
    SearchResponse response = request.get();
    assertFirstHit(response, hasId("7"));
    assertSecondHit(response, hasId("8"));

    // Now squash the second rescore window so it never gets to see a seven
    response =
        request
            .setSize(1)
            .clearRescorers()
            .addRescorer(eightIsGreat)
            .addRescorer(sevenIsBetter, 1)
            .get();
    assertFirstHit(response, hasId("8"));
    // We have no idea what the second hit will be because we didn't get a chance to look for seven

    // Now use one rescore to drag the number we're looking for into the window of another
    QueryRescorer ninetyIsGood =
        RescoreBuilder.queryRescorer(
                QueryBuilders.functionScoreQuery(QueryBuilders.queryString("*ninety*"))
                    .boostMode(CombineFunction.REPLACE)
                    .add(ScoreFunctionBuilders.scriptFunction("1000.0f")))
            .setScoreMode("total");
    QueryRescorer oneToo =
        RescoreBuilder.queryRescorer(
                QueryBuilders.functionScoreQuery(QueryBuilders.queryString("*one*"))
                    .boostMode(CombineFunction.REPLACE)
                    .add(ScoreFunctionBuilders.scriptFunction("1000.0f")))
            .setScoreMode("total");
    request.clearRescorers().addRescorer(ninetyIsGood).addRescorer(oneToo, 10);
    response = request.setSize(2).get();
    // "ninety one" matches BOTH rescorers: 1000 + 1000 + base score ~1 => 2001.
    assertFirstHit(response, hasId("91"));
    assertFirstHit(response, hasScore(2001.0f));
    assertSecondHit(
        response, hasScore(1001.0f)); // Not sure which one it is but it is ninety something
  }
// ===== Example #5 (snippet-collection separator; original marker "示例#5" / vote count "0") =====
  /** Creates a fresh on-disk index holding {@code docsToAdd} single-field documents. */
  @Override
  public void setUp() throws Exception {
    super.setUp();
    indexDir = _TestUtil.getTempDir("RAMDirIndex");

    Directory fsDir = newFSDirectory(indexDir);
    IndexWriterConfig writerConfig =
        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
            .setOpenMode(OpenMode.CREATE);
    IndexWriter indexWriter = new IndexWriter(fsDir, writerConfig);
    // One doc per integer, indexed verbatim (not analyzed) under "content".
    for (int docNum = 0; docNum < docsToAdd; docNum++) {
      Document document = new Document();
      document.add(
          newField(
              "content",
              English.intToEnglish(docNum).trim(),
              Field.Store.YES,
              Field.Index.NOT_ANALYZED));
      indexWriter.addDocument(document);
    }
    assertEquals(docsToAdd, indexWriter.maxDoc());
    indexWriter.close();
    fsDir.close();
  }
  /**
   * Creates between one and five benchmark indices, each populated with a random number of
   * randomly valued documents, then flushes and refreshes so everything is searchable.
   *
   * @return the names of the indices that were created
   */
  private String[] randomData() throws Exception {
    final int indexCount = scaledRandomIntBetween(1, 5);
    final String[] indexNames = new String[indexCount];

    for (int idx = 0; idx < indexCount; idx++) {
      indexNames[idx] = INDEX_PREFIX + idx;
      final int docCount = scaledRandomIntBetween(1, 100);
      final IndexRequestBuilder[] requests = new IndexRequestBuilder[docCount];

      for (int docNum = 0; docNum < docCount; docNum++) {
        requests[docNum] =
            client()
                .prepareIndex(indexNames[idx], INDEX_TYPE)
                .setSource(
                    BenchmarkTestUtil.TestIndexField.INT_FIELD.toString(), randomInt(),
                    BenchmarkTestUtil.TestIndexField.FLOAT_FIELD.toString(), randomFloat(),
                    BenchmarkTestUtil.TestIndexField.BOOLEAN_FIELD.toString(), randomBoolean(),
                    BenchmarkTestUtil.TestIndexField.STRING_FIELD.toString(),
                        English.intToEnglish(docNum));
      }

      indexRandom(true, requests);
    }

    flushAndRefresh();
    return indexNames;
  }
 /** Seeds {@code dir} with seven documents, each holding one random spelled-out integer. */
 public void initIndex(Directory dir) throws Throwable {
   IndexWriter w =
       new IndexWriter(
           dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
   int remaining = 7;
   while (remaining-- > 0) {
     int value = random().nextInt();
     Document document = new Document();
     document.add(newTextField("contents", English.intToEnglish(value), Field.Store.NO));
     w.addDocument(document);
   }
   w.close();
 }
 /**
  * Basic test using Index &amp; Realtime Get with external versioning. This test ensures routing
  * works correctly across versions.
  */
 public void testExternalVersion() throws Exception {
   createIndex("test");
   // Half the time exercise custom routing keys; a null key falls back to default routing.
   final boolean routing = randomBoolean();
   int numDocs = randomIntBetween(10, 20);
   for (int i = 0; i < numDocs; i++) {
     String id = Integer.toString(i);
     String routingKey = routing ? randomRealisticUnicodeOfLength(10) : null;
     final long version = randomIntBetween(0, Integer.MAX_VALUE);
     // Index with an externally supplied version; a realtime GET pinned to it must see the doc.
     client()
         .prepareIndex("test", "type1", id)
         .setRouting(routingKey)
         .setVersion(version)
         .setVersionType(VersionType.EXTERNAL)
         .setSource("field1", English.intToEnglish(i))
         .get();
     GetResponse get =
         client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(version).get();
     assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
     assertThat(get.getVersion(), equalTo(version));
     // BUGFIX: the increment must be at least 1. External versioning rejects writes whose
     // version is not strictly greater than the stored one, so the previous lower bound of 0
     // could make the reindex below fail with a version conflict.
     final long nextVersion = version + randomIntBetween(1, Integer.MAX_VALUE);
     client()
         .prepareIndex("test", "type1", id)
         .setRouting(routingKey)
         .setVersion(nextVersion)
         .setVersionType(VersionType.EXTERNAL)
         .setSource("field1", English.intToEnglish(i))
         .get();
     get =
         client()
             .prepareGet("test", "type1", id)
             .setRouting(routingKey)
             .setVersion(nextVersion)
             .get();
     assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
     assertThat(get.getVersion(), equalTo(nextVersion));
   }
 }
 /**
  * Test that ensures that we will never recover from a newer to an older version (we are not
  * forward compatible)
  */
 public void testNoRecoveryFromNewNodes() throws ExecutionException, InterruptedException {
   // Pin all shards onto the NEW nodes by excluding the old-version node name pattern.
   assertAcked(
       prepareCreate("test")
           .setSettings(
               Settings.builder()
                   .put(
                       "index.routing.allocation.exclude._name",
                       backwardsCluster().backwardsNodePattern())
                   .put(indexSettings())));
   if (backwardsCluster().numNewDataNodes() == 0) {
     backwardsCluster().startNewNode();
   }
   ensureYellow();
   assertAllShardsOnNodes("test", backwardsCluster().newNodePattern());
   if (randomBoolean()) {
     backwardsCluster().allowOnAllNodes("test");
   }
   int numDocs = randomIntBetween(100, 150);
   IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
   for (int i = 0; i < numDocs; i++) {
     docs[i] =
         client()
             .prepareIndex("test", "type1", randomRealisticUnicodeOfLength(10) + String.valueOf(i))
             .setSource(
                 "field1",
                 English.intToEnglish(i),
                 "num_int",
                 randomInt(),
                 "num_double",
                 randomDouble());
   }
   indexRandom(true, docs);
   backwardsCluster().allowOnAllNodes("test");
   // Replicas can only recover onto new-version nodes; keep adding them until we go green.
   while (ensureYellow() != ClusterHealthStatus.GREEN) {
     backwardsCluster().startNewNode();
   }
   // Even with old nodes now allowed, shards must have stayed on the new-version nodes.
   assertAllShardsOnNodes("test", backwardsCluster().newNodePattern());
   SearchResponse countResponse = client().prepareSearch().setSize(0).get();
   assertHitCount(countResponse, numDocs);
   final int numIters = randomIntBetween(10, 20);
   for (int i = 0; i < numIters; i++) {
     countResponse = client().prepareSearch().setSize(0).get();
     assertHitCount(countResponse, numDocs);
     assertSimpleSort("num_double", "num_int");
   }
   assertVersionCreated(compatibilityVersion(), "test");
 }
 /**
  * Very basic bw compat test with a mixed version cluster random indexing and lookup by ID via
  * term query
  */
 public void testIndexAndSearch() throws Exception {
   createIndex("test");
   int numDocs = randomIntBetween(10, 20);
   List<IndexRequestBuilder> requests = new ArrayList<>();
   for (int docNum = 0; docNum < numDocs; docNum++) {
     String docId = Integer.toString(docNum);
     requests.add(
         client()
             .prepareIndex("test", "type1", docId)
             .setSource("field1", English.intToEnglish(docNum), "the_id", docId));
   }
   indexRandom(true, requests);
   // Every id must be findable via a term query on the stored "the_id" field.
   for (int docNum = 0; docNum < numDocs; docNum++) {
     String docId = Integer.toString(docNum);
     assertHitCount(
         client().prepareSearch().setQuery(QueryBuilders.termQuery("the_id", docId)).get(), 1);
   }
   assertVersionCreated(compatibilityVersion(), "test");
 }
// ===== Example #11 (snippet-collection separator; original marker "示例#11" / vote count "0") =====
    /** Adds 10 term-vector docs with fresh ids, then deletes every other one of those ids. */
    public void update(IndexWriter writer) throws IOException {
      FieldType withVectors = new FieldType(StringField.TYPE_NOT_STORED);
      withVectors.setStoreTermVectors(true);
      // Add 10 docs:
      for (int added = 0; added < 10; added++) {
        int value = random().nextInt();
        Document document = new Document();
        document.add(newField("id", Integer.toString(nextID++), withVectors));
        document.add(newTextField("contents", English.intToEnglish(value), Field.Store.NO));
        writer.addDocument(document);
      }

      // Delete 5 docs, stepping backwards by two from the most recently assigned id:
      for (int deleteID = nextID - 1, deleted = 0; deleted < 5; deleted++, deleteID -= 2) {
        writer.deleteDocuments(new Term("id", "" + deleteID));
      }
    }
  /**
   * Creates the "test" index with {@code field1} mapped to the given analyzer, then indexes at
   * least 100 documents whose field1 is the spelled-out doc number.
   *
   * @param analyzer analyzer name to configure on field1
   * @param shards number of primary shards; values &lt;= 0 keep the default
   * @return the number of documents indexed
   */
  private int indexRandomNumbers(String analyzer, int shards) throws Exception {
    Builder builder =
        ImmutableSettings.settingsBuilder()
            .put(indexSettings())
            .put(SETTING_NUMBER_OF_REPLICAS, between(0, 1));

    if (shards > 0) {
      builder.put(SETTING_NUMBER_OF_SHARDS, shards);
    }

    client()
        .admin()
        .indices()
        .prepareCreate("test")
        .addMapping(
            "type1",
            jsonBuilder()
                .startObject()
                .startObject("type1")
                .startObject("properties")
                .startObject("field1")
                .field("analyzer", analyzer)
                .field("type", "string")
                .endObject()
                .endObject()
                .endObject()
                .endObject())
        .setSettings(builder)
        .get();
    int numDocs = atLeast(100);
    IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
    for (int i = 0; i < numDocs; i++) {
      docs[i] =
          client()
              .prepareIndex("test", "type1", String.valueOf(i))
              .setSource("field1", English.intToEnglish(i));
    }

    indexRandom(true, docs);
    ensureGreen();
    return numDocs;
  }
 @Override
 public void setUp() throws Exception {
   super.setUp();
   dir = newDirectory();
   RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
   // At least 2049 docs — presumably to exceed some internal threshold/segment size; TODO confirm.
   int numDocs = TestUtil.nextInt(random(), 2049, 4000);
   for (int i = 0; i < numDocs; i++) {
     Document document = new Document();
     document.add(newTextField("english", English.intToEnglish(i), Field.Store.NO));
     document.add(newTextField("oddeven", (i % 2 == 0) ? "even" : "odd", Field.Store.NO));
     document.add(new NumericDocValuesField("int", random().nextInt()));
     document.add(new NumericDocValuesField("long", random().nextLong()));
     document.add(new FloatDocValuesField("float", random().nextFloat()));
     document.add(new DoubleDocValuesField("double", random().nextDouble()));
     // Exactly one document (i == 545) carries the "onefield" docvalue.
     if (i == 545) document.add(new DoubleDocValuesField("onefield", 45.72));
     iw.addDocument(document);
   }
   reader = iw.getReader();
   iw.close();
   searcher = newSearcher(reader);
 }
  /**
   * Allocates an index on old-version nodes, indexes documents, then allows or forces shards onto
   * new-version nodes and verifies every document is still findable after relocation/recovery.
   */
  public void testRecoverFromPreviousVersion() throws ExecutionException, InterruptedException {
    if (backwardsCluster().numNewDataNodes() == 0) {
      backwardsCluster().startNewNode();
    }
    // Pin all shards onto the old-version nodes to start with.
    assertAcked(
        prepareCreate("test")
            .setSettings(
                Settings.builder()
                    .put(
                        "index.routing.allocation.exclude._name",
                        backwardsCluster().newNodePattern())
                    .put(indexSettings())));
    ensureYellow();
    assertAllShardsOnNodes("test", backwardsCluster().backwardsNodePattern());
    int numDocs = randomIntBetween(100, 150);
    ArrayList<String> ids = new ArrayList<>();
    logger.info(" --> indexing [{}] docs", numDocs);
    IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
    for (int i = 0; i < numDocs; i++) {
      String id = randomRealisticUnicodeOfLength(10) + String.valueOf(i);
      ids.add(id);
      docs[i] =
          client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(i));
    }
    indexRandom(true, docs);
    SearchResponse countResponse = client().prepareSearch().setSize(0).get();
    assertHitCount(countResponse, numDocs);

    if (randomBoolean()) {
      logger.info(" --> moving index to new nodes");
      backwardsCluster().allowOnlyNewNodes("test");
    } else {
      logger.info(" --> allow index on all nodes");
      backwardsCluster().allowOnAllNodes("test");
    }

    logger.info(" --> indexing [{}] more docs", numDocs);
    // sometimes index while relocating
    if (randomBoolean()) {
      for (int i = 0; i < numDocs; i++) {
        String id = randomRealisticUnicodeOfLength(10) + String.valueOf(numDocs + i);
        ids.add(id);
        docs[i] =
            client()
                .prepareIndex("test", "type1", id)
                .setSource("field1", English.intToEnglish(numDocs + i));
      }
      indexRandom(true, docs);
      if (compatibilityVersion().before(Version.V_1_3_0)) {
        // issue another refresh through a new node to side step issue #6545
        assertNoFailures(
            backwardsCluster()
                .internalCluster()
                .dataNodeClient()
                .admin()
                .indices()
                .prepareRefresh()
                .setIndicesOptions(IndicesOptions.lenientExpandOpen())
                .execute()
                .get());
      }
      numDocs *= 2;
    }

    // BUGFIX: the original message had no '{}' placeholder, so the numDocs argument was
    // silently dropped by the logger; the argument served no purpose and is removed.
    logger.info(" --> waiting for relocation to complete");
    ensureYellow("test"); // move all shards to the new node (it waits on relocation)
    final int numIters = randomIntBetween(10, 20);
    for (int i = 0; i < numIters; i++) {
      assertSearchHits(
          client().prepareSearch().setSize(ids.size()).get(), ids.toArray(new String[ids.size()]));
    }
    assertVersionCreated(compatibilityVersion(), "test");
  }
  /**
   * Indexes and searches against an index whose directory randomly throws IO exceptions, making
   * sure no files are leaked and the index is not left broken. Assertions are only made when all
   * shards responded and the refresh succeeded, since random failures make results best-effort.
   */
  @Test
  public void testRandomExceptions() throws IOException, InterruptedException, ExecutionException {
    final int numShards = between(1, 5);
    String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("test")
            .field("type", "string")
            .field("index", "not_analyzed")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
    // Pick random exception rates: mostly one or both of read-failure / open-failure enabled.
    final double exceptionRate;
    final double exceptionOnOpenRate;
    if (frequently()) {
      if (randomBoolean()) {
        if (randomBoolean()) {
          exceptionOnOpenRate = 1.0 / between(5, 100);
          exceptionRate = 0.0d;
        } else {
          exceptionRate = 1.0 / between(5, 100);
          exceptionOnOpenRate = 0.0d;
        }
      } else {
        exceptionOnOpenRate = 1.0 / between(5, 100);
        exceptionRate = 1.0 / between(5, 100);
      }
    } else {
      // rarely no exception
      exceptionRate = 0d;
      exceptionOnOpenRate = 0d;
    }

    Builder settings =
        settingsBuilder()
            .put("index.number_of_shards", numShards)
            .put("index.number_of_replicas", randomIntBetween(0, 1))
            .put(MockDirectoryHelper.RANDOM_IO_EXCEPTION_RATE, exceptionRate)
            .put(MockDirectoryHelper.RANDOM_IO_EXCEPTION_RATE_ON_OPEN, exceptionOnOpenRate)
            .put(MockDirectoryHelper.CHECK_INDEX_ON_CLOSE, true);
    logger.info("creating index: [test] using settings: [{}]", settings.build().getAsMap());
    client()
        .admin()
        .indices()
        .prepareCreate("test")
        .setSettings(settings)
        .addMapping("type", mapping)
        .execute()
        .actionGet();
    ClusterHealthResponse clusterHealthResponse =
        client()
            .admin()
            .cluster()
            .health(
                Requests.clusterHealthRequest()
                    .waitForYellowStatus()
                    .timeout(TimeValue.timeValueSeconds(5)))
            .get(); // it's OK to timeout here
    final int numDocs;
    final boolean expectAllShardsFailed;
    if (clusterHealthResponse.isTimedOut()) {
      /* some seeds just won't let you create the index at all and we enter a ping-pong mode
       * trying one node after another etc. that is ok but we need to make sure we don't wait
       * forever when indexing documents so we set numDocs = 1 and expected all shards to fail
       * when we search below.*/
      logger.info("ClusterHealth timed out - only index one doc and expect searches to fail");
      numDocs = 1;
      expectAllShardsFailed = true;
    } else {
      numDocs = between(10, 100);
      expectAllShardsFailed = false;
    }
    long numCreated = 0;
    // added[i] records which docs were confirmed created, so searches below know what to expect.
    boolean[] added = new boolean[numDocs];
    for (int i = 0; i < numDocs; i++) {
      try {
        IndexResponse indexResponse =
            client()
                .prepareIndex("test", "type", "" + i)
                .setTimeout(TimeValue.timeValueSeconds(1))
                .setSource("test", English.intToEnglish(i))
                .get();
        if (indexResponse.isCreated()) {
          numCreated++;
          added[i] = true;
        }
      } catch (ElasticSearchException ex) {
        // intentionally ignored: random IO exceptions may fail individual index requests;
        // added[i] simply stays false for those docs.
      }
    }
    logger.info("Start Refresh");
    RefreshResponse refreshResponse =
        client()
            .admin()
            .indices()
            .prepareRefresh("test")
            .execute()
            .get(); // don't assert on failures here
    final boolean refreshFailed =
        refreshResponse.getShardFailures().length != 0 || refreshResponse.getFailedShards() != 0;
    logger.info(
        "Refresh failed [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ",
        refreshFailed,
        refreshResponse.getFailedShards(),
        refreshResponse.getShardFailures().length,
        refreshResponse.getSuccessfulShards(),
        refreshResponse.getTotalShards());

    final int numSearches = atLeast(10);
    // we don't check anything here really just making sure we don't leave any open files or a
    // broken index behind.
    for (int i = 0; i < numSearches; i++) {
      try {
        int docToQuery = between(0, numDocs - 1);
        long expectedResults = added[docToQuery] ? 1 : 0;
        logger.info("Searching for [test:{}]", English.intToEnglish(docToQuery));
        SearchResponse searchResponse =
            client()
                .prepareSearch()
                .setQuery(QueryBuilders.matchQuery("test", English.intToEnglish(docToQuery)))
                .get();
        logger.info(
            "Successful shards: [{}]  numShards: [{}]",
            searchResponse.getSuccessfulShards(),
            numShards);
        // Only assert hit counts when every shard answered and the refresh succeeded.
        if (searchResponse.getSuccessfulShards() == numShards && !refreshFailed) {
          assertThat(searchResponse.getHits().getTotalHits(), Matchers.equalTo(expectedResults));
        }
        // check match all
        searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get();
        if (searchResponse.getSuccessfulShards() == numShards && !refreshFailed) {
          assertThat(searchResponse.getHits().getTotalHits(), Matchers.equalTo(numCreated));
        }

      } catch (SearchPhaseExecutionException ex) {
        if (!expectAllShardsFailed) {
          throw ex;
        } else {
          logger.info("expected SearchPhaseException: [{}]", ex.getMessage());
        }
      }
    }
  }
  /**
   * Test that allocates an index on one or more old nodes and then do a rolling upgrade one node
   * after another is shut down and restarted from a newer version and we verify that all documents
   * are still around after each nodes upgrade.
   */
  public void testIndexRollingUpgrade() throws Exception {
    String[] indices = new String[randomIntBetween(1, 3)];
    for (int i = 0; i < indices.length; i++) {
      indices[i] = "test" + i;
      // Keep shards off the new-version nodes until we explicitly allow them below.
      assertAcked(
          prepareCreate(indices[i])
              .setSettings(
                  Settings.builder()
                      .put(
                          "index.routing.allocation.exclude._name",
                          backwardsCluster().newNodePattern())
                      .put(indexSettings())));
    }

    int numDocs = randomIntBetween(100, 150);
    IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
    // Remember which index each doc went to so overwrites during the upgrade hit the same index.
    String[] indexForDoc = new String[docs.length];
    for (int i = 0; i < numDocs; i++) {
      docs[i] =
          client()
              .prepareIndex(
                  indexForDoc[i] = RandomPicks.randomFrom(getRandom(), indices),
                  "type1",
                  String.valueOf(i))
              .setSource(
                  "field1",
                  English.intToEnglish(i),
                  "num_int",
                  randomInt(),
                  "num_double",
                  randomDouble());
    }
    indexRandom(true, docs);
    for (String index : indices) {
      assertAllShardsOnNodes(index, backwardsCluster().backwardsNodePattern());
    }
    // Freeze allocation so each upgrade step, not rebalancing, is what we observe.
    disableAllocation(indices);
    backwardsCluster().allowOnAllNodes(indices);
    logClusterState();
    boolean upgraded;
    do {
      logClusterState();
      SearchResponse countResponse = client().prepareSearch().setSize(0).get();
      assertHitCount(countResponse, numDocs);
      assertSimpleSort("num_double", "num_int");
      // Upgrade one node per iteration until none are left on the old version.
      upgraded = backwardsCluster().upgradeOneNode();
      ensureYellow();
      countResponse = client().prepareSearch().setSize(0).get();
      assertHitCount(countResponse, numDocs);
      // Overwrite every doc (same ids) while the cluster is mixed-version.
      for (int i = 0; i < numDocs; i++) {
        docs[i] =
            client()
                .prepareIndex(indexForDoc[i], "type1", String.valueOf(i))
                .setSource(
                    "field1",
                    English.intToEnglish(i),
                    "num_int",
                    randomInt(),
                    "num_double",
                    randomDouble());
      }
      indexRandom(true, docs);
    } while (upgraded);
    enableAllocation(indices);
    ensureYellow();
    SearchResponse countResponse = client().prepareSearch().setSize(0).get();
    assertHitCount(countResponse, numDocs);
    assertSimpleSort("num_double", "num_int");

    String[] newIndices = new String[randomIntBetween(1, 3)];

    for (int i = 0; i < newIndices.length; i++) {
      newIndices[i] = "new_index" + i;
      createIndex(newIndices[i]);
    }
    assertVersionCreated(
        Version.CURRENT, newIndices); // new indices are all created with the new version
    assertVersionCreated(compatibilityVersion(), indices);
  }
  @Test
  public void testScoring() throws Exception {
    int numDocs = indexRandomNumbers("keyword");

    String[] scoreModes = new String[] {"max", "min", "avg", "total", "multiply", ""};
    float primaryWeight = 1.1f;
    float secondaryWeight = 1.6f;

    for (String scoreMode : scoreModes) {
      for (int i = 0; i < numDocs - 4; i++) {
        String[] intToEnglish =
            new String[] {
              English.intToEnglish(i),
              English.intToEnglish(i + 1),
              English.intToEnglish(i + 2),
              English.intToEnglish(i + 3)
            };

        QueryRescorer rescoreQuery =
            RescoreBuilder.queryRescorer(
                    QueryBuilders.boolQuery()
                        .disableCoord(true)
                        .should(
                            QueryBuilders.functionScoreQuery(
                                    QueryBuilders.termQuery("field1", intToEnglish[0]))
                                .boostMode(CombineFunction.REPLACE)
                                .add(ScoreFunctionBuilders.scriptFunction("5.0f")))
                        .should(
                            QueryBuilders.functionScoreQuery(
                                    QueryBuilders.termQuery("field1", intToEnglish[1]))
                                .boostMode(CombineFunction.REPLACE)
                                .add(ScoreFunctionBuilders.scriptFunction("7.0f")))
                        .should(
                            QueryBuilders.functionScoreQuery(
                                    QueryBuilders.termQuery("field1", intToEnglish[3]))
                                .boostMode(CombineFunction.REPLACE)
                                .add(ScoreFunctionBuilders.scriptFunction("0.0f"))))
                .setQueryWeight(primaryWeight)
                .setRescoreQueryWeight(secondaryWeight);

        if (!"".equals(scoreMode)) {
          rescoreQuery.setScoreMode(scoreMode);
        }

        SearchResponse rescored =
            client()
                .prepareSearch()
                .setPreference("test") // ensure we hit the same shards for tie-breaking
                .setQuery(
                    QueryBuilders.boolQuery()
                        .disableCoord(true)
                        .should(
                            QueryBuilders.functionScoreQuery(
                                    QueryBuilders.termQuery("field1", intToEnglish[0]))
                                .boostMode(CombineFunction.REPLACE)
                                .add(ScoreFunctionBuilders.scriptFunction("2.0f")))
                        .should(
                            QueryBuilders.functionScoreQuery(
                                    QueryBuilders.termQuery("field1", intToEnglish[1]))
                                .boostMode(CombineFunction.REPLACE)
                                .add(ScoreFunctionBuilders.scriptFunction("3.0f")))
                        .should(
                            QueryBuilders.functionScoreQuery(
                                    QueryBuilders.termQuery("field1", intToEnglish[2]))
                                .boostMode(CombineFunction.REPLACE)
                                .add(ScoreFunctionBuilders.scriptFunction("5.0f")))
                        .should(
                            QueryBuilders.functionScoreQuery(
                                    QueryBuilders.termQuery("field1", intToEnglish[3]))
                                .boostMode(CombineFunction.REPLACE)
                                .add(ScoreFunctionBuilders.scriptFunction("0.2f"))))
                .setFrom(0)
                .setSize(10)
                .setRescorer(rescoreQuery)
                .setRescoreWindow(50)
                .execute()
                .actionGet();

        assertHitCount(rescored, 4);

        if ("total".equals(scoreMode) || "".equals(scoreMode)) {
          assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
          assertSecondHit(rescored, hasId(String.valueOf(i)));
          assertThirdHit(rescored, hasId(String.valueOf(i + 2)));
          assertThat(
              rescored.getHits().getHits()[0].getScore(),
              equalTo(3.0f * primaryWeight + 7.0f * secondaryWeight));
          assertThat(
              rescored.getHits().getHits()[1].getScore(),
              equalTo(2.0f * primaryWeight + 5.0f * secondaryWeight));
          assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
          assertThat(
              rescored.getHits().getHits()[3].getScore(),
              equalTo(0.2f * primaryWeight + 0.0f * secondaryWeight));
        } else if ("max".equals(scoreMode)) {
          assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
          assertSecondHit(rescored, hasId(String.valueOf(i)));
          assertThirdHit(rescored, hasId(String.valueOf(i + 2)));
          assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(7.0f * secondaryWeight));
          assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(5.0f * secondaryWeight));
          assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
          assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.2f * primaryWeight));
        } else if ("min".equals(scoreMode)) {
          assertFirstHit(rescored, hasId(String.valueOf(i + 2)));
          assertSecondHit(rescored, hasId(String.valueOf(i + 1)));
          assertThirdHit(rescored, hasId(String.valueOf(i)));
          assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(5.0f * primaryWeight));
          assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(3.0f * primaryWeight));
          assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(2.0f * primaryWeight));
          assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.0f * secondaryWeight));
        } else if ("avg".equals(scoreMode)) {
          assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
          assertSecondHit(rescored, hasId(String.valueOf(i + 2)));
          assertThirdHit(rescored, hasId(String.valueOf(i)));
          assertThat(
              rescored.getHits().getHits()[0].getScore(),
              equalTo((3.0f * primaryWeight + 7.0f * secondaryWeight) / 2.0f));
          assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(5.0f * primaryWeight));
          assertThat(
              rescored.getHits().getHits()[2].getScore(),
              equalTo((2.0f * primaryWeight + 5.0f * secondaryWeight) / 2.0f));
          assertThat(
              rescored.getHits().getHits()[3].getScore(), equalTo((0.2f * primaryWeight) / 2.0f));
        } else if ("multiply".equals(scoreMode)) {
          assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
          assertSecondHit(rescored, hasId(String.valueOf(i)));
          assertThirdHit(rescored, hasId(String.valueOf(i + 2)));
          assertThat(
              rescored.getHits().getHits()[0].getScore(),
              equalTo(3.0f * primaryWeight * 7.0f * secondaryWeight));
          assertThat(
              rescored.getHits().getHits()[1].getScore(),
              equalTo(2.0f * primaryWeight * 5.0f * secondaryWeight));
          assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
          assertThat(
              rescored.getHits().getHits()[3].getScore(),
              equalTo(0.2f * primaryWeight * 0.0f * secondaryWeight));
        }
      }
    }
  }
  @Test
  // forces QUERY_THEN_FETCH because of https://github.com/elasticsearch/elasticsearch/issues/4829
  public void testEquivalence() throws Exception {
    // A rescorer that contributes nothing (zero rescore weight, or a rescore query that
    // matches no documents) must leave the ranking of the plain query untouched.
    int numDocs = indexRandomNumbers("whitespace");

    final int iterations = atLeast(50);
    for (int iter = 0; iter < iterations; iter++) {
      final int resultSize = between(5, 30);
      final int rescoreWindow = between(1, 3) * resultSize;
      final String phrase = English.intToEnglish(between(0, numDocs - 1));
      final String firstWord = phrase.split(" ")[0];

      // Phrase rescorer with zero rescore weight: effective scores equal the query scores.
      SearchResponse rescored =
          client()
              .prepareSearch()
              .setSearchType(SearchType.QUERY_THEN_FETCH)
              .setPreference("test") // ensure we hit the same shards for tie-breaking
              .setQuery(
                  QueryBuilders.matchQuery("field1", firstWord)
                      .operator(MatchQueryBuilder.Operator.OR))
              .setFrom(0)
              .setSize(resultSize)
              .setRescorer(
                  RescoreBuilder.queryRescorer(
                          QueryBuilders.constantScoreQuery(
                              QueryBuilders.matchPhraseQuery("field1", phrase).slop(3)))
                      .setQueryWeight(1.0f)
                      // no weight - so we basically use the same score as the actual query
                      .setRescoreQueryWeight(0.0f))
              .setRescoreWindow(rescoreWindow)
              .execute()
              .actionGet();

      // Baseline: the same query without any rescorer.
      SearchResponse plain =
          client()
              .prepareSearch()
              .setSearchType(SearchType.QUERY_THEN_FETCH)
              .setPreference("test") // ensure we hit the same shards for tie-breaking
              .setQuery(
                  QueryBuilders.matchQuery("field1", firstWord)
                      .operator(MatchQueryBuilder.Operator.OR))
              .setFrom(0)
              .setSize(resultSize)
              .execute()
              .actionGet();

      // check equivalence
      assertEquivalent(firstWord, plain, rescored);

      // Rescore query that matches nothing: rescoring is a no-op even with weight 1.
      rescored =
          client()
              .prepareSearch()
              .setSearchType(SearchType.QUERY_THEN_FETCH)
              .setPreference("test") // ensure we hit the same shards for tie-breaking
              .setQuery(
                  QueryBuilders.matchQuery("field1", firstWord)
                      .operator(MatchQueryBuilder.Operator.OR))
              .setFrom(0)
              .setSize(resultSize)
              .setRescorer(
                  RescoreBuilder.queryRescorer(
                          QueryBuilders.constantScoreQuery(
                              QueryBuilders.matchPhraseQuery("field1", "not in the index").slop(3)))
                      .setQueryWeight(1.0f)
                      .setRescoreQueryWeight(1.0f))
              .setRescoreWindow(rescoreWindow)
              .execute()
              .actionGet();
      // check equivalence
      assertEquivalent(firstWord, plain, rescored);

      // Exact-phrase rescorer with real weight: the top hit may legitimately change,
      // but only to a document that actually contains the phrase.
      rescored =
          client()
              .prepareSearch()
              .setSearchType(SearchType.QUERY_THEN_FETCH)
              .setPreference("test") // ensure we hit the same shards for tie-breaking
              .setQuery(
                  QueryBuilders.matchQuery("field1", firstWord)
                      .operator(MatchQueryBuilder.Operator.OR))
              .setFrom(0)
              .setSize(resultSize)
              .setRescorer(
                  RescoreBuilder.queryRescorer(
                          QueryBuilders.matchPhraseQuery("field1", phrase).slop(0))
                      .setQueryWeight(1.0f)
                      .setRescoreQueryWeight(1.0f))
              .setRescoreWindow(2 * rescoreWindow)
              .execute()
              .actionGet();
      // check equivalence or if the first match differs we check if the phrase is a
      // substring of the top doc
      assertEquivalentOrSubstringMatch(phrase, plain, rescored);
    }
  }
  @Test
  public void testDuelStrings() throws Exception {
    // Indexes random english strings (plus occasional empty values and value-less docs)
    // and checks that the "fst", "paged_bytes" and "doc_values" string field-data
    // implementations all agree with each other.
    Random random = getRandom();
    final int numDocs = scaledRandomIntBetween(1000, 1500);
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      doc.add(new StringField("_id", "" + i, Field.Store.NO));
      if (random.nextInt(15) != 0) { // roughly 1 in 15 docs has no "bytes" value at all
        int[] numbers = getNumbers(random, Integer.MAX_VALUE);
        for (int number : numbers) {
          final String value = English.longToEnglish(number);
          doc.add(new StringField("bytes", value, Field.Store.NO));
          doc.add(new SortedSetDocValuesField("bytes", new BytesRef(value)));
        }
        if (random.nextInt(10) == 0) { // occasionally mix in an empty string
          doc.add(new StringField("bytes", "", Field.Store.NO));
          doc.add(new SortedSetDocValuesField("bytes", new BytesRef()));
        }
      }
      writer.addDocument(doc);
      if (random.nextInt(10) == 0) {
        refreshReader(); // refresh now and then so we end up with several segments
      }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<>();
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "fst")), Type.Bytes);
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes")),
        Type.Bytes);
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Bytes);
    // TODO add filters
    ArrayList<Entry<FieldDataType, Type>> candidates = new ArrayList<>(typeMap.entrySet());
    Preprocessor pre = new Preprocessor();
    while (!candidates.isEmpty()) {
      // Draw two random implementations; the last remaining one duels itself.
      final Entry<FieldDataType, Type> left;
      final Entry<FieldDataType, Type> right;
      if (candidates.size() > 1) {
        left = candidates.remove(random.nextInt(candidates.size()));
        right = candidates.remove(random.nextInt(candidates.size()));
      } else {
        right = left = candidates.remove(0);
      }
      ifdService.clear();
      IndexFieldData<?> leftFieldData =
          getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT));

      ifdService.clear();
      IndexFieldData<?> rightFieldData =
          getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT));

      // Duel on the top-level context in both directions, then on every segment.
      duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
      duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);

      DirectoryReader perSegment = DirectoryReader.open(writer, true);
      CompositeReaderContext composite = perSegment.getContext();
      for (AtomicReaderContext leaf : composite.leaves()) {
        duelFieldDataBytes(random, leaf, leftFieldData, rightFieldData, pre);
      }
      perSegment.close();
    }
  }
  // creates 8 fields with different options and does "duels" of fields against each other
  public void test() throws Exception {
    Directory dir = newDirectory();
    // Field-name driven analyzer: fields whose name mentions "payloadsFixed" or
    // "payloadsVariable" get the corresponding mock payload filter attached.
    Analyzer analyzer =
        new Analyzer(Analyzer.PER_FIELD_REUSE_STRATEGY) {
          @Override
          protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
            Tokenizer tokenizer = new MockTokenizer(reader);
            if (fieldName.contains("payloadsFixed")) {
              TokenFilter filter = new MockFixedLengthPayloadFilter(new Random(0), tokenizer, 1);
              return new TokenStreamComponents(tokenizer, filter);
            } else if (fieldName.contains("payloadsVariable")) {
              TokenFilter filter = new MockVariableLengthPayloadFilter(new Random(0), tokenizer);
              return new TokenStreamComponents(tokenizer, filter);
            } else {
              return new TokenStreamComponents(tokenizer);
            }
          }
        };
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
    iwc.setCodec(_TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat()));
    // TODO we could actually add more fields implemented with different PFs
    // or, just put this test into the usual rotation?
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc.clone());
    Document doc = new Document();

    FieldType docsOnlyType = new FieldType(TextField.TYPE_NOT_STORED);
    // turn this on for a cross-check
    docsOnlyType.setStoreTermVectors(true);
    docsOnlyType.setIndexOptions(IndexOptions.DOCS_ONLY);

    FieldType docsAndFreqsType = new FieldType(TextField.TYPE_NOT_STORED);
    // turn this on for a cross-check
    docsAndFreqsType.setStoreTermVectors(true);
    docsAndFreqsType.setIndexOptions(IndexOptions.DOCS_AND_FREQS);

    FieldType positionsType = new FieldType(TextField.TYPE_NOT_STORED);
    // turn these on for a cross-check
    positionsType.setStoreTermVectors(true);
    positionsType.setStoreTermVectorPositions(true);
    positionsType.setStoreTermVectorOffsets(true);
    positionsType.setStoreTermVectorPayloads(true);

    FieldType offsetsType = new FieldType(positionsType);
    offsetsType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);

    // Eight fields covering the index-option / payload combinations; every field
    // carries the same text so they can be dueled against each other.
    String[] fieldNames = {
      "field1docs",
      "field2freqs",
      "field3positions",
      "field4offsets",
      "field5payloadsFixed",
      "field6payloadsVariable",
      "field7payloadsFixedOffsets",
      "field8payloadsVariableOffsets"
    };
    FieldType[] fieldTypes = {
      docsOnlyType,
      docsAndFreqsType,
      positionsType,
      offsetsType,
      positionsType,
      positionsType,
      offsetsType,
      offsetsType
    };
    Field[] fields = new Field[fieldNames.length];
    for (int f = 0; f < fields.length; f++) {
      fields[f] = new Field(fieldNames[f], "", fieldTypes[f]);
      doc.add(fields[f]);
    }

    for (int i = 0; i < MAXDOC; i++) {
      String stringValue =
          Integer.toString(i)
              + " verycommon "
              + English.intToEnglish(i).replace('-', ' ')
              + " "
              + _TestUtil.randomSimpleString(random());
      for (Field field : fields) {
        field.setStringValue(stringValue);
      }
      iw.addDocument(doc);
    }
    iw.close();
    verify(dir);
    _TestUtil.checkIndex(dir); // for some extra coverage, checkIndex before we forceMerge
    iwc.setOpenMode(OpenMode.APPEND);
    IndexWriter iw2 = new IndexWriter(dir, iwc.clone());
    iw2.forceMerge(1);
    iw2.close();
    verify(dir);
    dir.close();
  }
  // NOTE(review): stray scraped-sample separator text ("示例#21" = "Example #21" and a
  // lone "0") removed here — it was not valid Java and broke compilation.
  public void testIndexAndRelocateConcurrently() throws ExecutionException, InterruptedException {
    // Starts equally sized "blue" and "red" node groups, pins the index to the red
    // group, then flips the allocation filters and keeps indexing while the shards
    // relocate; finally verifies that no documents were lost.
    int halfNodes = randomIntBetween(1, 3);
    Settings blueAttr = Settings.builder().put("node.attr.color", "blue").build();
    Settings redAttr = Settings.builder().put("node.attr.color", "red").build();
    Settings[] nodeSettings = new Settings[2 * halfNodes];
    for (int n = 0; n < nodeSettings.length; n++) {
      nodeSettings[n] = n < halfNodes ? blueAttr : redAttr;
    }
    List<String> nodes = internalCluster().startNodes(nodeSettings);
    String[] blueNodes = nodes.subList(0, halfNodes).toArray(new String[0]);
    String[] redNodes = nodes.subList(halfNodes, nodes.size()).toArray(new String[0]);
    logger.info("blue nodes: {}", (Object) blueNodes);
    logger.info("red nodes: {}", (Object) redNodes);
    ensureStableCluster(halfNodes * 2);

    // Create the index so that it is only allowed on the red half of the cluster.
    assertAcked(
        prepareCreate(
            "test",
            Settings.builder()
                .put("index.routing.allocation.exclude.color", "blue")
                .put(indexSettings())
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomInt(halfNodes - 1))));
    assertAllShardsOnNodes("test", redNodes);

    int numDocs = randomIntBetween(100, 150);
    ArrayList<String> allIds = new ArrayList<>();
    logger.info(" --> indexing [{}] docs", numDocs);
    IndexRequestBuilder[] bulk = new IndexRequestBuilder[numDocs];
    for (int i = 0; i < numDocs; i++) {
      String id = randomRealisticUnicodeOfLength(10) + String.valueOf(i);
      allIds.add(id);
      bulk[i] =
          client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(i));
    }
    indexRandom(true, bulk);
    SearchResponse beforeRelocation = client().prepareSearch("test").get();
    assertHitCount(beforeRelocation, numDocs);

    logger.info(" --> moving index to new nodes");
    Settings relocationFilter =
        Settings.builder()
            .put("index.routing.allocation.exclude.color", "red")
            .put("index.routing.allocation.include.color", "blue")
            .build();
    client()
        .admin()
        .indices()
        .prepareUpdateSettings("test")
        .setSettings(relocationFilter)
        .execute()
        .actionGet();

    // index while relocating
    logger.info(" --> indexing [{}] more docs", numDocs);
    for (int i = 0; i < numDocs; i++) {
      String id = randomRealisticUnicodeOfLength(10) + String.valueOf(numDocs + i);
      allIds.add(id);
      bulk[i] =
          client()
              .prepareIndex("test", "type1", id)
              .setSource("field1", English.intToEnglish(numDocs + i));
    }
    indexRandom(true, bulk);
    numDocs *= 2;

    logger.info(" --> waiting for relocation to complete");
    ensureGreen("test"); // move all shards to the new nodes (it waits on relocation)

    // Search repeatedly after relocation; every indexed id must still be found.
    final int numIters = randomIntBetween(10, 20);
    for (int i = 0; i < numIters; i++) {
      logger.info(" --> checking iteration {}", i);
      SearchResponse afterRelocation = client().prepareSearch().setSize(allIds.size()).get();
      assertNoFailures(afterRelocation);
      assertSearchHits(afterRelocation, allIds.toArray(new String[allIds.size()]));
    }
  }