/**
 * Loads a numeric field through two fielddata implementations (randomly via load() or
 * loadDirect()) and asserts that, for every document, both return the same number of long
 * values, equal value by value and in strictly increasing order.
 */
private static void duelFieldDataLong(
      Random random,
      AtomicReaderContext context,
      IndexNumericFieldData left,
      IndexNumericFieldData right)
      throws Exception {
    AtomicNumericFieldData leftData =
        random.nextBoolean() ? left.load(context) : left.loadDirect(context);
    AtomicNumericFieldData rightData =
        random.nextBoolean() ? right.load(context) : right.loadDirect(context);

    int numDocs = context.reader().maxDoc();
    SortedNumericDocValues leftLongValues = leftData.getLongValues();
    SortedNumericDocValues rightLongValues = rightData.getLongValues();
    for (int i = 0; i < numDocs; i++) {
      leftLongValues.setDocument(i);
      rightLongValues.setDocument(i);
      int numValues = leftLongValues.count();
      long previous = 0;
      assertThat(numValues, equalTo(rightLongValues.count()));
      for (int j = 0; j < numValues; j++) {
        long current = rightLongValues.valueAt(j);
        assertThat(leftLongValues.valueAt(j), equalTo(current));
        if (j > 0) {
          assertThat(previous, lessThan(current));
        }
        previous = current;
      }
    }
  }
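
  /**
   * Same duel as {@link #duelFieldDataLong}, but over double values: per document both
   * implementations must agree on the value count, match value by value within a small
   * delta (NaN compares equal to NaN), and be strictly increasing.
   */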
  private static void duelFieldDataDouble(
      Random random,
      AtomicReaderContext context,
      IndexNumericFieldData left,
      IndexNumericFieldData right)
      throws Exception {
    AtomicNumericFieldData leftData =
        random.nextBoolean() ? left.load(context) : left.loadDirect(context);
    AtomicNumericFieldData rightData =
        random.nextBoolean() ? right.load(context) : right.loadDirect(context);

    int numDocs = context.reader().maxDoc();
    SortedNumericDoubleValues leftDoubleValues = leftData.getDoubleValues();
    SortedNumericDoubleValues rightDoubleValues = rightData.getDoubleValues();
    for (int i = 0; i < numDocs; i++) {
      leftDoubleValues.setDocument(i);
      rightDoubleValues.setDocument(i);
      int numValues = leftDoubleValues.count();
      assertThat(numValues, equalTo(rightDoubleValues.count()));
      double previous = 0;
      for (int j = 0; j < numValues; j++) {
        double current = rightDoubleValues.valueAt(j);
        if (Double.isNaN(current)) {
          assertTrue(Double.isNaN(leftDoubleValues.valueAt(j)));
        } else {
          assertThat(leftDoubleValues.valueAt(j), closeTo(current, 0.0001));
        }
        if (j > 0) {
          assertThat(Double.compare(previous, current), lessThan(0));
        }
        previous = current;
      }
    }
  }
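
 /**
  * Returns random values in (-margin, margin): usually a single value, but one time in
  * twenty an array of one to ten values.
  */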
 private int[] getNumbers(Random random, int margin) {
   if (random.nextInt(20) == 0) {
     int[] num = new int[1 + random.nextInt(10)];
     for (int i = 0; i < num.length; i++) {
       int v = (random.nextBoolean() ? -1 * random.nextInt(margin) : random.nextInt(margin));
       num[i] = v;
     }
     return num;
   }
   return new int[] {
     (random.nextBoolean() ? -1 * random.nextInt(margin) : random.nextInt(margin))
   };
 }
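
  /**
   * Duels two geo-point fielddata implementations: per document both must expose the same
   * number of points, and every point on one side must have a counterpart on the other
   * within the given distance precision.
   */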
  private static void duelFieldDataGeoPoint(
      Random random,
      AtomicReaderContext context,
      IndexGeoPointFieldData left,
      IndexGeoPointFieldData right,
      Distance precision)
      throws Exception {
    AtomicGeoPointFieldData leftData =
        random.nextBoolean() ? left.load(context) : left.loadDirect(context);
    AtomicGeoPointFieldData rightData =
        random.nextBoolean() ? right.load(context) : right.loadDirect(context);

    int numDocs = context.reader().maxDoc();
    MultiGeoPointValues leftValues = leftData.getGeoPointValues();
    MultiGeoPointValues rightValues = rightData.getGeoPointValues();
    for (int i = 0; i < numDocs; ++i) {
      leftValues.setDocument(i);
      final int numValues = leftValues.count();
      rightValues.setDocument(i);
      assertEquals(numValues, rightValues.count());
      List<GeoPoint> leftPoints = Lists.newArrayList();
      List<GeoPoint> rightPoints = Lists.newArrayList();
      for (int j = 0; j < numValues; ++j) {
        GeoPoint l = leftValues.valueAt(j);
        leftPoints.add(new GeoPoint(l.getLat(), l.getLon()));
        GeoPoint r = rightValues.valueAt(j);
        rightPoints.add(new GeoPoint(r.getLat(), r.getLon()));
      }
      for (GeoPoint l : leftPoints) {
        assertTrue(
            "Couldn't find " + l + " among " + rightPoints, contains(l, rightPoints, precision));
      }
      for (GeoPoint r : rightPoints) {
        assertTrue(
            "Couldn't find " + r + " among " + leftPoints, contains(r, leftPoints, precision));
      }
    }
  }
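
 /**
  * Creates a directory service backed by one to five random test directories; mock
  * directories get double-write prevention set as requested and the virus scanner
  * disabled.
  */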
 public LuceneManagedDirectoryService(Random random, boolean preventDoubleWrite) {
   super(new ShardId("fake", 1), ImmutableSettings.EMPTY);
   this.dirs = new Directory[1 + random.nextInt(5)];
   for (int i = 0; i < dirs.length; i++) {
     dirs[i] = newDirectory(random);
     if (dirs[i] instanceof MockDirectoryWrapper) {
       ((MockDirectoryWrapper) dirs[i]).setPreventDoubleWrite(preventDoubleWrite);
       // TODO: fix this test to handle virus checker
       ((MockDirectoryWrapper) dirs[i]).setEnableVirusScanner(false);
     }
   }
   this.random = random;
 }
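
  /**
   * Duels two binary fielddata implementations: per document both must expose the same
   * number of values, equal under the given {@link Preprocessor}, with the right-hand
   * values strictly ordered by the preprocessor's comparator.
   */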
  private static void duelFieldDataBytes(
      Random random,
      AtomicReaderContext context,
      IndexFieldData<?> left,
      IndexFieldData<?> right,
      Preprocessor pre)
      throws Exception {
    AtomicFieldData leftData = random.nextBoolean() ? left.load(context) : left.loadDirect(context);
    AtomicFieldData rightData =
        random.nextBoolean() ? right.load(context) : right.loadDirect(context);

    int numDocs = context.reader().maxDoc();
    SortedBinaryDocValues leftBytesValues = leftData.getBytesValues();
    SortedBinaryDocValues rightBytesValues = rightData.getBytesValues();
    BytesRef leftSpare = new BytesRef();
    BytesRef rightSpare = new BytesRef();

    for (int i = 0; i < numDocs; i++) {
      leftBytesValues.setDocument(i);
      rightBytesValues.setDocument(i);
      int numValues = leftBytesValues.count();
      assertThat(numValues, equalTo(rightBytesValues.count()));
      BytesRef previous = null;
      for (int j = 0; j < numValues; j++) {
        rightSpare.copyBytes(rightBytesValues.valueAt(j));
        leftSpare.copyBytes(leftBytesValues.valueAt(j));
        if (previous != null) {
          assertThat(pre.compare(previous, rightSpare), lessThan(0));
        }
        previous = BytesRef.deepCopyOf(rightSpare);
        assertThat(pre.toString(leftSpare), equalTo(pre.toString(rightSpare)));
      }
    }
  }
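
  /**
   * Indexes random geo points (reusing a default point to exercise deduplication) and
   * duels the array, compressed, and doc_values geo_point formats against each other, both
   * on the top-level reader and per segment.
   */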
  public void testDuelGeoPoints() throws Exception {
    final String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("geopoint")
            .field("type", "geo_point")
            .startObject("fielddata")
            .field("format", "doc_values")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);

    Random random = getRandom();
    int atLeast = scaledRandomIntBetween(1000, 1500);
    int maxValuesPerDoc = randomBoolean() ? 1 : randomIntBetween(2, 40);
    // a fixed default point is reused across documents to test deduplication
    double defaultLat = randomDouble() * 180 - 90;
    double defaultLon = randomDouble() * 360 - 180;
    for (int i = 0; i < atLeast; i++) {
      final int numValues = randomInt(maxValuesPerDoc);
      XContentBuilder doc = XContentFactory.jsonBuilder().startObject().startArray("geopoint");
      for (int j = 0; j < numValues; ++j) {
        if (randomBoolean()) {
          doc.startObject().field("lat", defaultLat).field("lon", defaultLon).endObject();
        } else {
          doc.startObject()
              .field("lat", randomDouble() * 180 - 90)
              .field("lon", randomDouble() * 360 - 180)
              .endObject();
        }
      }
      doc = doc.endArray().endObject();
      final ParsedDocument d = mapper.parse("type", Integer.toString(i), doc.bytes());

      writer.addDocument(d.rootDoc());
      if (random.nextInt(10) == 0) {
        refreshReader();
      }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<>();
    final Distance precision = new Distance(1, randomFrom(DistanceUnit.values()));
    typeMap.put(
        new FieldDataType("geo_point", ImmutableSettings.builder().put("format", "array")),
        Type.GeoPoint);
    typeMap.put(
        new FieldDataType(
            "geo_point",
            ImmutableSettings.builder().put("format", "compressed").put("precision", precision)),
        Type.GeoPoint);
    typeMap.put(
        new FieldDataType("geo_point", ImmutableSettings.builder().put("format", "doc_values")),
        Type.GeoPoint);

    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<>(typeMap.entrySet());
    while (!list.isEmpty()) {
      Entry<FieldDataType, Type> left;
      Entry<FieldDataType, Type> right;
      if (list.size() > 1) {
        left = list.remove(random.nextInt(list.size()));
        right = list.remove(random.nextInt(list.size()));
      } else {
        right = left = list.remove(0);
      }
      ifdService.clear();
      IndexGeoPointFieldData leftFieldData =
          getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT));

      ifdService.clear();
      IndexGeoPointFieldData rightFieldData =
          getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT));

      duelFieldDataGeoPoint(random, context, leftFieldData, rightFieldData, precision);
      duelFieldDataGeoPoint(random, context, rightFieldData, leftFieldData, precision);

      DirectoryReader perSegment = DirectoryReader.open(writer, true);
      CompositeReaderContext composite = perSegment.getContext();
      List<AtomicReaderContext> leaves = composite.leaves();
      for (AtomicReaderContext atomicReaderContext : leaves) {
        duelFieldDataGeoPoint(
            random, atomicReaderContext, leftFieldData, rightFieldData, precision);
      }
      perSegment.close();
    }
  }
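
  /**
   * Indexes the same random byte, rendered as a string, into string, byte, short, integer,
   * long, float, and double fields, then duels random pairs of fielddata formats using a
   * ToDoublePreprocessor so values compare numerically.
   */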
  @Test
  public void testDuelAllTypesSingleValue() throws Exception {
    final String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("bytes")
            .field("type", "string")
            .field("index", "not_analyzed")
            .startObject("fielddata")
            .field("format", LuceneTestCase.defaultCodecSupportsSortedSet() ? "doc_values" : "fst")
            .endObject()
            .endObject()
            .startObject("byte")
            .field("type", "byte")
            .startObject("fielddata")
            .field("format", "doc_values")
            .endObject()
            .endObject()
            .startObject("short")
            .field("type", "short")
            .startObject("fielddata")
            .field("format", "doc_values")
            .endObject()
            .endObject()
            .startObject("integer")
            .field("type", "integer")
            .startObject("fielddata")
            .field("format", "doc_values")
            .endObject()
            .endObject()
            .startObject("long")
            .field("type", "long")
            .startObject("fielddata")
            .field("format", "doc_values")
            .endObject()
            .endObject()
            .startObject("float")
            .field("type", "float")
            .startObject("fielddata")
            .field("format", "doc_values")
            .endObject()
            .endObject()
            .startObject("double")
            .field("type", "double")
            .startObject("fielddata")
            .field("format", "doc_values")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();
    final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
    Random random = getRandom();
    int atLeast = scaledRandomIntBetween(1000, 1500);
    for (int i = 0; i < atLeast; i++) {
      String s = Integer.toString(randomByte());

      XContentBuilder doc = XContentFactory.jsonBuilder().startObject();
      for (String fieldName :
          Arrays.asList("bytes", "byte", "short", "integer", "long", "float", "double")) {
        doc = doc.field(fieldName, s);
      }

      doc = doc.endObject();

      final ParsedDocument d = mapper.parse("type", Integer.toString(i), doc.bytes());

      writer.addDocument(d.rootDoc());

      if (random.nextInt(10) == 0) {
        refreshReader();
      }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<>();
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "fst")), Type.Bytes);
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes")),
        Type.Bytes);
    typeMap.put(
        new FieldDataType("byte", ImmutableSettings.builder().put("format", "array")),
        Type.Integer);
    typeMap.put(
        new FieldDataType("short", ImmutableSettings.builder().put("format", "array")),
        Type.Integer);
    typeMap.put(
        new FieldDataType("int", ImmutableSettings.builder().put("format", "array")), Type.Integer);
    typeMap.put(
        new FieldDataType("long", ImmutableSettings.builder().put("format", "array")), Type.Long);
    typeMap.put(
        new FieldDataType("double", ImmutableSettings.builder().put("format", "array")),
        Type.Double);
    typeMap.put(
        new FieldDataType("float", ImmutableSettings.builder().put("format", "array")), Type.Float);
    typeMap.put(
        new FieldDataType("byte", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Integer);
    typeMap.put(
        new FieldDataType("short", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Integer);
    typeMap.put(
        new FieldDataType("int", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Integer);
    typeMap.put(
        new FieldDataType("long", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Long);
    typeMap.put(
        new FieldDataType("double", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Double);
    typeMap.put(
        new FieldDataType("float", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Float);
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Bytes);
    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<>(typeMap.entrySet());
    Preprocessor pre = new ToDoublePreprocessor();
    while (!list.isEmpty()) {
      Entry<FieldDataType, Type> left;
      Entry<FieldDataType, Type> right;
      if (list.size() > 1) {
        left = list.remove(random.nextInt(list.size()));
        right = list.remove(random.nextInt(list.size()));
      } else {
        right = left = list.remove(0);
      }

      ifdService.clear();
      IndexFieldData<?> leftFieldData =
          getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT));
      ifdService.clear();
      IndexFieldData<?> rightFieldData =
          getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT));
      duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
      duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);

      DirectoryReader perSegment = DirectoryReader.open(writer, true);
      CompositeReaderContext composite = perSegment.getContext();
      List<AtomicReaderContext> leaves = composite.leaves();
      for (AtomicReaderContext atomicReaderContext : leaves) {
        duelFieldDataBytes(random, atomicReaderContext, leftFieldData, rightFieldData, pre);
      }
      perSegment.close();
    }
  }
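
  /**
   * Indexes random English number strings (occasionally an empty value, occasionally none
   * at all) and duels the fst, paged_bytes, and doc_values string formats against each
   * other, both on the top-level reader and per segment.
   */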
  @Test
  public void testDuelStrings() throws Exception {
    Random random = getRandom();
    int atLeast = scaledRandomIntBetween(1000, 1500);
    for (int i = 0; i < atLeast; i++) {
      Document d = new Document();
      d.add(new StringField("_id", "" + i, Field.Store.NO));
      if (random.nextInt(15) != 0) {
        int[] numbers = getNumbers(random, Integer.MAX_VALUE);
        for (int j : numbers) {
          final String s = English.longToEnglish(j);
          d.add(new StringField("bytes", s, Field.Store.NO));
          d.add(new SortedSetDocValuesField("bytes", new BytesRef(s)));
        }
        if (random.nextInt(10) == 0) {
          d.add(new StringField("bytes", "", Field.Store.NO));
          d.add(new SortedSetDocValuesField("bytes", new BytesRef()));
        }
      }
      writer.addDocument(d);
      if (random.nextInt(10) == 0) {
        refreshReader();
      }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<>();
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "fst")), Type.Bytes);
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes")),
        Type.Bytes);
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Bytes);
    // TODO add filters
    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<>(typeMap.entrySet());
    Preprocessor pre = new Preprocessor();
    while (!list.isEmpty()) {
      Entry<FieldDataType, Type> left;
      Entry<FieldDataType, Type> right;
      if (list.size() > 1) {
        left = list.remove(random.nextInt(list.size()));
        right = list.remove(random.nextInt(list.size()));
      } else {
        right = left = list.remove(0);
      }
      ifdService.clear();
      IndexFieldData<?> leftFieldData =
          getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT));

      ifdService.clear();
      IndexFieldData<?> rightFieldData =
          getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT));

      duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
      duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);

      DirectoryReader perSegment = DirectoryReader.open(writer, true);
      CompositeReaderContext composite = perSegment.getContext();
      List<AtomicReaderContext> leaves = composite.leaves();
      for (AtomicReaderContext atomicReaderContext : leaves) {
        duelFieldDataBytes(random, atomicReaderContext, leftFieldData, rightFieldData, pre);
      }
      perSegment.close();
    }
  }
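
  /**
   * Indexes deduplicated random floats (including NaN) into parallel float and double
   * fields and duels the array and doc_values formats for both types.
   */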
  @Test
  public void testDuelDoubles() throws Exception {
    final String mapping =
        XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type")
            .startObject("properties")
            .startObject("float")
            .field("type", "float")
            .startObject("fielddata")
            .field("format", "doc_values")
            .endObject()
            .endObject()
            .startObject("double")
            .field("type", "double")
            .startObject("fielddata")
            .field("format", "doc_values")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .string();

    final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
    Random random = getRandom();
    int atLeast = scaledRandomIntBetween(1000, 1500);
    final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 40);
    float[] values = new float[maxNumValues];
    for (int i = 0; i < atLeast; i++) {
      int numValues = randomInt(maxNumValues);
      float def = randomBoolean() ? randomFloat() : Float.NaN;
      // FD loses values if they are duplicated, so we must deduplicate for this test
      Set<Float> vals = new HashSet<Float>();
      for (int j = 0; j < numValues; ++j) {
        if (randomBoolean()) {
          vals.add(def);
        } else {
          vals.add(randomFloat());
        }
      }
      numValues = vals.size();
      int upto = 0;
      for (Float f : vals) {
        values[upto++] = f.floatValue();
      }

      XContentBuilder doc = XContentFactory.jsonBuilder().startObject().startArray("float");
      for (int j = 0; j < numValues; ++j) {
        doc = doc.value(values[j]);
      }
      doc = doc.endArray().startArray("double");
      for (int j = 0; j < numValues; ++j) {
        doc = doc.value(values[j]);
      }
      doc = doc.endArray().endObject();

      final ParsedDocument d = mapper.parse("type", Integer.toString(i), doc.bytes());

      writer.addDocument(d.rootDoc());
      if (random.nextInt(10) == 0) {
        refreshReader();
      }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<>();
    typeMap.put(
        new FieldDataType("double", ImmutableSettings.builder().put("format", "array")),
        Type.Double);
    typeMap.put(
        new FieldDataType("float", ImmutableSettings.builder().put("format", "array")), Type.Float);
    typeMap.put(
        new FieldDataType("double", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Double);
    typeMap.put(
        new FieldDataType("float", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Float);
    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<>(typeMap.entrySet());
    while (!list.isEmpty()) {
      Entry<FieldDataType, Type> left;
      Entry<FieldDataType, Type> right;
      if (list.size() > 1) {
        left = list.remove(random.nextInt(list.size()));
        right = list.remove(random.nextInt(list.size()));
      } else {
        right = left = list.remove(0);
      }
      ifdService.clear();
      IndexNumericFieldData leftFieldData =
          getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT));

      ifdService.clear();
      IndexNumericFieldData rightFieldData =
          getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT));

      duelFieldDataDouble(random, context, leftFieldData, rightFieldData);
      duelFieldDataDouble(random, context, rightFieldData, leftFieldData);

      DirectoryReader perSegment = DirectoryReader.open(writer, true);
      CompositeReaderContext composite = perSegment.getContext();
      List<AtomicReaderContext> leaves = composite.leaves();
      for (AtomicReaderContext atomicReaderContext : leaves) {
        duelFieldDataDouble(random, atomicReaderContext, leftFieldData, rightFieldData);
      }
      perSegment.close();
    }
  }
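
  /**
   * Writes the same documents into two stores from the same random seed and verifies how
   * recoveryDiff classifies files as identical, different, or missing across fresh
   * commits, a delete, and a forced compound-file commit.
   */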
  @Test
  public void testRecoveryDiff() throws IOException, InterruptedException {
    int numDocs = 2 + random().nextInt(100);
    List<Document> docs = new ArrayList<>();
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      doc.add(
          new StringField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
      doc.add(
          new TextField(
              "body",
              TestUtil.randomRealisticUnicodeString(random()),
              random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
      doc.add(
          new SortedDocValuesField(
              "dv", new BytesRef(TestUtil.randomRealisticUnicodeString(random()))));
      docs.add(doc);
    }
    long seed = random().nextLong();
    Store.MetadataSnapshot first;
    {
      Random random = new Random(seed);
      IndexWriterConfig iwc =
          new IndexWriterConfig(new MockAnalyzer(random)).setCodec(actualDefaultCodec());
      iwc.setMergePolicy(NoMergePolicy.INSTANCE);
      iwc.setUseCompoundFile(random.nextBoolean());
      iwc.setMaxThreadStates(1);
      final ShardId shardId = new ShardId(new Index("index"), 1);
      DirectoryService directoryService = new LuceneManagedDirectoryService(random);
      Store store =
          new Store(
              shardId,
              ImmutableSettings.EMPTY,
              directoryService,
              randomDistributor(random, directoryService),
              new DummyShardLock(shardId));
      IndexWriter writer = new IndexWriter(store.directory(), iwc);
      final boolean lotsOfSegments = rarely(random);
      for (Document d : docs) {
        writer.addDocument(d);
        if (lotsOfSegments && random.nextBoolean()) {
          writer.commit();
        } else if (rarely(random)) {
          writer.commit();
        }
      }
      writer.commit();
      writer.close();
      first = store.getMetadata();
      assertDeleteContent(store, directoryService);
      store.close();
    }
    long time = new Date().getTime();
    while (time == new Date().getTime()) {
      Thread.sleep(10); // bump the time
    }
    Store.MetadataSnapshot second;
    Store store;
    {
      Random random = new Random(seed);
      IndexWriterConfig iwc =
          new IndexWriterConfig(new MockAnalyzer(random)).setCodec(actualDefaultCodec());
      iwc.setMergePolicy(NoMergePolicy.INSTANCE);
      iwc.setUseCompoundFile(random.nextBoolean());
      iwc.setMaxThreadStates(1);
      final ShardId shardId = new ShardId(new Index("index"), 1);
      DirectoryService directoryService = new LuceneManagedDirectoryService(random);
      store =
          new Store(
              shardId,
              ImmutableSettings.EMPTY,
              directoryService,
              randomDistributor(random, directoryService),
              new DummyShardLock(shardId));
      IndexWriter writer = new IndexWriter(store.directory(), iwc);
      final boolean lotsOfSegments = rarely(random);
      for (Document d : docs) {
        writer.addDocument(d);
        if (lotsOfSegments && random.nextBoolean()) {
          writer.commit();
        } else if (rarely(random)) {
          writer.commit();
        }
      }
      writer.commit();
      writer.close();
      second = store.getMetadata();
    }
    Store.RecoveryDiff diff = first.recoveryDiff(second);
    assertThat(first.size(), equalTo(second.size()));
    for (StoreFileMetaData md : first) {
      assertThat(second.get(md.name()), notNullValue());
      // si files are different - containing timestamps etc
      assertThat(second.get(md.name()).isSame(md), equalTo(false));
    }
    assertThat(diff.different.size(), equalTo(first.size()));
    assertThat(
        diff.identical.size(),
        equalTo(0)); // in lucene 5 nothing is identical - we use random ids in file headers
    assertThat(diff.missing, empty());

    // check the self diff
    Store.RecoveryDiff selfDiff = first.recoveryDiff(first);
    assertThat(selfDiff.identical.size(), equalTo(first.size()));
    assertThat(selfDiff.different, empty());
    assertThat(selfDiff.missing, empty());

    // lets add some deletes
    Random random = new Random(seed);
    IndexWriterConfig iwc =
        new IndexWriterConfig(new MockAnalyzer(random)).setCodec(actualDefaultCodec());
    iwc.setMergePolicy(NoMergePolicy.INSTANCE);
    iwc.setUseCompoundFile(random.nextBoolean());
    iwc.setMaxThreadStates(1);
    iwc.setOpenMode(IndexWriterConfig.OpenMode.APPEND);
    IndexWriter writer = new IndexWriter(store.directory(), iwc);
    writer.deleteDocuments(new Term("id", Integer.toString(random().nextInt(numDocs))));
    writer.commit();
    writer.close();
    Store.MetadataSnapshot metadata = store.getMetadata();
    StoreFileMetaData delFile = null;
    for (StoreFileMetaData md : metadata) {
      if (md.name().endsWith(".liv")) {
        delFile = md;
        break;
      }
    }
    Store.RecoveryDiff afterDeleteDiff = metadata.recoveryDiff(second);
    if (delFile != null) {
      assertThat(
          afterDeleteDiff.identical.size(), equalTo(metadata.size() - 2)); // segments_N + del file
      assertThat(afterDeleteDiff.different.size(), equalTo(0));
      assertThat(afterDeleteDiff.missing.size(), equalTo(2));
    } else {
      // an entire segment must be missing (single doc segment got dropped)
      assertThat(afterDeleteDiff.identical.size(), greaterThan(0));
      assertThat(afterDeleteDiff.different.size(), equalTo(0));
      assertThat(afterDeleteDiff.missing.size(), equalTo(1)); // the commit file is different
    }

    // check the self diff
    selfDiff = metadata.recoveryDiff(metadata);
    assertThat(selfDiff.identical.size(), equalTo(metadata.size()));
    assertThat(selfDiff.different, empty());
    assertThat(selfDiff.missing, empty());

    // add a new commit
    iwc = new IndexWriterConfig(new MockAnalyzer(random)).setCodec(actualDefaultCodec());
    iwc.setMergePolicy(NoMergePolicy.INSTANCE);
    iwc.setUseCompoundFile(
        true); // force CFS - easier to test here since we know it will add 3 files
    iwc.setMaxThreadStates(1);
    iwc.setOpenMode(IndexWriterConfig.OpenMode.APPEND);
    writer = new IndexWriter(store.directory(), iwc);
    writer.addDocument(docs.get(0));
    writer.close();

    Store.MetadataSnapshot newCommitMetaData = store.getMetadata();
    Store.RecoveryDiff newCommitDiff = newCommitMetaData.recoveryDiff(metadata);
    if (delFile != null) {
      assertThat(
          newCommitDiff.identical.size(),
          equalTo(
              newCommitMetaData.size()
                  - 5)); // segments_N, del file, cfs, cfe, si for the new segment
      assertThat(newCommitDiff.different.size(), equalTo(1)); // the del file must be different
      assertThat(newCommitDiff.different.get(0).name(), endsWith(".liv"));
      assertThat(
          newCommitDiff.missing.size(), equalTo(4)); // segments_N,cfs, cfe, si for the new segment
    } else {
      assertThat(
          newCommitDiff.identical.size(),
          equalTo(newCommitMetaData.size() - 4)); // segments_N, cfs, cfe, si for the new segment
      assertThat(newCommitDiff.different.size(), equalTo(0));
      // an entire segment must be missing (the single-doc segment got dropped) plus the
      // commit file is different
      assertThat(newCommitDiff.missing.size(), equalTo(4));
    }

    store.deleteContent();
    IOUtils.close(store);
  }
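
 /** Randomly picks either a least-used or a random-weighted distributor for the store. */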
 private Distributor randomDistributor(Random random, DirectoryService service)
     throws IOException {
   return random.nextBoolean()
       ? new LeastUsedDistributor(service)
       : new RandomWeightedDistributor(service);
 }
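
 /** Reports a random throttle time below one microsecond. */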
 @Override
 public long throttleTimeInNanos() {
   return random.nextInt(1000);
 }