Example 1
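
  // InsaneReader (a FilterLeafReader subclass, judging by super(in)): rewrites the
  // wrapped reader's FieldInfos so that one field, insaneField, is exposed without doc
  // values (DocValuesType.NONE, doc-values generation -1), while every other field
  // passes through unchanged. fieldInfos is assumed to be a member of the enclosing
  // class that getFieldInfos() returns.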
  InsaneReader(LeafReader in, String insaneField) {
    super(in);
    this.insaneField = insaneField;
    ArrayList<FieldInfo> filteredInfos = new ArrayList<>();
    for (FieldInfo fi : in.getFieldInfos()) {
      if (fi.name.equals(insaneField)) {
        filteredInfos.add(
            new FieldInfo(
                fi.name,
                fi.number,
                fi.hasVectors(),
                fi.omitsNorms(),
                fi.hasPayloads(),
                fi.getIndexOptions(),
                DocValuesType.NONE,
                -1,
                Collections.emptyMap(),
                fi.getPointDimensionCount(),
                fi.getPointNumBytes()));
      } else {
        filteredInfos.add(fi);
      }
    }
    fieldInfos = new FieldInfos(filteredInfos.toArray(new FieldInfo[filteredInfos.size()]));
  }
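
  // A minimal usage sketch (not part of the original example; "title" is a hypothetical
  // field name): wrap a reader so that one field stops advertising doc values.
  //
  //   LeafReader insane = new InsaneReader(leafReader, "title");
  //   assert insane.getFieldInfos().fieldInfo("title").getDocValuesType()
  //       == DocValuesType.NONE;

  // The writeField and merge methods below are shaped like Lucene's Lucene60PointsWriter:
  // writeState, dataOut, indexFPs, maxPointsInLeafNode and maxMBSortInHeap are assumed to
  // be fields of that enclosing writer.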
  @Override
  public void writeField(FieldInfo fieldInfo, PointsReader reader) throws IOException {

    PointValues values = reader.getValues(fieldInfo.name);
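    // If every document contributes exactly one point, size() == getDocCount(); this hint
    // lets BKDWriter use a leaner sort, since it need not track a per-point ordinal.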
    boolean singleValuePerDoc = values.size() == values.getDocCount();

    try (BKDWriter writer =
        new BKDWriter(
            writeState.segmentInfo.maxDoc(),
            writeState.directory,
            writeState.segmentInfo.name,
            fieldInfo.getPointDimensionCount(),
            fieldInfo.getPointNumBytes(),
            maxPointsInLeafNode,
            maxMBSortInHeap,
            values.size(),
            singleValuePerDoc)) {

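      // Fast path: MutablePointValues (e.g. points still in the indexing buffer) can be
      // sorted in place and handed to the writer directly, skipping the copy below.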
      if (values instanceof MutablePointValues) {
        final long fp = writer.writeField(dataOut, fieldInfo.name, (MutablePointValues) values);
        if (fp != -1) {
          indexFPs.put(fieldInfo.name, fp);
        }
        return;
      }

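      // General path: replay every point through an IntersectVisitor. compare() always
      // answers CELL_CROSSES_QUERY, which forces visit(docID, packedValue) to be called
      // for every point; the docID-only visit() must never be reached here.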
      values.intersect(
          new IntersectVisitor() {
            @Override
            public void visit(int docID) {
              throw new IllegalStateException();
            }

            @Override
            public void visit(int docID, byte[] packedValue) throws IOException {
              writer.add(packedValue, docID);
            }

            @Override
            public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
              return Relation.CELL_CROSSES_QUERY;
            }
          });

      // We could have 0 points on merge since all docs with dimensional fields may be deleted:
      if (writer.getPointCount() > 0) {
        indexFPs.put(fieldInfo.name, writer.finish(dataOut));
      }
    }
  }
  @Override
  public void merge(MergeState mergeState) throws IOException {
    if (mergeState.segmentInfo.getIndexSort() != null) {
      // TODO: can we gain back some optimizations even if the index is sorted? E.g. if
      // the sort results in large chunks of contiguous docs from one sub being copied
      // over...?
      super.merge(mergeState);
      return;
    }

    for (PointsReader reader : mergeState.pointsReaders) {
      if (reader instanceof Lucene60PointsReader == false) {
        // We can only bulk merge when all to-be-merged segments use our format:
        super.merge(mergeState);
        return;
      }
    }
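    // Every reader uses this format; verify checksums once up front before bulk merging: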
    for (PointsReader reader : mergeState.pointsReaders) {
      if (reader != null) {
        reader.checkIntegrity();
      }
    }

    for (FieldInfo fieldInfo : mergeState.mergeFieldInfos) {
      if (fieldInfo.getPointDimensionCount() != 0) {
        if (fieldInfo.getPointDimensionCount() == 1) {

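          // BKDWriter needs the total point count and the single-value-per-doc hint up
          // front, so first compute worst-case values across all incoming segments: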
          boolean singleValuePerDoc = true;

          // Worst case total maximum size (if none of the points are deleted):
          long totMaxSize = 0;
          for (int i = 0; i < mergeState.pointsReaders.length; i++) {
            PointsReader reader = mergeState.pointsReaders[i];
            if (reader != null) {
              FieldInfos readerFieldInfos = mergeState.fieldInfos[i];
              FieldInfo readerFieldInfo = readerFieldInfos.fieldInfo(fieldInfo.name);
              if (readerFieldInfo != null && readerFieldInfo.getPointDimensionCount() > 0) {
                PointValues values = reader.getValues(fieldInfo.name);
                if (values != null) {
                  totMaxSize += values.size();
                  singleValuePerDoc &= values.size() == values.getDocCount();
                }
              }
            }
          }

          // Optimize the 1D case to use BKDWriter.merge, which does a single merge sort of the
          // already sorted incoming segments, instead of trying to sort all points again as if
          // we were simply reindexing them:
          try (BKDWriter writer =
              new BKDWriter(
                  writeState.segmentInfo.maxDoc(),
                  writeState.directory,
                  writeState.segmentInfo.name,
                  fieldInfo.getPointDimensionCount(),
                  fieldInfo.getPointNumBytes(),
                  maxPointsInLeafNode,
                  maxMBSortInHeap,
                  totMaxSize,
                  singleValuePerDoc)) {
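            // Collect one BKDReader (plus its segment's docID remapping) from each
            // incoming reader that indexed this field: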
            List<BKDReader> bkdReaders = new ArrayList<>();
            List<MergeState.DocMap> docMaps = new ArrayList<>();
            for (int i = 0; i < mergeState.pointsReaders.length; i++) {
              PointsReader reader = mergeState.pointsReaders[i];

              if (reader != null) {

                // we confirmed this up above
                assert reader instanceof Lucene60PointsReader;
                Lucene60PointsReader reader60 = (Lucene60PointsReader) reader;

                // NOTE: we cannot just use the merged fieldInfo.number (instead of
                // resolving to this reader's FieldInfo as we do below) because field
                // numbers can easily be different when addIndexes(Directory...) copies
                // over segments from another index:

                FieldInfos readerFieldInfos = mergeState.fieldInfos[i];
                FieldInfo readerFieldInfo = readerFieldInfos.fieldInfo(fieldInfo.name);
                if (readerFieldInfo != null && readerFieldInfo.getPointDimensionCount() > 0) {
                  BKDReader bkdReader = reader60.readers.get(readerFieldInfo.number);
                  if (bkdReader != null) {
                    bkdReaders.add(bkdReader);
                    docMaps.add(mergeState.docMaps[i]);
                  }
                }
              }
            }

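            // Single multi-way merge over the already-sorted per-segment trees; -1 means
            // no points were written (mirroring the check in writeField above):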
            long fp = writer.merge(dataOut, docMaps, bkdReaders);
            if (fp != -1) {
              indexFPs.put(fieldInfo.name, fp);
            }
          }
        } else {
          mergeOneField(mergeState, fieldInfo);
        }
      }
    }

    // Finalize the writer; in Lucene's Lucene60PointsWriter this seals the data file and
    // writes the per-field index recorded in indexFPs.
    finish();
  }