Example #1
public TermVectorsWriter(Directory directory, String segment, FieldInfos fieldInfos)
      throws IOException {
    // Open files for TermVector storage
    tvx = directory.createOutput(segment + TVX_EXTENSION);
    tvx.writeInt(FORMAT_VERSION);
    tvd = directory.createOutput(segment + TVD_EXTENSION);
    tvd.writeInt(FORMAT_VERSION);
    tvf = directory.createOutput(segment + TVF_EXTENSION);
    tvf.writeInt(FORMAT_VERSION);

    this.fieldInfos = fieldInfos;
    fields = new Vector(fieldInfos.size());
    terms = new Vector();
  }
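Each of the three term vector streams (tvx index, tvd documents, tvf fields) is stamped with the same format version before any data is written. A minimal read-side sketch, assuming this era's one-argument Directory.openInput and reusing the writer's constants, shows how such a header would be validated:

// Hedged sketch: validate the leading format version before trusting the
// rest of the file (constants and names reused from the writer above).
IndexInput tvxInput = directory.openInput(segment + TVX_EXTENSION);
int format = tvxInput.readInt();
if (format != FORMAT_VERSION) {
  tvxInput.close();
  throw new IOException("unsupported term vector format: " + format);
}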
Example #2
 public void writeChecksums() throws IOException {
   String checksumName = CHECKSUMS_PREFIX + System.currentTimeMillis();
   ImmutableMap<String, StoreFileMetaData> files = list();
   synchronized (mutex) {
     Map<String, String> checksums = new HashMap<String, String>();
     for (StoreFileMetaData metaData : files.values()) {
       if (metaData.checksum() != null) {
         checksums.put(metaData.name(), metaData.checksum());
       }
     }
     IndexOutput output = directory.createOutput(checksumName, IOContext.DEFAULT, true);
     output.writeInt(0); // version
     output.writeStringStringMap(checksums);
     output.close();
   }
   for (StoreFileMetaData metaData : files.values()) {
     if (metaData.name().startsWith(CHECKSUMS_PREFIX) && !checksumName.equals(metaData.name())) {
       try {
         directory.deleteFileChecksum(metaData.name());
       } catch (Exception e) {
         // ignore
       }
     }
   }
 }
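The checksum file is a version int followed by a name-to-checksum map written with writeStringStringMap. A hedged read-side sketch, assuming the matching DataInput.readStringStringMap from the same Lucene generation:

// Hedged sketch: read the checksum file back with the DataInput
// counterpart of the writeStringStringMap call used above.
IndexInput input = directory.openInput(checksumName, IOContext.DEFAULT);
try {
  int version = input.readInt(); // 0, as written above
  Map<String, String> checksums = input.readStringStringMap();
  // compare stored checksums against freshly computed ones here
} finally {
  input.close();
}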
Example #3
 @Override
 protected void finishInternal(int docCount) throws IOException {
   final int numValues = hash.size();
   final IndexOutput datOut = getOrCreateDataOut();
    datOut.writeInt(size); // fixed value size in bytes; -1 means no fixed-size data follows
   if (size != -1) {
     final BytesRef bytesRef = new BytesRef(size);
     for (int i = 0; i < numValues; i++) {
       hash.get(i, bytesRef);
       datOut.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
     }
   }
   final IndexOutput idxOut = getOrCreateIndexOut();
   idxOut.writeInt(numValues);
   writeIndex(idxOut, docCount, numValues, docToEntry);
 }
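When size is not -1 the values are fixed-width and packed back to back, so value i starts at a computable offset just past the leading int. A hedged read-side sketch; dataFileName and valueIndex are hypothetical placeholders:

// Hedged sketch: random access into the fixed-size data section written
// above; dataFileName and valueIndex are hypothetical.
IndexInput datIn = dir.openInput(dataFileName, IOContext.READ);
int size = datIn.readInt();
if (size != -1) {
  long dataStart = datIn.getFilePointer(); // just past the size header
  datIn.seek(dataStart + (long) valueIndex * size);
  byte[] value = new byte[size];
  datIn.readBytes(value, 0, size);
}
datIn.close();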
Example #4
  final void finishCommit(Directory dir) throws IOException {
    if (pendingSegnOutput == null) throw new IllegalStateException("prepareCommit was not called");
    boolean success = false;
    try {
      pendingSegnOutput.finishCommit();
      pendingSegnOutput.close();
      pendingSegnOutput = null;
      success = true;
    } finally {
      if (!success) rollbackCommit(dir);
    }

    // NOTE: if we crash here, we have left a segments_N
    // file in the directory in a possibly corrupt state (if
    // some bytes made it to stable storage and others
    // didn't).  But, the segments_N file includes checksum
    // at the end, which should catch this case.  So when a
    // reader tries to read it, it will throw a
    // CorruptIndexException, which should cause the retry
    // logic in SegmentInfos to kick in and load the last
    // good (previous generation) segments_N file.

    final String fileName =
        IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", generation);
    success = false;
    try {
      dir.sync(Collections.singleton(fileName));
      success = true;
    } finally {
      if (!success) {
        try {
          dir.deleteFile(fileName);
        } catch (Throwable t) {
          // Suppress so we keep throwing the original exception
        }
      }
    }

    lastGeneration = generation;

    try {
      IndexOutput genOutput = dir.createOutput(IndexFileNames.SEGMENTS_GEN);
      try {
        genOutput.writeInt(FORMAT_LOCKLESS);
        // the generation is written twice so a reader can detect a torn
        // write by checking that both copies agree
        genOutput.writeLong(generation);
        genOutput.writeLong(generation);
      } finally {
        genOutput.close();
      }
    } catch (ThreadInterruptedException t) {
      throw t;
    } catch (Throwable t) {
      // It's OK if we fail to write this file since it's
      // used only as one of the retry fallbacks.
    }
  }
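Note that the generation is deliberately written twice. A hedged read-side sketch, using this era's one-argument openInput, of how a reader exploits that:

// Hedged sketch: segments.gen is trusted only if both generation copies
// agree, which guards against a partially flushed file.
IndexInput genInput = dir.openInput(IndexFileNames.SEGMENTS_GEN);
try {
  int format = genInput.readInt();
  if (format == FORMAT_LOCKLESS) {
    long gen0 = genInput.readLong();
    long gen1 = genInput.readLong();
    if (gen0 == gen1) {
      // gen0 is a usable hint for locating the current segments_N file
    }
  }
} finally {
  genInput.close();
}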
Example #5
  private void insertData(ByteBufferDirectory dir, int bufferSizeInBytes) throws IOException {
    byte[] test = new byte[] {1, 2, 3, 4, 5, 6, 7, 8};
    IndexOutput indexOutput = dir.createOutput("value1", IOContext.DEFAULT);
    indexOutput.writeBytes(new byte[] {2, 4, 6, 7, 8}, 5);
    indexOutput.writeInt(-1);
    indexOutput.writeLong(10);
    indexOutput.writeInt(0);
    indexOutput.writeInt(0);
    indexOutput.writeBytes(test, 8);
    indexOutput.writeBytes(test, 5);

    indexOutput.seek(0);
    indexOutput.writeByte((byte) 8);
    if (bufferSizeInBytes > 4) {
      indexOutput.seek(2);
      indexOutput.writeBytes(new byte[] {1, 2}, 2);
    }

    indexOutput.close();
  }
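The writes above leave a known layout: a patched 5-byte prefix, then an int, a long, two more ints, and the two test arrays. A hedged verification sketch for the bufferSizeInBytes > 4 case:

// Hedged sketch: read a few values back to confirm the seek-and-overwrite
// calls above landed where expected.
IndexInput indexInput = dir.openInput("value1", IOContext.DEFAULT);
assert indexInput.readByte() == 8; // overwritten by seek(0)/writeByte(8)
indexInput.seek(5);                // skip the rest of the 5-byte prefix
assert indexInput.readInt() == -1;
assert indexInput.readLong() == 10;
indexInput.close();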
Example #6
  private void insertData(CoherenceDirectory dir, String fileName) throws IOException {
    byte[] test = new byte[] {1, 2, 3, 4, 5, 6, 7, 8};
    IndexOutput indexOutput = dir.createOutput(fileName);
    indexOutput.writeBytes(new byte[] {2, 4, 6, 7, 8}, 5);
    indexOutput.writeInt(-1);
    indexOutput.writeLong(10);
    indexOutput.writeInt(0);
    indexOutput.writeInt(0);
    indexOutput.writeBytes(test, 8);
    indexOutput.writeBytes(test, 5);

    indexOutput.seek(0);
    indexOutput.writeByte((byte) 8);
    if (dir.getBucketSize() > 4) {
      indexOutput.seek(2);
      indexOutput.writeBytes(new byte[] {1, 2}, 2);
    }

    indexOutput.close();
  }
Example #7
    @Override
    public void close() throws IOException {
      delegateFieldsConsumer.close();
      // Now we are done accumulating values for these fields
      List<Entry<FieldInfo, FuzzySet>> nonSaturatedBlooms =
          new ArrayList<Map.Entry<FieldInfo, FuzzySet>>();

      for (Entry<FieldInfo, FuzzySet> entry : bloomFilters.entrySet()) {
        FuzzySet bloomFilter = entry.getValue();
        if (!bloomFilterFactory.isSaturated(bloomFilter, entry.getKey())) {
          nonSaturatedBlooms.add(entry);
        }
      }
      String bloomFileName =
          IndexFileNames.segmentFileName(
              state.segmentInfo.name, state.segmentSuffix, BLOOM_EXTENSION);
      IndexOutput bloomOutput = null;
      try {
        bloomOutput = state.directory.createOutput(bloomFileName, state.context);
        CodecUtil.writeHeader(bloomOutput, BLOOM_CODEC_NAME, BLOOM_CODEC_VERSION);
        // remember the name of the postings format we will delegate to
        bloomOutput.writeString(delegatePostingsFormat.getName());

        // First field in the output file is the number of fields+blooms saved
        bloomOutput.writeInt(nonSaturatedBlooms.size());
        for (Entry<FieldInfo, FuzzySet> entry : nonSaturatedBlooms) {
          FieldInfo fieldInfo = entry.getKey();
          FuzzySet bloomFilter = entry.getValue();
          bloomOutput.writeInt(fieldInfo.number);
          saveAppropriatelySizedBloomFilter(bloomOutput, bloomFilter, fieldInfo);
        }
      } finally {
        IOUtils.close(bloomOutput);
      }
      // We are done with large bitsets so no need to keep them hanging around
      bloomFilters.clear();
    }
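The resulting file is a codec header, the delegate postings format's name, a count, and then one (field number, serialized filter) pair per unsaturated bloom filter. A hedged read-side sketch that mirrors this order, reusing the writer's names for brevity:

// Hedged sketch: read back the structure written above, in the same order.
IndexInput bloomIn = state.directory.openInput(bloomFileName, state.context);
CodecUtil.checkHeader(bloomIn, BLOOM_CODEC_NAME, BLOOM_CODEC_VERSION, BLOOM_CODEC_VERSION);
String delegateName = bloomIn.readString(); // postings format to delegate to
int numBlooms = bloomIn.readInt();
for (int i = 0; i < numBlooms; i++) {
  int fieldNumber = bloomIn.readInt();
  // FuzzySet.deserialize(bloomIn) would rebuild this field's filter
}
bloomIn.close();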
Example #8
 @Override
 public void writeInt(int i) throws IOException {
   delegate.writeInt(i);
 }
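Forwarding writeInt as a whole matters because DataOutput's default implementation decomposes the value into four writeByte calls; without the override, a delegating wrapper would route those four calls through itself instead of handing the delegate the full int. For reference, the default looks like this:

// DataOutput's default writeInt: four big-endian writeByte calls.
public void writeInt(int i) throws IOException {
  writeByte((byte) (i >> 24));
  writeByte((byte) (i >> 16));
  writeByte((byte) (i >> 8));
  writeByte((byte) i);
}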
Example #9
  private void write(Directory directory) throws IOException {

    long nextGeneration = getNextPendingGeneration();
    String segmentFileName =
        IndexFileNames.fileNameFromGeneration(IndexFileNames.PENDING_SEGMENTS, "", nextGeneration);

    // Always advance the generation on write:
    generation = nextGeneration;

    IndexOutput segnOutput = null;
    boolean success = false;

    try {
      segnOutput = directory.createOutput(segmentFileName, IOContext.DEFAULT);
      CodecUtil.writeIndexHeader(
          segnOutput,
          "segments",
          VERSION_CURRENT,
          StringHelper.randomId(),
          Long.toString(nextGeneration, Character.MAX_RADIX));
      segnOutput.writeVInt(Version.LATEST.major);
      segnOutput.writeVInt(Version.LATEST.minor);
      segnOutput.writeVInt(Version.LATEST.bugfix);

      segnOutput.writeLong(version);
      segnOutput.writeInt(counter); // write counter
      segnOutput.writeInt(size());

      if (size() > 0) {

        Version minSegmentVersion = null;

        // We do a separate loop up front so we can write the minSegmentVersion before
        // any SegmentInfo; this makes it cleaner to throw IndexFormatTooOldExc at read time:
        for (SegmentCommitInfo siPerCommit : this) {
          Version segmentVersion = siPerCommit.info.getVersion();
          if (minSegmentVersion == null || segmentVersion.onOrAfter(minSegmentVersion) == false) {
            minSegmentVersion = segmentVersion;
          }
        }

        segnOutput.writeVInt(minSegmentVersion.major);
        segnOutput.writeVInt(minSegmentVersion.minor);
        segnOutput.writeVInt(minSegmentVersion.bugfix);
      }

      // write infos
      for (SegmentCommitInfo siPerCommit : this) {
        SegmentInfo si = siPerCommit.info;
        segnOutput.writeString(si.name);
        byte[] segmentID = si.getId();
        // TODO: remove this in lucene 6, we don't need to include 4.x segments in commits anymore
        if (segmentID == null) {
          segnOutput.writeByte((byte) 0);
        } else {
          if (segmentID.length != StringHelper.ID_LENGTH) {
            throw new IllegalStateException(
                "cannot write segment: invalid id segment="
                    + si.name
                    + "id="
                    + StringHelper.idToString(segmentID));
          }
          segnOutput.writeByte((byte) 1);
          segnOutput.writeBytes(segmentID, segmentID.length);
        }
        segnOutput.writeString(si.getCodec().getName());
        segnOutput.writeLong(siPerCommit.getDelGen());
        int delCount = siPerCommit.getDelCount();
        if (delCount < 0 || delCount > si.maxDoc()) {
          throw new IllegalStateException(
              "cannot write segment: invalid maxDoc segment="
                  + si.name
                  + " maxDoc="
                  + si.maxDoc()
                  + " delCount="
                  + delCount);
        }
        segnOutput.writeInt(delCount);
        segnOutput.writeLong(siPerCommit.getFieldInfosGen());
        segnOutput.writeLong(siPerCommit.getDocValuesGen());
        segnOutput.writeSetOfStrings(siPerCommit.getFieldInfosFiles());
        final Map<Integer, Set<String>> dvUpdatesFiles = siPerCommit.getDocValuesUpdatesFiles();
        segnOutput.writeInt(dvUpdatesFiles.size());
        for (Entry<Integer, Set<String>> e : dvUpdatesFiles.entrySet()) {
          segnOutput.writeInt(e.getKey());
          segnOutput.writeSetOfStrings(e.getValue());
        }
      }
      segnOutput.writeMapOfStrings(userData);
      CodecUtil.writeFooter(segnOutput);
      segnOutput.close();
      directory.sync(Collections.singleton(segmentFileName));
      success = true;
    } finally {
      if (success) {
        pendingCommit = true;
      } else {
        // We hit an exception above; try to close the file
        // but suppress any exception:
        IOUtils.closeWhileHandlingException(segnOutput);
        // Try not to leave a truncated segments_N file in
        // the index:
        IOUtils.deleteFilesIgnoringExceptions(directory, segmentFileName);
      }
    }
  }
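write() only stages a pending_segments_N file (note that success merely sets pendingCommit). A hedged sketch of the finishing phase, assuming the Lucene 5.x-era Directory.renameFile, which makes the commit visible atomically:

// Hedged sketch: the pending file staged above is renamed to its final
// segments_N name in the second commit phase.
String pending =
    IndexFileNames.fileNameFromGeneration(IndexFileNames.PENDING_SEGMENTS, "", generation);
String fileName =
    IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", generation);
directory.renameFile(pending, fileName);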