/**
 * Tests {@link java.nio.MappedByteBuffer#load()}: load() must return the buffer
 * itself (fluent contract) for both read-only and read-write mappings.
 */
  public void test_load() throws IOException {
    // try-with-resources: the original leaked the stream/file (and their
    // channels) whenever an assertion failed before the close() calls.
    // Closing the stream/file also closes the channel obtained from it.
    try (FileInputStream fileInputStream = new FileInputStream(tmpFile);
        FileChannel fileChannelRead = fileInputStream.getChannel()) {
      MappedByteBuffer mmbRead = fileChannelRead.map(MapMode.READ_ONLY, 0, fileChannelRead.size());
      // load() returns this buffer.
      assertEquals(mmbRead, mmbRead.load());
    }

    try (RandomAccessFile randomFile = new RandomAccessFile(tmpFile, "rw");
        FileChannel fileChannelReadWrite = randomFile.getChannel()) {
      MappedByteBuffer mmbReadWrite =
          fileChannelReadWrite.map(FileChannel.MapMode.READ_WRITE, 0, fileChannelReadWrite.size());
      assertEquals(mmbReadWrite, mmbReadWrite.load());
    }
  }
Example #2
0
 /**
  * Reads a Number160 key from {@code in}, memory-maps the file named after that
  * key under {@code path}, and decodes it into a {@link Data} object.
  *
  * @param in source providing the raw {@link Number160} key bytes
  * @return the decoded Data object
  * @throws IOException if the file cannot be opened, mapped, or decoded
  */
 private Data deserializeFile(DataInput in) throws IOException, FileNotFoundException {
   byte[] me = new byte[Number160.BYTE_ARRAY_SIZE];
   in.readFully(me);
   Number160 hash = new Number160(me);
   // try-with-resources: the original leaked the RandomAccessFile whenever
   // mapping or decoding threw before the final file.close() was reached.
   try (RandomAccessFile file = new RandomAccessFile(new File(path, hash.toString()), "r")) {
     FileChannel inChannel = file.getChannel();
     MappedByteBuffer buffer = inChannel.map(FileChannel.MapMode.READ_ONLY, 0, inChannel.size());
     buffer.load(); // fault the mapped pages into memory up front
     ByteBuf buf = Unpooled.wrappedBuffer(buffer);
     Data data = Data.decodeHeader(buf, signatureFactory);
     data.decodeBuffer(buf);
     data.decodeDone(buf, signatureFactory);
     return data;
   }
 }
  /**
   * Command-line entry point. Parses CLI switches, configures logging verbosity,
   * input encoding, output format, language, document type, document creation
   * time, locale and POS tagger, reads the input document via a memory-mapped
   * file, runs HeidelTime on it, and prints the annotated result to stdout as
   * UTF-8.
   *
   * <p>Exits the JVM with status -1 on any invalid or missing parameter and with
   * status 0 after printing the help dialog.
   *
   * @param args command-line tokens; tokens starting with '-' are switches
   *     (optionally followed by a value token), any other token is taken as the
   *     input document's path
   */
  public static void main(String[] args) {
    String docPath = null;
    for (int i = 0; i < args.length; i++) { // iterate over cli parameter tokens
      if (args[i].startsWith("-")) { // assume we found a switch
        // get the relevant enum
        CLISwitch sw = CLISwitch.getEnumFromSwitch(args[i]);
        if (sw == null) { // unsupported CLI switch
          logger.log(Level.WARNING, "Unsupported switch: " + args[i] + ". Quitting.");
          System.exit(-1);
        }

        if (sw.getHasFollowingValue()) { // handle values for switches
          if (args.length > i + 1
              && !args[i + 1].startsWith(
                  "-")) { // we still have an array index after this one and it's not a switch
            sw.setValue(args[++i]);
          } else { // value is missing or malformed
            logger.log(
                Level.WARNING, "Invalid or missing parameter after " + args[i] + ". Quitting.");
            System.exit(-1);
          }
        } else { // activate the value-less switches
          sw.setValue(null);
        }
      } else { // assume we found the document's path/name
        docPath = args[i];
      }
    }

    // display help dialog if HELP-switch is given
    if (CLISwitch.HELP.getIsActive()) {
      printHelp();
      System.exit(0);
    }

    // start off with the verbosity recognition -- lots of the other
    // stuff can be skipped if this is set too high
    if (CLISwitch.VERBOSITY2.getIsActive()) {
      logger.setLevel(Level.ALL);
      logger.log(Level.INFO, "Verbosity: '-vv'; Logging level set to ALL.");

      // output the found language resource folders
      String languagesList = "";
      for (String language : ResourceScanner.getInstance().getDetectedResourceFolders()) {
        languagesList += System.getProperty("line.separator") + "- " + language;
      }
      logger.log(Level.INFO, "Listing detected language folders:" + languagesList);
    } else if (CLISwitch.VERBOSITY.getIsActive()) {
      logger.setLevel(Level.INFO);
      logger.log(Level.INFO, "Verbosity: '-v'; Logging level set to INFO and above.");
    } else {
      logger.setLevel(Level.WARNING);
      logger.log(
          Level.INFO,
          "Verbosity -v/-vv NOT FOUND OR RECOGNIZED; Logging level set to WARNING and above.");
    }

    // Check input encoding
    String encodingType = null;
    if (CLISwitch.ENCODING.getIsActive()) {
      encodingType = CLISwitch.ENCODING.getValue().toString();
      logger.log(Level.INFO, "Encoding '-e': " + encodingType);
    } else {
      // Encoding type not found
      // NOTE(review): same getValue() call as above — presumably returns the
      // switch's built-in default (the log claims 'UTF-8'); confirm in CLISwitch.
      encodingType = CLISwitch.ENCODING.getValue().toString();
      logger.log(Level.INFO, "Encoding '-e': NOT FOUND OR RECOGNIZED; set to 'UTF-8'");
    }

    // Check output format
    OutputType outputType = null;
    if (CLISwitch.OUTPUTTYPE.getIsActive()) {
      outputType = OutputType.valueOf(CLISwitch.OUTPUTTYPE.getValue().toString().toUpperCase());
      logger.log(Level.INFO, "Output '-o': " + outputType.toString().toUpperCase());
    } else {
      // Output type not found
      // NOTE(review): falls back to the switch's default value; assumes that
      // default is already an OutputType instance — confirm in CLISwitch.
      outputType = (OutputType) CLISwitch.OUTPUTTYPE.getValue();
      logger.log(
          Level.INFO,
          "Output '-o': NOT FOUND OR RECOGNIZED; set to " + outputType.toString().toUpperCase());
    }

    // Check language
    Language language = null;
    if (CLISwitch.LANGUAGE.getIsActive()) {
      language = Language.getLanguageFromString((String) CLISwitch.LANGUAGE.getValue());

      if (language == Language.WILDCARD
          && !ResourceScanner.getInstance()
              .getDetectedResourceFolders()
              .contains(language.getName())) {
        logger.log(
            Level.SEVERE,
            "Language '-l': " + CLISwitch.LANGUAGE.getValue() + " NOT RECOGNIZED; aborting.");
        printHelp();
        System.exit(-1);
      } else {
        logger.log(Level.INFO, "Language '-l': " + language.getName());
      }
    } else {
      // Language not found
      language = Language.getLanguageFromString((String) CLISwitch.LANGUAGE.getValue());
      logger.log(
          Level.INFO, "Language '-l': NOT FOUND; set to " + language.toString().toUpperCase());
    }

    // Check type
    DocumentType type = null;
    if (CLISwitch.DOCTYPE.getIsActive()) {
      try {
        if (CLISwitch.DOCTYPE
            .getValue()
            .equals("narrative")) { // redirect "narrative" to "narratives"
          CLISwitch.DOCTYPE.setValue("narratives");
        }
        type = DocumentType.valueOf(CLISwitch.DOCTYPE.getValue().toString().toUpperCase());
      } catch (IllegalArgumentException e) {
        logger.log(
            Level.WARNING,
            "Type '-t': NOT RECOGNIZED. These are the available options: "
                + Arrays.asList(DocumentType.values()));
        System.exit(-1);
      }
      logger.log(Level.INFO, "Type '-t': " + type.toString().toUpperCase());
    } else {
      // Type not found
      type = (DocumentType) CLISwitch.DOCTYPE.getValue();
      logger.log(Level.INFO, "Type '-t': NOT FOUND; set to " + type.toString().toUpperCase());
    }

    // Check document creation time
    Date dct = null;
    if (CLISwitch.DCT.getIsActive()) {
      try {
        DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
        dct = formatter.parse(CLISwitch.DCT.getValue().toString());
        logger.log(Level.INFO, "Document Creation Time '-dct': " + dct.toString());
      } catch (Exception e) {
        // DCT was not parseable
        logger.log(Level.WARNING, "Document Creation Time '-dct': NOT RECOGNIZED. Quitting.");
        printHelp();
        System.exit(-1);
      }
    } else {
      if ((type == DocumentType.NEWS) || (type == DocumentType.COLLOQUIAL)) {
        // Dct needed — only NEWS/COLLOQUIAL documents require a creation time
        dct = (Date) CLISwitch.DCT.getValue();
        logger.log(
            Level.INFO,
            "Document Creation Time '-dct': NOT FOUND; set to local date ("
                + dct.toString()
                + ").");
      } else {
        logger.log(Level.INFO, "Document Creation Time '-dct': NOT FOUND; skipping.");
      }
    }

    // Handle locale switch
    String locale = (String) CLISwitch.LOCALE.getValue();
    Locale myLocale = null;
    if (CLISwitch.LOCALE.getIsActive()) {
      // check if the requested locale is available
      // (myLocale stays null if no case-insensitive match is found; the
      // setDefault(null) below then throws and is reported to the user)
      for (Locale l : Locale.getAvailableLocales()) {
        if (l.toString().toLowerCase().equals(locale.toLowerCase())) myLocale = l;
      }

      try {
        Locale.setDefault(myLocale); // try to set the locale
        logger.log(Level.INFO, "Locale '-locale': " + myLocale.toString());
      } catch (Exception e) { // if the above fails, spit out error message and available locales
        logger.log(
            Level.WARNING,
            "Supplied locale parameter couldn't be resolved to a working locale. Try one of these:");
        logger.log(
            Level.WARNING,
            Arrays.asList(Locale.getAvailableLocales()).toString()); // list available locales
        printHelp();
        System.exit(-1);
      }
    } else {
      // no -locale parameter supplied: just show default locale
      logger.log(
          Level.INFO,
          "Locale '-locale': NOT FOUND, set to environment locale: "
              + Locale.getDefault().toString());
    }

    // Read configuration from file
    String configPath = CLISwitch.CONFIGFILE.getValue().toString();
    try {
      logger.log(Level.INFO, "Configuration path '-c': " + configPath);

      readConfigFile(configPath);

      logger.log(Level.FINE, "Config initialized");
    } catch (Exception e) {
      e.printStackTrace();
      logger.log(
          Level.WARNING,
          "Config could not be initialized! Please supply the -c switch or "
              + "put a config.props into this directory.");
      printHelp();
      System.exit(-1);
    }

    // Set the preprocessing POS tagger
    POSTagger posTagger = null;
    if (CLISwitch.POSTAGGER.getIsActive()) {
      try {
        posTagger = POSTagger.valueOf(CLISwitch.POSTAGGER.getValue().toString().toUpperCase());
      } catch (IllegalArgumentException e) {
        logger.log(
            Level.WARNING,
            "Given POS Tagger doesn't exist. Please specify a valid one as listed in the help.");
        printHelp();
        System.exit(-1);
      }
      logger.log(Level.INFO, "POS Tagger '-pos': " + posTagger.toString().toUpperCase());
    } else {
      // Type not found
      posTagger = (POSTagger) CLISwitch.POSTAGGER.getValue();
      logger.log(
          Level.INFO,
          "POS Tagger '-pos': NOT FOUND OR RECOGNIZED; set to "
              + posTagger.toString().toUpperCase());
    }

    // Set whether or not to use the Interval Tagger
    Boolean doIntervalTagging = false;
    if (CLISwitch.INTERVALS.getIsActive()) {
      doIntervalTagging = CLISwitch.INTERVALS.getIsActive();
      logger.log(Level.INFO, "Interval Tagger '-it': " + doIntervalTagging.toString());
    } else {
      logger.log(
          Level.INFO,
          "Interval Tagger '-it': NOT FOUND OR RECOGNIZED; set to " + doIntervalTagging.toString());
    }

    // make sure we have a document path
    if (docPath == null) {
      logger.log(Level.WARNING, "No input file given; aborting.");
      printHelp();
      System.exit(-1);
    }

    // Run HeidelTime
    RandomAccessFile aFile = null;
    MappedByteBuffer buffer = null;
    FileChannel inChannel = null;
    PrintWriter pwOut = null;
    try {
      logger.log(Level.INFO, "Reading document using charset: " + encodingType);

      aFile = new RandomAccessFile(docPath, "r");
      inChannel = aFile.getChannel();
      buffer = inChannel.map(FileChannel.MapMode.READ_ONLY, 0, inChannel.size());
      // fault the mapped pages into memory before the byte-by-byte copy below
      buffer.load();
      byte[] inArr = new byte[(int) inChannel.size()];

      for (int i = 0; i < buffer.limit(); i++) {
        inArr[i] = buffer.get();
      }

      // double-newstring should not be necessary, but without this, it's not running on Windows (?)
      String input = new String(new String(inArr, encodingType).getBytes("UTF-8"), "UTF-8");

      HeidelTimeStandalone standalone =
          new HeidelTimeStandalone(language, type, outputType, null, posTagger, doIntervalTagging);
      String out = standalone.process(input, dct);

      // Print output always as UTF-8
      pwOut = new PrintWriter(new OutputStreamWriter(System.out, "UTF-8"));
      pwOut.println(out);
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      if (pwOut != null) {
        pwOut.close();
      }
      if (buffer != null) {
        // clear() only resets position/limit; it does not unmap the region —
        // the mapping is released when the buffer is garbage-collected
        buffer.clear();
      }
      if (inChannel != null) {
        try {
          inChannel.close();
        } catch (IOException e) {
        }
      }
      if (aFile != null) {
        try {
          aFile.close();
        } catch (IOException e) {
        }
      }
    }
  }
  /**
   * Initializes this instance's channel and maps the channel's entire content
   * into a {@link MappedByteBuffer}, then faults the mapped pages into memory.
   *
   * @param channel the open file channel to keep and map
   * @param mapMode the mapping mode (e.g. read-only or read-write)
   * @throws IOException if the channel size cannot be determined or mapping fails
   */
  private void init(FileChannel channel, FileChannel.MapMode mapMode) throws IOException {
    final long length = channel.size();
    this.channel = channel;
    this.mappedByteBuffer = channel.map(mapMode, 0L, length);
    this.mappedByteBuffer.load();
  }
Example #5
0
  /**
   * This constructor handles cases where the file exists and is non-empty.
   *
   * @param file The name of the file to be opened.
   * @param useDirectBuffers true if a buffer should be allocated using {@link
   *     ByteBuffer#allocateDirect(int)} rather than {@link ByteBuffer#allocate(int)}. This has no
   *     effect for the {@link BufferMode#Disk} and {@link BufferMode#Mapped} modes.
   * @param readOnly When true, the file is opened in a read-only mode and it is an error if the
   *     file does not exist.
   * @param forceWrites When true, the file is opened in "rwd" mode and individual IOs are forced to
   *     disk. This option SHOULD be false since we only need to write through to disk on commit,
   *     not on each IO.
   * @param writeCacheEnabled When <code>true</code>, the {@link DiskOnlyStrategy} will allocate a
   *     direct {@link ByteBuffer} from the {@link DirectBufferPool} to service as a write cache.
   * @param writeCacheBufferCount The #of buffers to allocate for the {@link WriteCacheService}.
   * @param validateChecksum When <code>true</code>, the checksum stored in the root blocks of an
   *     existing file will be validated when the file is opened. See {@link
   *     Options#VALIDATE_CHECKSUM}.
   * @param alternateRootBlock When <code>true</code> the prior root block will be used. This option
   *     may be used when a commit record is valid but the data associated with the commit point is
   *     invalid. There are two root blocks. Normally the one which has been most recently written
   *     will be loaded on restart. When this option is specified, the older of the two root blocks
   *     will be loaded instead. <strong>If you use this option and then do a commit then the more
   *     recent of the root blocks will be lost and any data associated with that commit point will
   *     be lost as well!</strong>
   * @param ignoreBadRootBlock When <code>true</code>, the application will be allowed to proceed
   *     with one damaged root block. The undamaged root block will be automatically chosen.
   * @param properties The configuration properties; retained on this instance.
   * @throws RuntimeException if there is a problem preparing the file for use by the journal.
   */
  FileMetadata(
      final File file,
      final boolean useDirectBuffers,
      final boolean readOnly,
      final ForceEnum forceWrites,
      final boolean writeCacheEnabled,
      final int writeCacheBufferCount,
      final boolean validateChecksum,
      final boolean alternateRootBlock,
      final boolean ignoreBadRootBlock,
      final Properties properties)
      throws RuntimeException {

    if (file == null) {
      throw new IllegalArgumentException();
    }

    if (!file.exists() || file.length() == 0) {
      throw new IllegalArgumentException(
          "File does not exist or is empty: " + file.getAbsolutePath());
    }

    if (readOnly && forceWrites != ForceEnum.No) {
      throw new IllegalArgumentException(
          "'"
              + Options.FORCE_WRITES
              + "'='"
              + forceWrites
              + "' may not be used with '"
              + Options.READ_ONLY
              + "'");
    }

    this.writeCacheEnabled = writeCacheEnabled;

    this.writeCacheBufferCount = writeCacheBufferCount;

    this.fileMode = (readOnly ? "r" : forceWrites.asFileMode());

    this.readOnly = readOnly;

    // Always true at this point (existence was verified above); kept so the
    // field reflects the state observed at construction time.
    this.exists = file.exists();

    this.file = file;

    this.properties = properties;

    try {

      /*
       * Open / create and obtain shared/exclusive lock if possible. Sets
       * [raf] as a side-effect.
       */
      opener.reopenChannel();

      /*
       * The file already exists (but not for temporary files).
       *
       * Note: this next line will throw IOException if there is a file
       * lock contention.
       *
       * Note: [raf] was initialized by [opener.reopenChannel()] above!
       */
      this.extent = raf.length();

      this.userExtent = extent - headerSize0;

      if (this.extent <= headerSize0) {
        /*
         * By throwing an exception for files that are not large enough
         * to contain the MAGIC, VERSION, and both root blocks we avoid
         * IO errors when trying to read those data and are able to
         * reject files based on whether they have bad magic, version,
         * or root blocks.
         */
        throw new RuntimeException(
            "File too small to contain a valid journal: " + file.getAbsoluteFile());
      }

      /*
       * Read the MAGIC and VERSION.
       *
       * Note: The code to read the MAGIC, VERSION, and root blocks is
       * shared by DumpJournal (code is copy by value) and in part by the
       * rollback() method on AbstractJournal.
       */
      raf.seek(0L);
      try {
        // Will throw IOException if there is file lock contention.
        magic = raf.readInt();
      } catch (IOException ex) {
        throw new RuntimeException(
            "Can not read magic. Is file locked by another process? file=" + file, ex);
      }
      if (magic != MAGIC) {
        throw new RuntimeException(
            "Bad journal magic: file=" + file + ", expected=" + MAGIC + ", actual=" + magic);
      }
      version = raf.readInt();
      if (version != VERSION1) {
        throw new RuntimeException(
            "Bad journal version: file=" + file + ", expected=" + VERSION1 + ", actual=" + version);
      }

      /*
       * Check root blocks (magic, timestamps), choose root block, read
       * constants (slotSize, segmentId).
       */
      final RootBlockUtility tmp =
          new RootBlockUtility(
              opener, file, validateChecksum, alternateRootBlock, ignoreBadRootBlock);
      this.rootBlock = tmp.rootBlock;

      this.bufferMode = BufferMode.getDefaultBufferMode(rootBlock.getStoreType());

      if (userExtent > bufferMode.getMaxExtent()) {
        // Verify that we can address this many bytes with this strategy.
        throw new RuntimeException(AbstractBufferStrategy.ERR_MAX_EXTENT);
      }

      // use the offset bits from the root block.
      this.offsetBits = rootBlock.getOffsetBits();

      // The offset into the user extent at which the next record will be written.
      this.nextOffset = rootBlock.getNextOffset();

      this.createTime = rootBlock.getCreateTime();

      this.closeTime = rootBlock.getCloseTime();

      if (closeTime != 0L && !readOnly) {
        throw new RuntimeException(
            "Journal is closed for writes: file=" + file + ", closedTime=" + closeTime);
      }

      switch (bufferMode) {
        case Direct:
          {
            // Allocate the buffer.
            buffer =
                (useDirectBuffers
                    ? ByteBuffer.allocateDirect((int) userExtent)
                    : ByteBuffer.allocate((int) userExtent));
            // Setup to read data from file into the buffer.
            if (nextOffset > Integer.MAX_VALUE) {
              throw new RuntimeException(
                  "This file is too large for a buffered mode: use " + BufferMode.Disk);
            }
            buffer.limit((int) nextOffset);
            buffer.position(0);
            if (nextOffset > 0) {
              // Read the file image into the direct buffer.
              FileChannelUtility.readAll(opener, buffer, headerSize0);
            }
            break;
          }
        case Mapped:
          {
            // Map the file.
            boolean loadMappedFile = false; // @todo expose as property.
            buffer =
                opener.reopenChannel().map(FileChannel.MapMode.READ_WRITE, headerSize0, extent);
            if (loadMappedFile) {
              /*
               * Load the image into mapped memory. Generally, I would
               * think that you are better off NOT loading the image. When
               * you want the image in memory, use the Direct mode
               * instead. It should be MUCH faster and has better control
               * over the amount and timing of the IO.
               */
              ((MappedByteBuffer) buffer).load();
            }
            break;
          }
        case Disk:
        case DiskWORM:
        case DiskRW:
          // Disk-backed modes keep no in-memory image of the store.
          buffer = null;
          break;
        default:
          throw new AssertionError();
      }

      /*
       * Note: there should be no processing required on restart since the
       * intention of transactions that did not commit will not be
       * visible.
       */
      this.useChecksums = useChecksums(rootBlock);

    } catch (IOException ex) {

      throw new RuntimeException("file=" + file, ex);
    }
  }
Example #6
0
  /**
   * Creates a temporary gzip-compressed representation of the given cache entry.
   *
   * <p>The compressed bytes are written to a temp file; for {@code HEAP} and
   * {@code MAPPED} entries the compressed file is additionally memory-mapped
   * (and, for {@code HEAP}, loaded into memory) and stored on the entry.
   * IO failures are logged at {@code FINE} and leave the entry unchanged.
   *
   * @param entry the cache entry whose plain file should be compressed
   */
  protected void compressFile(final FileCacheEntry entry) {
    try {
      final File tmpCompressedFile =
          File.createTempFile(
              String.valueOf(entry.plainFile.hashCode()), ".tmpzip", compressedFilesFolder);
      tmpCompressedFile.deleteOnExit();

      // Copy the plain file through a gzip stream. try-with-resources replaces
      // the original's verbose null-checked finally blocks and guarantees the
      // GZIPOutputStream is finished/closed before the file size is read.
      try (InputStream in = new FileInputStream(entry.plainFile);
          OutputStream out = new GZIPOutputStream(new FileOutputStream(tmpCompressedFile))) {
        final byte[] tmp = new byte[1024];
        int readNow;
        while ((readNow = in.read(tmp)) != -1) {
          out.write(tmp, 0, readNow);
        }
      }

      final long size = tmpCompressedFile.length();

      switch (entry.type) {
        case HEAP:
        case MAPPED:
          {
            try (FileInputStream cFis = new FileInputStream(tmpCompressedFile)) {
              final FileChannel cFileChannel = cFis.getChannel();

              final MappedByteBuffer compressedBb =
                  cFileChannel.map(FileChannel.MapMode.READ_ONLY, 0, size);

              if (entry.type == CacheType.HEAP) {
                // HEAP entries are served from memory: fault the pages in now.
                compressedBb.load();
              }

              entry.compressedBb = compressedBb;
            }

            break;
          }
        case FILE:
          {
            // FILE entries are served straight from disk; nothing to map.
            break;
          }

        default:
          throw new IllegalStateException("The type is not supported: " + entry.type);
      }

      entry.compressedFileSize = size;
      entry.compressedFile = tmpCompressedFile;
    } catch (IOException e) {
      LOGGER.log(Level.FINE, "Can not compress file: " + entry.plainFile, e);
    }
  }
Example #7
0
  /**
   * Map the file to a {@link ByteBuffer}.
   *
   * <p>Reserves cache capacity (mapped-memory budget for large files, heap
   * budget for small ones) before mapping. If the file is too large, or the
   * relevant cache budget is full, or mapping fails, {@code null} is returned
   * and — BUG FIX — any capacity reserved for this file is released again
   * (the original leaked the reservation on mapping failure, permanently
   * shrinking the cache budget).
   *
   * @param file the file to map
   * @return the preinitialized {@link FileCacheEntry}, or {@code null} if the
   *     file cannot be cached
   */
  private FileCacheEntry tryMapFileToBuffer(final File file) {

    final long size = file.length();
    if (size > getMaxEntrySize()) {
      return null;
    }

    CacheType type = null; // non-null once capacity has been reserved
    final ByteBuffer bb;
    FileChannel fileChannel = null;
    FileInputStream stream = null;
    try {
      if (size > getMinEntrySize()) {
        if (addMappedMemorySize(size) > getMaxLargeFileCacheSize()) {
          // Cache full
          subMappedMemorySize(size);
          return null;
        }

        type = CacheType.MAPPED;
      } else {
        if (addHeapSize(size) > getMaxSmallFileCacheSize()) {
          // Cache full
          subHeapSize(size);
          return null;
        }

        type = CacheType.HEAP;
      }

      stream = new FileInputStream(file);
      fileChannel = stream.getChannel();

      bb = fileChannel.map(FileChannel.MapMode.READ_ONLY, 0, size);

      if (type == CacheType.HEAP) {
        ((MappedByteBuffer) bb).load();
      }
    } catch (Exception e) {
      // Undo the capacity reservation made above so a failed map does not
      // permanently consume cache budget.
      if (type == CacheType.MAPPED) {
        subMappedMemorySize(size);
      } else if (type == CacheType.HEAP) {
        subHeapSize(size);
      }
      notifyProbesError(this, e);
      return null;
    } finally {
      if (stream != null) {
        try {
          stream.close();
        } catch (IOException ignored) {
          notifyProbesError(this, ignored);
        }
      }
      if (fileChannel != null) {
        try {
          fileChannel.close();
        } catch (IOException ignored) {
          notifyProbesError(this, ignored);
        }
      }
    }

    final FileCacheEntry entry = new FileCacheEntry(this);
    entry.type = type;
    entry.plainFileSize = size;
    entry.bb = bb;

    return entry;
  }