Example #1
0
 public RhizomeAck(RandomAccessFile ra, int length) throws IOException {
   this.bundleIdPrefix = new byte[BUNDLE_ID_PREFIX_BYTES];
   ra.readFully(this.bundleIdPrefix);
   this.offset = ra.readLong();
   // read optional message time field
   if (length >= BUNDLE_ID_PREFIX_BYTES + 16) this.messageTime = ra.readLong();
   else this.messageTime = 0;
 }
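This constructor reads a fixed-size bundle-ID prefix, an 8-byte offset, and an optional trailing 8-byte message time keyed off the record length. Below is a minimal sketch of a matching writer, assuming the same field order and field names; the write method itself is hypothetical and not part of the original class.

 // Hypothetical counterpart to the constructor above: writes the same fields
 // in the same order (prefix bytes, 8-byte offset, optional 8-byte message time).
 public void write(RandomAccessFile ra, boolean includeMessageTime) throws IOException {
   ra.write(this.bundleIdPrefix);
   ra.writeLong(this.offset);
   if (includeMessageTime) ra.writeLong(this.messageTime);
 }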
Example #2
0
  private boolean readIndex() {
    if (triedToReadIndex || !usePreindexedCache) {
      return false;
    }

    boolean ret = false;
    synchronized (this) {
      triedToReadIndex = true;
      RandomAccessFile raf = null;
      try {
        File indexFileName = getIndexFile();
        raf = new RandomAccessFile(indexFileName, "r");

        long fileStamp = raf.readLong();
        if (zipFile.lastModified() != fileStamp) {
          ret = false;
        } else {
          directories = new LinkedHashMap<RelativeDirectory, DirectoryEntry>();
          int numDirs = raf.readInt();
          for (int nDirs = 0; nDirs < numDirs; nDirs++) {
            int dirNameBytesLen = raf.readInt();
            byte[] dirNameBytes = new byte[dirNameBytesLen];
            raf.readFully(dirNameBytes); // read() may stop short; readFully fills the whole name buffer

            RelativeDirectory dirNameStr = getRelativeDirectory(new String(dirNameBytes, "UTF-8"));
            DirectoryEntry de = new DirectoryEntry(dirNameStr, this);
            de.numEntries = raf.readInt();
            de.writtenOffsetOffset = raf.readLong();
            directories.put(dirNameStr, de);
          }
          ret = true;
          zipFileLastModified = fileStamp;
        }
      } catch (Throwable t) {
        // Do nothing
      } finally {
        if (raf != null) {
          try {
            raf.close();
          } catch (Throwable tt) {
            // Do nothing
          }
        }
      }
      if (ret) {
        readFromIndex = true;
      }
    }

    return ret;
  }
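The reader above implies an index layout of: an 8-byte timestamp of the zip file, a directory count, then per directory a length-prefixed UTF-8 name, an entry count, and the offset of that directory's entry table. The sketch below is a hypothetical writer for that layout (not the original implementation), assuming the same zipFile, directories, DirectoryEntry.numEntries, and DirectoryEntry.writtenOffsetOffset members are available.

  // Hypothetical writer for the index format consumed by readIndex() above.
  private void writeIndex() throws IOException {
    RandomAccessFile raf = new RandomAccessFile(getIndexFile(), "rw");
    try {
      raf.writeLong(zipFile.lastModified()); // file stamp, validated on read
      raf.writeInt(directories.size()); // number of directories
      for (Map.Entry<RelativeDirectory, DirectoryEntry> e : directories.entrySet()) {
        byte[] dirNameBytes = e.getKey().toString().getBytes("UTF-8"); // assumes toString() yields the path
        raf.writeInt(dirNameBytes.length);
        raf.write(dirNameBytes);
        raf.writeInt(e.getValue().numEntries);
        raf.writeLong(e.getValue().writtenOffsetOffset);
      }
    } finally {
      raf.close();
    }
  }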
Example #3
0
 /** {@inheritDoc} */
 public long readLong() throws IOException {
   fileSize -= 8;
   if (fileSize < 0) {
     throw new EOFException();
   }
   return dataInput.readLong();
 }
Example #4
0
  @Override
  public ITmfEvent parseEvent(final ITmfContext context) {

    if (!(fEventStream instanceof TmfTraceStub)) {
      return null;
    }

    // Highly inefficient...
    final RandomAccessFile stream = ((TmfTraceStub) fEventStream).getStream();
    if (stream == null) {
      return null;
    }

    //           String name = eventStream.getName();
    //           name = name.substring(name.lastIndexOf('/') + 1);

    // no need to use synchronized since it's already covered by the calling method

    long location = 0;
    if (context != null && context.getLocation() != null) {
      location = (Long) context.getLocation().getLocationInfo();
      try {
        stream.seek(location);

        final long ts = stream.readLong();
        stream.readUTF(); /* Previously source, now unused */
        final String type = stream.readUTF();
        stream.readInt(); /* Previously reference, now unused */
        final int typeIndex = Integer.parseInt(type.substring(typePrefix.length()));
        final String[] fields = new String[typeIndex];
        for (int i = 0; i < typeIndex; i++) {
          fields[i] = stream.readUTF();
        }

        final StringBuilder content = new StringBuilder("[");
        if (typeIndex > 0) {
          content.append(fields[0]);
        }
        for (int i = 1; i < typeIndex; i++) {
          content.append(", ").append(fields[i]);
        }
        content.append("]");

        final TmfEventField root =
            new TmfEventField(ITmfEventField.ROOT_FIELD_ID, content.toString(), null);
        final ITmfEvent event =
            new TmfEvent(
                fEventStream,
                ITmfContext.UNKNOWN_RANK,
                fEventStream.createTimestamp(ts * 1000000L),
                fTypes[typeIndex],
                root);
        return event;
      } catch (final EOFException e) {
        // reached the end of the trace: fall through and return null
      } catch (final IOException e) {
        // unreadable event: fall through and return null
      }
    }
    return null;
  }
Example #5
0
  /**
   * Create a new DBWriter according to the input file name.
   *
   * @param fileName the name of the file
   * @exception FileNotFoundException from library call
   * @exception IOException from library call or if file is corrupted
   */
  public DBWriter(String fileName) throws IOException {
    outStream = new RandomAccessFile(fileName, "rw");

    lastPosition = outStream.length();

    // case this is a new database
    if (lastPosition == 0) {
      wroteColumnNames = false;

      headerSize = 0;
      numRows = 0;
      CRC = 0;
      needReposition = false;
    }
    // case this might be an old database
    else if (lastPosition > MIN_DATA_OFFSET) {
      // check that we have a valid database here
      checkID();

      wroteColumnNames = true;

      // read header data
      outStream.seek(HEAD_SIZE_OFFSET);
      headerSize = outStream.readLong();
      numRows = outStream.readLong();
      numColumns = outStream.readLong();

      // read description
      outStream.seek(headerSize - DESCRIPTION_SIZE - ID_SIZE - CRC_SIZE);
      description = "";
      for (int i = 0; i < DESCRIPTION_LENGTH; i++) description += outStream.readChar();
      description = description.trim();

      // read CRC
      outStream.seek(headerSize - CRC_SIZE);
      CRC = outStream.readInt();

      // by default we append to an existing database
      needReposition = true;
    }
    // case this is not a database
    else throw new IOException("Attempting to load invalid database");
  }
Example #6
0
 private long internalOffsetOfElement(RandomAccessFile randomSerializeIndexFile, long index)
     throws IOException {
   long offsetOffset = 8 * index;
   if (randomSerializeIndexFile.length() < offsetOffset + 8) {
     throw new IndexOutOfBoundsException("Invalid index: " + index + "!");
   }
   randomSerializeIndexFile.seek(offsetOffset);
   // if(logger.isDebugEnabled()) logger.debug("Offset of element {}: {}", index, result);
   return randomSerializeIndexFile.readLong();
 }
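The index file here is a flat array of 8-byte offsets, one per element, so locating element i costs one seek in the index plus one seek in the data file. A usage sketch under that assumption follows; the helper method and the data-file argument are hypothetical.

 // Hypothetical caller: look up the element's offset in the index file,
 // then position the data file at the start of the serialized element.
 private void seekToElement(RandomAccessFile randomSerializeIndexFile,
     RandomAccessFile randomSerializeFile, long index) throws IOException {
   long offset = internalOffsetOfElement(randomSerializeIndexFile, index);
   randomSerializeFile.seek(offset);
   // ... deserialize the element from randomSerializeFile ...
 }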
Example #7
0
  /**
   * Reads one record from the EUID index file.
   *
   * @return the record read, or null if an I/O error occurred
   */
  public EuidIndexFileRecord readRecord() {
    EuidIndexFileRecord r = null;

    byte[] bs = new byte[euidLenght];

    try {
      raf.readFully(bs); // read() may stop short; readFully guarantees the whole EUID is read
      r = new EuidIndexFileRecord(new String(bs), raf.readLong());
    } catch (IOException e) {
      logger.info(e.getMessage());
    }

    return r;
  }
Example #8
0
 public final long readLong() throws IOException {
   if (mIsLittleEndian) {
     mRaf.readFully(mByteBuffer, 0, 8);
     return (long) (mByteBuffer[7]) << 56
         | (long) (mByteBuffer[6] & 0xff) << 48
         | (long) (mByteBuffer[5] & 0xff) << 40
         | (long) (mByteBuffer[4] & 0xff) << 32
         | (long) (mByteBuffer[3] & 0xff) << 24
         | (long) (mByteBuffer[2] & 0xff) << 16
         | (long) (mByteBuffer[1] & 0xff) << 8
         | (long) (mByteBuffer[0] & 0xff);
   } else {
     return mRaf.readLong();
   }
 }
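RandomAccessFile.readLong() always reads big-endian, which is why the little-endian branch above reassembles the value by hand. An equivalent little-endian read can lean on java.nio.ByteBuffer instead; the sketch below assumes the same mRaf and 8-byte mByteBuffer fields and imports of java.nio.ByteBuffer and java.nio.ByteOrder.

 public final long readLongViaByteBuffer() throws IOException {
   // Same contract as the little-endian branch above: 8 bytes, reordered by ByteBuffer.
   mRaf.readFully(mByteBuffer, 0, 8);
   return ByteBuffer.wrap(mByteBuffer, 0, 8).order(ByteOrder.LITTLE_ENDIAN).getLong();
 }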
Example #9
0
  private NameFileIdEntry readNextNameIdEntry() throws IOException {
    try {
      final int nameSize = nameIdMapHolder.readInt();
      byte[] serializedName = new byte[nameSize];

      nameIdMapHolder.readFully(serializedName);

      final String name = OStringSerializer.INSTANCE.deserialize(serializedName, 0);
      final long fileId = nameIdMapHolder.readLong();

      return new NameFileIdEntry(name, fileId);
    } catch (EOFException eof) {
      return null;
    }
  }
Example #10
0
 protected long readHeader(RandomAccessFile raFile) throws IOException {
   raFile.seek(0);
   if (raFile.length() == 0) return -1;
   String versionHint = raFile.readUTF();
   if (!"GH".equals(versionHint))
     throw new IllegalArgumentException(
         "Not a GraphHopper file! Expected 'GH' as file marker but was " + versionHint);
   // use a separate version field
   int majorVersion = raFile.readInt();
   if (majorVersion != version())
     throw new IllegalArgumentException(
         "This GraphHopper file has the wrong version! "
             + "Expected "
             + version()
             + " but was "
             + majorVersion);
   long bytes = raFile.readLong();
   segmentSize(raFile.readInt());
   for (int i = 0; i < header.length; i++) {
     header[i] = raFile.readInt();
   }
   return bytes;
 }
Example #11
0
 @Benchmark
 public long readLongSlow() throws IOException {
   long value = raf.readLong();
   addToOffset(8);
   return value;
 }
Example #12
0
  public DMGMetadata(RandomAccessFile dmgFile) throws IOException {
    dmgFile.seek(dmgFile.length() - PLIST_ADDRESS_1);
    long plistBegin1 = dmgFile.readLong();
    long plistEnd = dmgFile.readLong();
    dmgFile.seek(dmgFile.length() - PLIST_ADDRESS_2);
    long plistBegin2 = dmgFile.readLong();
    long plistSize = dmgFile.readLong();

    rawData = new byte[(int) (dmgFile.length() - plistBegin1)];
    dmgFile.seek(plistBegin1);
    dmgFile.readFully(rawData);

    plistXmlData = new byte[(int) plistSize];
    dmgFile.seek(plistBegin1);
    dmgFile.readFully(plistXmlData);

    unknown1_256 = new byte[256];
    dmgFile.readFully(unknown1_256);

    LinkedList<PartitionBlockList> blockListList = new LinkedList<PartitionBlockList>();
    int length = dmgFile.readInt();
    byte[] fourcc = new byte[4];
    dmgFile.readFully(fourcc);
    String fourccString = new String(fourcc, "US-ASCII");
    dmgFile.seek(dmgFile.getFilePointer() - 4);
    while (fourccString.equals("mish")) {
      blockListList.add(new PartitionBlockList(dmgFile, length));
      length = dmgFile.readInt();
      dmgFile.readFully(fourcc);
      fourccString = new String(fourcc, "US-ASCII");
      dmgFile.seek(dmgFile.getFilePointer() - 4);
    }
    blockLists = blockListList.toArray(new PartitionBlockList[blockListList.size()]);

    unknown2_12 = new byte[12];
    dmgFile.readFully(unknown2_12);

    LinkedList<APMPartition> partitionList = new LinkedList<APMPartition>();
    byte[] currentPartitionEntry = new byte[0x200];
    dmgFile.readFully(currentPartitionEntry);
    byte[] pmSig = new byte[2];
    pmSig[0] = currentPartitionEntry[0];
    pmSig[1] = currentPartitionEntry[1];
    while (new String(pmSig, "US-ASCII").equals("PM")) {
      partitionList.addLast(new APMPartition(currentPartitionEntry));
      dmgFile.readFully(currentPartitionEntry);
      pmSig[0] = currentPartitionEntry[0];
      pmSig[1] = currentPartitionEntry[1];
    }
    while (onlyZeros(currentPartitionEntry)) dmgFile.readFully(currentPartitionEntry);
    partitions = partitionList.toArray(new APMPartition[partitionList.size()]);

    unknown3_unknown = new byte[(int) (dmgFile.length() - dmgFile.getFilePointer() - 512)];
    dmgFile.readFully(unknown3_unknown);

    koly = new byte[512];
    dmgFile.seek(dmgFile.length() - koly.length);
    dmgFile.readFully(koly);

    if (dmgFile.getFilePointer() != dmgFile.length())
      System.out.println(
          "MISCALCULATION! FP=" + dmgFile.getFilePointer() + " LENGTH=" + dmgFile.length());
  }
Example #13
0
    private void initEntries() {
      if (entriesInited) {
        return;
      }

      if (!zipFileIndex.readFromIndex) {
        int from =
            -Arrays.binarySearch(zipFileIndex.entries, new Entry(dirName, ZipFileIndex.MIN_CHAR))
                - 1;
        int to = -Arrays.binarySearch(zipFileIndex.entries, new Entry(dirName, MAX_CHAR)) - 1;

        for (int i = from; i < to; i++) {
          entries.add(zipFileIndex.entries[i]);
        }
      } else {
        File indexFile = zipFileIndex.getIndexFile();
        if (indexFile != null) {
          RandomAccessFile raf = null;
          try {
            raf = new RandomAccessFile(indexFile, "r");
            raf.seek(writtenOffsetOffset);

            for (int nFiles = 0; nFiles < numEntries; nFiles++) {
              // Read the name bytes
              int zfieNameBytesLen = raf.readInt();
              byte[] zfieNameBytes = new byte[zfieNameBytesLen];
              raf.readFully(zfieNameBytes); // read() may stop short; readFully fills the whole name buffer
              String eName = new String(zfieNameBytes, "UTF-8");

              // Read isDir
              boolean eIsDir = raf.readByte() != 0;

              // Read offset of bytes in the real Jar/Zip file
              int eOffset = raf.readInt();

              // Read size of the file in the real Jar/Zip file
              int eSize = raf.readInt();

              // Read compressed size of the file in the real Jar/Zip file
              int eCsize = raf.readInt();

              // Read java time stamp of the file in the real Jar/Zip file
              long eJavaTimestamp = raf.readLong();

              Entry rfie = new Entry(dirName, eName);
              rfie.isDir = eIsDir;
              rfie.offset = eOffset;
              rfie.size = eSize;
              rfie.compressedSize = eCsize;
              rfie.javatime = eJavaTimestamp;
              entries.add(rfie);
            }
          } catch (Throwable t) {
            // Do nothing
          } finally {
            try {
              if (raf != null) {
                raf.close();
              }
            } catch (Throwable t) {
              // Do nothing
            }
          }
        }
      }

      entriesInited = true;
    }
Example #14
0
    public long readLong() throws IOException {
      return raf.readLong();
    }
Example #15
0
 /** This reads a binary long from the file. */
 public long readBinaryLong(int col, int row) throws IOException {
   raf.seek((long) row * nBytesPerRow + columnStartAt[col]); // long arithmetic avoids int overflow on large files
   return raf.readLong();
 }
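For reference, the write-side counterpart of this fixed-width layout (each row occupies nBytesPerRow bytes; column col starts at columnStartAt[col] within its row) would look like the sketch below; the method is hypothetical and assumes the same raf, nBytesPerRow, and columnStartAt fields.

 /** Hypothetical counterpart: writes a binary long into the same fixed-width cell. */
 public void writeBinaryLong(int col, int row, long value) throws IOException {
   raf.seek((long) row * nBytesPerRow + columnStartAt[col]); // same cell addressing as the read
   raf.writeLong(value);
 }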
Example #16
0
  private void loadPluginsIntoClassLoader() {
    File pluginsFile = environment.pluginsFile();
    if (!pluginsFile.exists()) {
      return;
    }
    if (!pluginsFile.isDirectory()) {
      return;
    }

    ClassLoader classLoader = settings.getClassLoader();
    Class<?> classLoaderClass = classLoader.getClass();
    Method addURL = null;
    while (!classLoaderClass.equals(Object.class)) {
      try {
        addURL = classLoaderClass.getDeclaredMethod("addURL", URL.class);
        addURL.setAccessible(true);
        break;
      } catch (NoSuchMethodException e) {
        // no method, try the parent
        classLoaderClass = classLoaderClass.getSuperclass();
      }
    }
    if (addURL == null) {
      logger.debug(
          "Failed to find addURL method on classLoader [" + classLoader + "] to add methods");
      return;
    }

    File[] pluginsFiles = pluginsFile.listFiles();
    if (pluginsFiles == null) {
      // listFiles() can return null on an I/O error
      return;
    }
    for (File pluginFile : pluginsFiles) {
      if (!pluginFile.getName().endsWith(".zip")) {
        continue;
      }
      if (logger.isTraceEnabled()) {
        logger.trace("Processing [{}]", pluginFile);
      }

      String pluginNameNoExtension =
          pluginFile.getName().substring(0, pluginFile.getName().lastIndexOf('.'));
      File extractedPluginDir =
          new File(new File(environment.workFile(), "plugins"), pluginNameNoExtension);
      extractedPluginDir.mkdirs();

      File stampsDir = new File(new File(environment.workFile(), "plugins"), "_stamps");
      stampsDir.mkdirs();

      boolean extractPlugin = true;
      File stampFile = new File(stampsDir, pluginNameNoExtension + ".stamp");
      if (stampFile.exists()) {
        // read it, and check if it's the same size as the pluginFile
        RandomAccessFile raf = null;
        try {
          raf = new RandomAccessFile(stampFile, "r");
          long size = raf.readLong();
          if (size == pluginFile.length()) {
            extractPlugin = false;
            if (logger.isTraceEnabled()) {
              logger.trace("--- No need to extract plugin, same size [" + size + "]");
            }
          }
        } catch (Exception e) {
          // ignore and extract the plugin
        } finally {
          if (raf != null) {
            try {
              raf.close();
            } catch (IOException e) {
              // ignore
            }
          }
        }
      }

      if (extractPlugin) {
        if (logger.isTraceEnabled()) {
          logger.trace("--- Extracting plugin to [" + extractedPluginDir + "]");
        }
        deleteRecursively(extractedPluginDir, false);

        ZipFile zipFile = null;
        try {
          zipFile = new ZipFile(pluginFile);
          Enumeration<? extends ZipEntry> zipEntries = zipFile.entries();
          while (zipEntries.hasMoreElements()) {
            ZipEntry zipEntry = zipEntries.nextElement();
            if (!(zipEntry.getName().endsWith(".jar") || zipEntry.getName().endsWith(".zip"))) {
              continue;
            }
            String name = zipEntry.getName().replace('\\', '/');
            File target = new File(extractedPluginDir, name);
            Streams.copy(zipFile.getInputStream(zipEntry), new FileOutputStream(target));
          }
        } catch (Exception e) {
          logger.warn("Failed to extract plugin [" + pluginFile + "], ignoring...", e);
          continue;
        } finally {
          if (zipFile != null) {
            try {
              zipFile.close();
            } catch (IOException e) {
              // ignore
            }
          }
        }

        try {
          RandomAccessFile raf = new RandomAccessFile(stampFile, "rw");
          raf.writeLong(pluginFile.length());
          raf.close();
        } catch (Exception e) {
          // ignore
        }
      }

      try {
        for (File jarToAdd : extractedPluginDir.listFiles()) {
          if (!(jarToAdd.getName().endsWith(".jar") || jarToAdd.getName().endsWith(".zip"))) {
            continue;
          }
          addURL.invoke(classLoader, jarToAdd.toURI().toURL());
        }
      } catch (Exception e) {
        logger.warn("Failed to add plugin [" + pluginFile + "]", e);
      }
    }
  }
Example #17
0
    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#mergeNewEntries(long[], int, java.io.RandomAccessFile, java.io.RandomAccessFile)
     */
    protected void mergeNewEntries(RandomAccessFile inRAF, RandomAccessFile outRAF)
        throws IOException {
      final long buffLen = tblCnt.get();
      final TLCIterator itr = new TLCIterator(tbl);

      // Precompute the maximum value of the new file.
      // If this isn't the first merge, the index has
      // the last element of the disk file. In that case
      // the new maxVal is the larger element of the two
      // in-memory and on-disk elements.
      // The largest on-disk element is passed to the
      // iterator as a lower bound.
      long maxVal;
      if (index != null) {
        maxVal = itr.getLast(index[index.length - 1]);
      } else {
        maxVal = itr.getLast();
      }

      int indexLen = calculateIndexLen(buffLen);
      index = new long[indexLen];
      index[indexLen - 1] = maxVal;
      currIndex = 0;
      counter = 0;

      // initialize positions in "buff" and "inRAF"
      long value = 0L; // initialize only to make compiler happy
      boolean eof = false;
      if (fileCnt > 0) {
        try {
          value = inRAF.readLong();
        } catch (EOFException e) {
          eof = true;
        }
      } else {
        eof = true;
      }

      // merge while both lists still have elements remaining
      boolean eol = false;
      long fp = itr.next();
      while (!eof || !eol) {
        if ((value < fp || eol) && !eof) {
          writeFP(outRAF, value);
          try {
            value = inRAF.readLong();
          } catch (EOFException e) {
            eof = true;
          }
        } else {
          // prevent converting every long to String when assertion holds (this is expensive)
          if (value == fp) {
            Assert.check(false, EC.TLC_FP_VALUE_ALREADY_ON_DISK, String.valueOf(value));
          }
          writeFP(outRAF, fp);
          // we used one fp up, thus move to next one
          try {
            fp = itr.next();
          } catch (NoSuchElementException e) {
            // has read all elements?
            Assert.check(!itr.hasNext(), EC.GENERAL);
            Assert.check(itr.reads() == buffLen, EC.GENERAL);
            eol = true;
          }
        }
      }

      // both sets used up completely
      Assert.check(eof && eol, EC.GENERAL);

      // currIndex is amount of disk writes
      Assert.check(currIndex == indexLen - 1, EC.SYSTEM_INDEX_ERROR);

      // maintain object invariants
      fileCnt += buffLen;
    }
Example #18
0
 static EventQueueBackingStore get(
     File checkpointFile,
     File backupCheckpointDir,
     int capacity,
     String name,
     boolean upgrade,
     boolean shouldBackup,
     boolean compressBackup)
     throws Exception {
   File metaDataFile = Serialization.getMetaDataFile(checkpointFile);
   RandomAccessFile checkpointFileHandle = null;
   try {
     boolean checkpointExists = checkpointFile.exists();
     boolean metaDataExists = metaDataFile.exists();
     if (metaDataExists) {
       // if we have a metadata file but no checkpoint file, we have a problem
       // delete everything in the checkpoint directory and force
       // a full replay.
       if (!checkpointExists || checkpointFile.length() == 0) {
         LOG.warn(
             "MetaData file for checkpoint exists but checkpoint does not. Checkpoint = "
                 + checkpointFile
                 + ", metaDataFile = "
                 + metaDataFile);
         throw new BadCheckpointException(
             "The last checkpoint was not completed correctly, "
                 + "since Checkpoint file does not exist while metadata "
                 + "file does.");
       }
     }
     // brand new, use v3
     if (!checkpointExists) {
       if (!checkpointFile.createNewFile()) {
         throw new IOException("Cannot create " + checkpointFile);
       }
       return new EventQueueBackingStoreFileV3(
           checkpointFile, capacity, name, backupCheckpointDir, shouldBackup, compressBackup);
     }
     // v3 due to meta file, version will be checked by backing store
     if (metaDataExists) {
       return new EventQueueBackingStoreFileV3(
           checkpointFile, capacity, name, backupCheckpointDir, shouldBackup, compressBackup);
     }
     checkpointFileHandle = new RandomAccessFile(checkpointFile, "r");
     int version = (int) checkpointFileHandle.readLong();
     if (Serialization.VERSION_2 == version) {
       if (upgrade) {
         return upgrade(
             checkpointFile, capacity, name, backupCheckpointDir, shouldBackup, compressBackup);
       }
       return new EventQueueBackingStoreFileV2(checkpointFile, capacity, name);
     }
     LOG.error("Found version " + Integer.toHexString(version) + " in " + checkpointFile);
     throw new BadCheckpointException(
         "Checkpoint file exists with "
             + Serialization.VERSION_3
             + " but no metadata file found.");
   } finally {
     if (checkpointFileHandle != null) {
       try {
         checkpointFileHandle.close();
       } catch (IOException e) {
         LOG.warn("Unable to close " + checkpointFile, e);
       }
     }
   }
 }