Example #1
  // Single-pass copy attempt: if any target cannot accept the data, it is skipped
  // and true is returned instead of false.
  private static boolean doingCopy(
      TapeWriteStage ss,
      int byteTailPos,
      int primaryTailPos,
      int totalPrimaryCopy,
      int totalBytesCopy) {

    IntBuffer primaryInts = RingBuffer.wrappedStructuredLayoutRingBuffer(ss.source);
    ByteBuffer secondaryBytes = RingBuffer.wrappedUnstructuredLayoutRingBufferA(ss.source);

    primaryInts.position(primaryTailPos);
    primaryInts.limit(
        primaryTailPos
            + totalPrimaryCopy); // TODO: AA, this will not work on the wrap, we must mask and do
                                 // multiple copies

    secondaryBytes.position(byteTailPos);
    secondaryBytes.limit(byteTailPos + totalBytesCopy);

    ss.header.clear();
    ss.headerAsInts.put(totalBytesCopy + (totalPrimaryCopy << 2));
    ss.headerAsInts.put(totalPrimaryCopy << 2);

    // TODO: must return false if there is no room to write.

    // TODO: BB, this creates a bit of garbage for the map, perhaps we should map larger blocks
    // expecting to use them for multiple writes.
    MappedByteBuffer mapped;
    try {
      mapped =
          ss.fileChannel.map(
              MapMode.READ_WRITE,
              ss.fileChannel.position(),
              8 + totalBytesCopy + (totalPrimaryCopy << 2));
      mapped.put(ss.header);

      IntBuffer asIntBuffer = mapped.asIntBuffer();
      asIntBuffer.position(2);
      asIntBuffer.put(primaryInts);

      mapped.position(mapped.position() + (totalPrimaryCopy << 2));
      mapped.put(secondaryBytes);

    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return true;
  }
Example #2
 private int getNextPosition(int increment) {
   int result = nextPosition;
   nextPosition = nextPosition + increment;
   // persist the updated next-position marker at the start of the mapped data buffer
   mappedDataBuffer.position(0);
   mappedDataBuffer.putInt(nextPosition);
   return result;
 }
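The mappedDataBuffer and nextPosition fields are assumed by this snippet; a minimal sketch of how they might be initialized, under the assumption that the first int of the mapped data file persists the next free write position (the initial value of 4 is purely illustrative):

  // Hypothetical initialization of the fields used by getNextPosition() above.
  private void openDataFile(File file, int capacity) throws IOException {
    RandomAccessFile raf = new RandomAccessFile(file, "rw");
    mappedDataBuffer = raf.getChannel().map(MapMode.READ_WRITE, 0, capacity);
    nextPosition = mappedDataBuffer.getInt(0); // absolute read of the persisted marker
    if (nextPosition == 0) {
      nextPosition = 4; // fresh file: reserve space for the marker itself (assumed layout)
      mappedDataBuffer.putInt(0, nextPosition);
    }
  }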
Example #3
  private static void parseData() throws IOException {
    TreeSet<String> fs = scanPath();
    parseMeta();
    if (fs.size() == 0) return;
    String absolutePath = new File(fs.first()).getCanonicalPath();
    String absolutePath2 = new File(name).getCanonicalPath();
    if (!absolutePath.equals(absolutePath2)) {
      System.out.println("meta file name does not match the first file, using the first file");
      pos = 0;
    }

    for (String filename : fs) {
      RandomAccessFile f = new RandomAccessFile(filename, "r");
      MappedByteBuffer map = f.getChannel().map(MapMode.READ_ONLY, 0, f.length());
      map.position((int) pos);
      while (map.hasRemaining()) {
        int size = map.getInt();
        byte[] c = new byte[size];
        map.get(c);
        Message m = MessageFactory.getInstance().createMessageFrom(c);
        m.decompress();
        System.out.println("get content: " + asString(m));
      }
      map.clear();
      map = null;
      f.close();
      pos = 0;
    }
  }
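scanPath() is not shown in this snippet; a hedged sketch of what such a helper might look like, assuming it returns the data files under the `path` directory (the field used by parseMeta()) in sorted order and skips the meta file itself:

  // Hypothetical scanPath(): collects data file names in lexicographic order.
  private static TreeSet<String> scanPath() {
    TreeSet<String> files = new TreeSet<String>();
    File[] entries = new File(path).listFiles();
    if (entries != null) {
      for (File entry : entries) {
        if (entry.isFile() && !entry.getName().equals("meta")) {
          files.add(entry.getPath());
        }
      }
    }
    return files;
  }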
Example #4
  protected void sendMessage(String message) {
    buffer.position(0);
    for (int i = 0; i < message.length(); ++i) {
      buffer.putChar(message.charAt(i));
    }

    buffer.putChar('\0');
  }
Example #5
 private void writeRecordHeaderToMappedBuffer(RecordHeader header) {
   mappedDataBuffer.position(header.getPosition());
   mappedDataBuffer.putInt(header.getDataSize());
   mappedDataBuffer.put(header.getFragmented());
   if (header.isFragmented()) {
     mappedDataBuffer.putInt(header.getNextPos());
   }
 }
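A minimal sketch of the RecordHeader class assumed by this example (and by the matching read method in Example #13): a position, a data size, and an optional next-fragment pointer, with getSize() reporting the serialized header length. The field layout and sizes here are assumptions for illustration, chosen to match what the read/write methods actually access.

  // Hypothetical RecordHeader matching the fields read and written in these examples.
  public class RecordHeader {
    private int position;    // offset of this record in the mapped buffer
    private int dataSize;    // length of the record payload in bytes
    private byte fragmented; // non-zero when the record continues elsewhere
    private int nextPos;     // offset of the next fragment (only if fragmented)

    public int getPosition() { return position; }
    public void setPosition(int position) { this.position = position; }
    public int getDataSize() { return dataSize; }
    public void setDataSize(int dataSize) { this.dataSize = dataSize; }
    public byte getFragmented() { return fragmented; }
    public void setFragmented(byte fragmented) { this.fragmented = fragmented; }
    public boolean isFragmented() { return fragmented != 0; }
    public int getNextPos() { return nextPos; }
    public void setNextPos(int nextPos) { this.nextPos = nextPos; }

    // serialized size: 4 (dataSize) + 1 (flag) + 4 (nextPos, only when fragmented)
    public int getSize() { return isFragmented() ? 9 : 5; }
  }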
Example #6
 /**
  * @param offset starting offset of the area record
  * @return the area name string
  */
 private String readArea(int offset) {
   mbb.position(offset);
   byte b = mbb.get();
   if (b == REDIRECT_MODE_1 || b == REDIRECT_MODE_2) {
     int areaOffset = readInt3();
     if (areaOffset == 0) return Message.unknown_area;
     else return readString(areaOffset);
   } else return readString(offset);
 }
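readInt3() is not included in these snippets; a minimal sketch, assuming it reads a 3-byte little-endian offset (the encoding used by QQWry-style IP database files) from the shared mbb buffer:

  // Hypothetical readInt3(): reads a 3-byte little-endian unsigned integer
  // from the current position of the mbb field used throughout these examples.
  private int readInt3() {
    return (mbb.get() & 0xFF)
        | ((mbb.get() & 0xFF) << 8)
        | ((mbb.get() & 0xFF) << 16);
  }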
Example #7
  /**
   * Process an entry recursively. If a leaf node is found, and it's the version node, return it.
   *
   * @param tree The list of nodes we've been to.
   * @param file The file we're processing.
   * @param entry A pointer to the start of the entry.
   * @param rsrcStart A pointer to the beginning of the rsrc section.
   * @param rsrcVirtualToRaw The conversion between the virtual and raw address.
   * @return The version, or 0 if it wasn't found in this entry.
   * @throws IOException If there's an error finding the version.
   */
  private static int processEntry(
      LinkedList<Integer> tree,
      MappedByteBuffer file,
      int entry,
      int rsrcStart,
      int rsrcVirtualToRaw,
      boolean byteorder)
      throws IOException {
    /* The address of the next node, or the address of the data.  The left-most bit tells us which
     * address this actually is. */
    int nextAddress = file.getInt(entry + 4);
    /* The version is stored in this so it can be returned. */
    int version;
    /* The size of the data */
    int dataSize;
    /* The buffer where we store the data (will be dataSize bytes) */
    byte[] buffer;
    /* The address of the data within the file (converted from the RVA) */
    int rawDataAddress;

    /* Add the identifier to the tree */
    tree.addLast(file.getInt(entry + 0));

    /* Check if it's a branch by checking the left-most bit.  If it's set, it's a branch. */
    if ((nextAddress & 0x80000000) != 0) {
      /* It's a branch, so move down to the next level. */
      version =
          processResourceRecord(
              tree, file, nextAddress & 0x7FFFFFFF, rsrcStart, rsrcVirtualToRaw, byteorder);

      /* We found the version and don't care about anything else */
      if (version != 0) return version;
    } else {
      /* It's a leaf; check if it's RT_VERSION.  If it is, we're done! */
      if (tree.get(0) == RT_VERSION) {
        /* Convert the relative address to the actual address by using rsrcVirtualToRaw */
        rawDataAddress = file.getInt(rsrcStart + nextAddress) + rsrcVirtualToRaw;
        /* Get the data size */
        dataSize = file.getInt(rsrcStart + nextAddress + 4);
        /* Allocate memory in the buffer to store the incoming data */
        buffer = new byte[dataSize];

        /* Set the position in the file to the address of the data */
        /* Get the data */
        file.position(rawDataAddress);
        file.get(buffer);

        /* Combine the data and return it. */
        if (byteorder)
          return buffer[0x3C] << 24 | buffer[0x3E] << 16 | buffer[0x38] << 8 | buffer[0x3A] << 0;
        else return buffer[0x3A] << 24 | buffer[0x38] << 16 | buffer[0x3E] << 8 | buffer[0x3C] << 0;
      }
    }

    return 0;
  }
Example #8
  /**
   * Does the same thing as <code>readFully</code> does, but without copying data (thread safe)
   *
   * @param length length of the bytes to read
   * @return buffer with portion of file content
   * @throws IOException on any fail of I/O operation
   */
  public synchronized ByteBuffer readBytes(int length) throws IOException {
    int remaining = buffer.remaining() - position;
    if (length > remaining)
      throw new IOException(
          String.format(
              "mmap segment underflow; remaining is %d but %d requested", remaining, length));

    if (length == 0) return ByteBufferUtil.EMPTY_BYTE_BUFFER;

    ByteBuffer bytes = buffer.duplicate();
    bytes.position(buffer.position() + position).limit(buffer.position() + position + length);
    position += length;

    // we have to copy the data in case we unreference the underlying sstable.  See CASSANDRA-3179
    ByteBuffer clone = ByteBuffer.allocate(bytes.remaining());
    clone.put(bytes);
    clone.flip();
    return clone;
  }
Example #9
  protected String receiveMessage() {
    String message = new String();
    buffer.position(0);

    for (char c = buffer.getChar(); c != '\0'; c = buffer.getChar()) {
      message += c;
    }

    return message;
  }
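sendMessage() and receiveMessage() both rely on a shared `buffer` field; a hedged sketch of how such a buffer might be obtained by mapping a common file from both processes (the file name and size are made up for illustration):

  // Hypothetical setup for the shared buffer used by sendMessage()/receiveMessage():
  // both processes map the same file, so a message written by one side is visible to the other.
  private MappedByteBuffer openSharedBuffer(String fileName) throws IOException {
    try (RandomAccessFile file = new RandomAccessFile(fileName, "rw")) { // e.g. "ipc.dat" (made up)
      // 4096 bytes is arbitrary; it only needs to hold the longest message plus the '\0' terminator
      return file.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, 4096);
    } // the mapping stays valid after the file is closed
  }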
Example #10
  public void test_position() throws IOException {
    File tmp = File.createTempFile("hmy", "tmp");
    tmp.deleteOnExit();
    RandomAccessFile f = new RandomAccessFile(tmp, "rw");
    FileChannel ch = f.getChannel();
    MappedByteBuffer mbb = ch.map(MapMode.READ_WRITE, 0L, 100L);
    ch.close();

    mbb.putInt(1, 1); // absolute put: writes at index 1 without moving the position
    mbb.position(50);
    mbb.putInt(50); // relative put at positions 50..53

    mbb.flip(); // limit = 54, position = 0
    mbb.get(); // skip the byte at index 0
    assertEquals(1, mbb.getInt()); // reads the int written at index 1

    mbb.position(50);
    assertEquals(50, mbb.getInt()); // reads the int written at index 50
  }
Example #11
 /**
  * Reads the four-byte IP address at offset into the ip array. The result is in big-endian
  * order, but the file stores the address in little-endian order, so the bytes are swapped.
  *
  * @param offset
  * @param ip
  */
 private void readIP(int offset, byte[] ip) {
   mbb.position(offset);
   mbb.get(ip);
   byte temp = ip[0];
   ip[0] = ip[3];
   ip[3] = temp;
   temp = ip[1];
   ip[1] = ip[2];
   ip[2] = temp;
 }
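For comparison, a sketch of the same conversion that lets the buffer decode the little-endian value instead of swapping bytes by hand. This is an illustrative alternative, not code from the original project; it assumes the same mbb field and that the buffer normally uses Java's default big-endian order.

  // Hypothetical variant of readIP() using ByteOrder instead of a manual swap.
  private void readIPUsingOrder(int offset, byte[] ip) {
    mbb.position(offset);
    mbb.order(ByteOrder.LITTLE_ENDIAN);
    int value = mbb.getInt();        // decoded as little-endian
    mbb.order(ByteOrder.BIG_ENDIAN); // restore the default order
    ip[0] = (byte) (value >>> 24);   // store big-endian, most significant byte first
    ip[1] = (byte) (value >>> 16);
    ip[2] = (byte) (value >>> 8);
    ip[3] = (byte) value;
  }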
Example #12
 public void nextFrame() throws IOException {
   if (!client.writeBusy()) {
     int nextPos = buffer.position();
     nextPos = Math.min(nextPos + frameSize, fileSize);
     buffer.limit(nextPos);
     client.write(buffer, false);
     if (nextPos >= fileSize) {
       this.close();
     }
   }
 }
Example #13
 private RecordHeader readRecordHeaderFromMappedBuffer(int pos) {
   RecordHeader header = new RecordHeader();
   mappedDataBuffer.position(pos);
   header.setPosition(pos);
   header.setDataSize(mappedDataBuffer.getInt());
   header.setFragmented(mappedDataBuffer.get());
   if (header.isFragmented()) {
     header.setNextPos(mappedDataBuffer.getInt());
   }
   return header;
 }
Example #14
 /**
  * Reads a null-terminated string from the memory-mapped file at the given offset.
  *
  * @param offset starting offset of the string
  * @return the string read, or an empty string on error
  */
 private String readString(int offset) {
   try {
     mbb.position(offset);
     int i;
     for (i = 0, buf[i] = mbb.get(); buf[i] != 0; buf[++i] = mbb.get()) ;
     if (i != 0) return Util.getString(buf, 0, i, "GBK");
   } catch (IllegalArgumentException e) {
     LogFactory.log("", Level.ERROR, e);
   }
   return "";
 }
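Util.getString() is not shown here; a plausible sketch, assuming it simply decodes the byte range with the requested charset and falls back to the platform default when the charset is unsupported:

  // Hypothetical Util.getString() helper matching the call site above.
  public static String getString(byte[] b, int offset, int len, String encoding) {
    try {
      return new String(b, offset, len, encoding);
    } catch (UnsupportedEncodingException e) {
      return new String(b, offset, len); // fall back to the platform default charset
    }
  }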
Example #15
 /**
  * @see java.io.RandomAccessFile#read(byte[], int, int)
  * @param bytes byte[]
  * @param off int offset
  * @param len int length
  * @return int bytes read or -1 on EOF
  * @throws IOException
  */
 public int read(byte bytes[], int off, int len) throws IOException {
   int pos = mappedByteBuffer.position();
   int limit = mappedByteBuffer.limit();
   if (pos == limit) return -1; // EOF
   int newlimit = pos + len - off;
   if (newlimit > limit) {
     len = limit - pos; // don't read beyond EOF
   }
   mappedByteBuffer.get(bytes, off, len);
   return len;
 }
Example #16
 /**
  * Given the offset of an IP country/area record, returns an IPLocation structure. This method
  * is used with the memory-mapped file mode.
  *
  * @param offset starting offset of the country record
  * @return the IPLocation object
  */
 private IPLocation getIPLocation(int offset) {
   // skip the 4-byte IP
   mbb.position(offset + 4);
   // read the first byte to check whether it is a flag byte
   byte b = mbb.get();
   if (b == REDIRECT_MODE_1) {
     // read the country offset
     int countryOffset = readInt3();
     // jump to that offset
     mbb.position(countryOffset);
     // check the flag byte again, because this location may still be a redirect
     b = mbb.get();
     if (b == REDIRECT_MODE_2) {
       loc.setCountry(readString(readInt3()));
       mbb.position(countryOffset + 4);
     } else loc.setCountry(readString(countryOffset));
     // read the area flag
     loc.setArea(readArea(mbb.position()));
   } else if (b == REDIRECT_MODE_2) {
     loc.setCountry(readString(readInt3()));
     loc.setArea(readArea(offset + 8));
   } else {
     loc.setCountry(readString(mbb.position() - 1));
     loc.setArea(readArea(mbb.position()));
   }
   return loc;
 }
Example #17
  public double saveBinaryFileNoBuffer() throws IOException {
    double time = System.nanoTime();
    File file =
        new File("C:\\Users\\rvanduijnhoven\\Documents\\jsfoutput\\binFileWithoutBuffer.bin");
    FileChannel fileChannel = null;
    MappedByteBuffer map = null;
    int counter = 0;
    try {
      //            fileOut = new FileOutputStream(file);
      //            outPut = new DataOutputStream(fileOut);
      fileChannel = new RandomAccessFile(file, "rw").getChannel();
      map = fileChannel.map(FileChannel.MapMode.READ_WRITE, 0, 4096 * 128 * 128);
      counter = edges.size();
      for (Edge e : edges) {
        map.putDouble(e.X1);
        map.putDouble(e.Y1);
        map.putDouble(e.X2);
        map.putDouble(e.Y2);
        map.putDouble(e.color.getRed());
        map.putDouble(e.color.getGreen());
        map.putDouble(e.color.getBlue());
      }
    } catch (Exception ex) {
      ex.printStackTrace();
    }
    edges.clear();
    map.position(0);
    // Now read every edge from the file and draw it.
    try {
      //            fileIn = new FileInputStream(file);
      //            inPut = new DataInputStream(fileIn);
      fileChannel = new RandomAccessFile(file, "r").getChannel();
      map = fileChannel.map(FileChannel.MapMode.READ_ONLY, 0, 4096 * 128 * 128);
      for (int i = 0; i < counter; i++) { // read exactly the 'counter' edges that were written
        double X1 = map.getDouble();
        double Y1 = map.getDouble();
        double X2 = map.getDouble();
        double Y2 = map.getDouble();
        double red = map.getDouble();
        double green = map.getDouble();
        double blue = map.getDouble();

        Edge e = new Edge(X1, Y1, X2, Y2, new Color(red, green, blue, 1));
        drawEdge(e);
      }
    } catch (Exception ex) {
      ex.printStackTrace();
    }

    return System.nanoTime() - time;
  }
Example #18
  @Override
  public void put(StoredBlock block) throws BlockStoreException {
    final MappedByteBuffer buffer = this.buffer;
    if (buffer == null) throw new BlockStoreException("Store closed");

    lock.lock();
    try {
      int cursor = getRingCursor(buffer);
      if (cursor == getFileSize()) {
        // Wrapped around.
        cursor = FILE_PROLOGUE_BYTES;
      }
      buffer.position(cursor);
      Sha256Hash hash = block.getHeader().getHash();
      notFoundCache.remove(hash);
      buffer.put(hash.getBytes());
      block.serializeCompact(buffer);
      setRingCursor(buffer, buffer.position());
      blockCache.put(hash, block);
    } finally {
      lock.unlock();
    }
  }
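getRingCursor() and setRingCursor() are not part of the snippet; a hedged sketch of helpers like these, assuming the write cursor is kept as an int at a fixed offset inside the file prologue (the offset value 4 is an illustrative choice, not taken from the example):

  // Hypothetical ring-cursor helpers used by put()/get() above.
  private int getRingCursor(ByteBuffer buffer) {
    return buffer.getInt(4); // absolute read; does not move the buffer position
  }

  private void setRingCursor(ByteBuffer buffer, int newCursor) {
    buffer.putInt(4, newCursor); // absolute write; does not move the buffer position
  }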
Example #19
  @Override
  public void setChainHead(StoredBlock chainHead) throws BlockStoreException {
    final MappedByteBuffer buffer = this.buffer;
    if (buffer == null) throw new BlockStoreException("Store closed");

    lock.lock();
    try {
      lastChainHead = chainHead;
      byte[] headHash = chainHead.getHeader().getHash().getBytes();
      buffer.position(8);
      buffer.put(headHash);
    } finally {
      lock.unlock();
    }
  }
Example #20
  /**
   * Source: http://stackoverflow.com/questions/4349075/bitmapfactory-decoderesource
   * -returns-a-mutable-bitmap-in-android-2-2-and-an-immu
   *
   * <p>Converts an immutable bitmap to a mutable bitmap. This operation doesn't allocate more
   * memory than is already allocated.
   *
   * @param imgIn - Source image. It will be released and should not be used any more.
   * @return a copy of imgIn, but mutable.
   */
  public static Bitmap convertBitmapToMutable(Bitmap imgIn) {
    try {
      // This is the file we are going to use temporarily to save the bytes.
      // This file will not be an image; it will store the raw image data.
      File file = new File(MyApp.context.getFilesDir() + File.separator + "temp.tmp");

      // Open a RandomAccessFile.
      // Make sure you have added the uses-permission
      // android:name="android.permission.WRITE_EXTERNAL_STORAGE"
      // to your AndroidManifest.xml file.
      RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw");

      // get the width and height of the source bitmap.
      int width = imgIn.getWidth();
      int height = imgIn.getHeight();
      Config type = imgIn.getConfig();

      // Copy the byte to the file
      // Assume source bitmap loaded using options.inPreferredConfig =
      // Config.ARGB_8888;
      FileChannel channel = randomAccessFile.getChannel();
      MappedByteBuffer map = channel.map(MapMode.READ_WRITE, 0, imgIn.getRowBytes() * height);
      imgIn.copyPixelsToBuffer(map);
      // recycle the source bitmap; it will no longer be used.
      imgIn.recycle();
      System.gc(); // try to force the bytes from the imgIn to be released

      // Create a new bitmap to load the bitmap again. Probably the memory
      // will be available.
      imgIn = Bitmap.createBitmap(width, height, type);
      map.position(0);
      // load it back from temporary
      imgIn.copyPixelsFromBuffer(map);
      // close the temporary file and channel, then delete the file
      channel.close();
      randomAccessFile.close();

      // delete the temporary file
      file.delete();

    } catch (FileNotFoundException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    }

    return imgIn;
  }
Example #21
 public void readFile(File file) {
   try {
     FileChannel channel = new FileInputStream(file).getChannel();
     MappedByteBuffer bb = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
     IntBuffer ib = bb.asIntBuffer();
     _nx = ib.get();
     _ny = ib.get();
     _nz = ib.get();
     bb.position(4 * ib.position());
     DoubleBuffer db = bb.asDoubleBuffer();
     if (_a == null || _a.length != _nx * _ny * _nz) _a = new double[_nx * _ny * _nz];
     db.get(_a);
     channel.close();
   } catch (IOException e) {
     System.out.println(e.getMessage());
   }
 }
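A hedged sketch of the matching writer for readFile() above, assuming the same layout (three ints followed by the double array) and the _nx/_ny/_nz/_a fields from the example:

  // Hypothetical counterpart to readFile(): writes the header ints and the double array
  // in the layout the reader expects.
  public void writeFile(File file) {
    try (RandomAccessFile raf = new RandomAccessFile(file, "rw")) {
      FileChannel channel = raf.getChannel();
      long size = 3 * 4L + (long) _a.length * 8;
      MappedByteBuffer bb = channel.map(FileChannel.MapMode.READ_WRITE, 0, size);
      IntBuffer ib = bb.asIntBuffer();
      ib.put(_nx);
      ib.put(_ny);
      ib.put(_nz);
      bb.position(4 * ib.position()); // skip the 12 header bytes, as the reader does
      DoubleBuffer db = bb.asDoubleBuffer();
      db.put(_a);
      bb.force(); // flush the mapped pages to disk
    } catch (IOException e) {
      System.out.println(e.getMessage());
    }
  }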
Example #22
  @Override
  @Nullable
  public StoredBlock get(Sha256Hash hash) throws BlockStoreException {
    final MappedByteBuffer buffer = this.buffer;
    if (buffer == null) throw new BlockStoreException("Store closed");

    lock.lock();
    try {
      StoredBlock cacheHit = blockCache.get(hash);
      if (cacheHit != null) return cacheHit;
      if (notFoundCache.get(hash) != null) return null;

      // Starting from the current tip of the ring, work backwards until we have either found
      // the block or wrapped around.
      int cursor = getRingCursor(buffer);
      final int startingPoint = cursor;
      final int fileSize = getFileSize();
      final byte[] targetHashBytes = hash.getBytes();
      byte[] scratch = new byte[32];
      do {
        cursor -= RECORD_SIZE;
        if (cursor < FILE_PROLOGUE_BYTES) {
          // We hit the start, so wrap around.
          cursor = fileSize - RECORD_SIZE;
        }
        // Cursor is now at the start of the next record to check, so read the hash and compare it.
        buffer.position(cursor);
        buffer.get(scratch);
        if (Arrays.equals(scratch, targetHashBytes)) {
          // Found the target.
          StoredBlock storedBlock = StoredBlock.deserializeCompact(params, buffer);
          blockCache.put(hash, storedBlock);
          return storedBlock;
        }
      } while (cursor != startingPoint);
      // Not found.
      notFoundCache.put(hash, notFoundMarker);
      return null;
    } catch (ProtocolException e) {
      throw new RuntimeException(e); // Cannot happen.
    } finally {
      lock.unlock();
    }
  }
Example #23
 private static void parseMeta() {
   try {
     RandomAccessFile f = new RandomAccessFile(path + "meta", "r");
     MappedByteBuffer mbb = f.getChannel().map(MapMode.READ_ONLY, 0, f.length());
     mbb.position(0);
     pos = mbb.getLong();
     long ck = mbb.getLong();
     int len = mbb.getInt();
     byte[] dst = new byte[len];
     mbb.get(dst);
     name = new String(dst, Charset.forName("UTF-8"));
     System.out.println("file: " + name + " pos: " + pos + " checksum: " + ck);
     mbb = null;
     f.close();
   } catch (Exception e) {
     System.out.println(e.getMessage());
     pos = 0;
     name = "null";
   }
 }
Example #24
  /**
   * @see java.io.RandomAccessFile#read(byte[], int, int)
   * @param bytes byte[]
   * @param off int offset
   * @param len int length
   * @return int bytes read or -1 on EOF
   */
  public int read(byte bytes[], int off, int len) {
    int mapN = (int) (pos / BUFSIZE);
    int offN = (int) (pos % BUFSIZE);
    int totalRead = 0;

    while (totalRead < len) {
      if (mapN >= mappedBuffers.length) break; // we have run out of data to read from
      MappedByteBuffer currentBuffer = mappedBuffers[mapN];
      if (offN > currentBuffer.limit()) break;
      currentBuffer.position(offN);
      int bytesFromThisBuffer = Math.min(len - totalRead, currentBuffer.remaining());
      currentBuffer.get(bytes, off, bytesFromThisBuffer);
      off += bytesFromThisBuffer;
      pos += bytesFromThisBuffer;
      totalRead += bytesFromThisBuffer;

      mapN++;
      offN = 0;
    }
    return totalRead == 0 ? -1 : totalRead;
  }
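The mappedBuffers array and BUFSIZE constant are assumed by this read(); a minimal sketch of how the segments might be created, assuming the file is mapped read-only in fixed-size chunks:

  // Hypothetical setup for the mappedBuffers/BUFSIZE fields assumed above:
  // a large file is mapped as a series of fixed-size read-only segments.
  private void mapSegments(RandomAccessFile file) throws IOException {
    FileChannel channel = file.getChannel();
    long length = channel.size();
    int segments = (int) ((length + BUFSIZE - 1) / BUFSIZE);
    mappedBuffers = new MappedByteBuffer[segments];
    for (int i = 0; i < segments; i++) {
      long start = (long) i * BUFSIZE;
      long size = Math.min(BUFSIZE, length - start); // last segment may be shorter
      mappedBuffers[i] = channel.map(MapMode.READ_ONLY, start, size);
    }
  }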
Example #25
  @Override
  public StoredBlock getChainHead() throws BlockStoreException {
    final MappedByteBuffer buffer = this.buffer;
    if (buffer == null) throw new BlockStoreException("Store closed");

    lock.lock();
    try {
      if (lastChainHead == null) {
        byte[] headHash = new byte[32];
        buffer.position(8);
        buffer.get(headHash);
        Sha256Hash hash = new Sha256Hash(headHash);
        StoredBlock block = get(hash);
        if (block == null)
          throw new BlockStoreException(
              "Corrupted block store: could not find chain head: " + hash);
        lastChainHead = block;
      }
      return lastChainHead;
    } finally {
      lock.unlock();
    }
  }
Example #26
 private byte[] readRecordDataFromMappedBuffer(RecordHeader header) {
   byte[] data = new byte[header.getDataSize()];
   mappedDataBuffer.position(header.getPosition() + header.getSize());
   mappedDataBuffer.get(data);
   return data;
 }
Example #27
  private long writeSubfile(
      long startPositionSubfile,
      int zoomIntervalIndex,
      boolean debugStrings,
      boolean waynodeCompression,
      boolean polygonClipping,
      boolean pixelCompression)
      throws IOException {

    logger.fine(
        "writing data for zoom interval "
            + zoomIntervalIndex
            + ", number of tiles: "
            + dataStore.numberOfHorizontalTiles(zoomIntervalIndex)
                * dataStore.numberOfVerticalTiles(zoomIntervalIndex));

    TileCoordinate upperLeft = dataStore.getUpperLeft(zoomIntervalIndex);
    int lengthX = dataStore.numberOfHorizontalTiles(zoomIntervalIndex);
    int lengthY = dataStore.numberOfVerticalTiles(zoomIntervalIndex);

    byte minZoomCurrentInterval =
        dataStore.getZoomIntervalConfiguration().getMinZoom(zoomIntervalIndex);
    byte maxZoomCurrentInterval =
        dataStore.getZoomIntervalConfiguration().getMaxZoom(zoomIntervalIndex);
    byte baseZoomCurrentInterval =
        dataStore.getZoomIntervalConfiguration().getBaseZoom(zoomIntervalIndex);
    byte maxMaxZoomlevel = dataStore.getZoomIntervalConfiguration().getMaxMaxZoom();

    int tileAmountInBytes =
        dataStore.numberOfHorizontalTiles(zoomIntervalIndex)
            * dataStore.numberOfVerticalTiles(zoomIntervalIndex)
            * BYTE_AMOUNT_SUBFILE_INDEX_PER_TILE;
    int indexBufferSize =
        tileAmountInBytes + (debugStrings ? DEBUG_INDEX_START_STRING.getBytes().length : 0);
    MappedByteBuffer indexBuffer =
        randomAccessFile
            .getChannel()
            .map(MapMode.READ_WRITE, startPositionSubfile, indexBufferSize);
    MappedByteBuffer tileBuffer =
        randomAccessFile
            .getChannel()
            .map(MapMode.READ_WRITE, startPositionSubfile + indexBufferSize, TILE_BUFFER_SIZE);

    long currentSubfileOffset = indexBufferSize;

    for (int tileY = upperLeft.getY(); tileY < upperLeft.getY() + lengthY; tileY++) {
      for (int tileX = upperLeft.getX(); tileX < upperLeft.getX() + lengthX; tileX++) {
        // logger.info("writing data for tile (" + tileX + ", " + tileY + ")");

        long currentTileOffsetInBuffer = tileBuffer.position();
        TileCoordinate currentTileCoordinate =
            new TileCoordinate(tileX, tileY, baseZoomCurrentInterval);

        // seek to index frame of this tile and write relative offset of this
        // tile as five bytes to the index
        indexBuffer.put(Serializer.getFiveBytes(currentSubfileOffset));

        // get statistics for tile
        TileData currentTile = dataStore.getTile(zoomIntervalIndex, tileX, tileY);

        // ************* POI ************
        // write amount of POIs and ways for each zoom level
        // TODO is this computation correct? Ways that have an associated zoom level of e.g. 9
        // are lifted to zoom level 12 for an interval 12,14,17
        Map<Byte, List<TDNode>> poisByZoomlevel =
            currentTile.poisByZoomlevel(minZoomCurrentInterval, maxMaxZoomlevel);
        Map<Byte, List<TDWay>> waysByZoomlevel =
            currentTile.waysByZoomlevel(minZoomCurrentInterval, maxMaxZoomlevel);

        if (poisByZoomlevel.size() > 0 || waysByZoomlevel.size() > 0) {
          int tileContainerStart = tileBuffer.position();
          if (debugStrings) {
            // write tile header
            StringBuilder sb = new StringBuilder();
            sb.append(DEBUG_STRING_TILE_HEAD)
                .append(tileX)
                .append(",")
                .append(tileY)
                .append(DEBUG_STRING_TILE_TAIL);
            tileBuffer.put(sb.toString().getBytes());
            // append whitespace so that the block has 32 bytes
            appendWhitespace(32 - sb.toString().getBytes().length, tileBuffer);
          }

          short cumulatedPOIs = 0;
          short cumulatedWays = 0;
          for (byte zoomlevel = minZoomCurrentInterval;
              zoomlevel <= maxZoomCurrentInterval;
              zoomlevel++) {
            if (poisByZoomlevel.get(zoomlevel) != null)
              cumulatedPOIs += poisByZoomlevel.get(zoomlevel).size();
            if (waysByZoomlevel.get(zoomlevel) != null)
              cumulatedWays += waysByZoomlevel.get(zoomlevel).size();
            tileBuffer.putShort(cumulatedPOIs);
            tileBuffer.putShort(cumulatedWays);
          }

          // skip 4 bytes, later these 4 bytes will contain the start
          // position of the ways in this tile
          int fileIndexStartWayContainer = tileBuffer.position();
          tileBuffer.position(fileIndexStartWayContainer + 4);

          // write POIs for each zoom level beginning with lowest zoom level
          for (byte zoomlevel = minZoomCurrentInterval;
              zoomlevel <= maxZoomCurrentInterval;
              zoomlevel++) {
            List<TDNode> pois = poisByZoomlevel.get(zoomlevel);
            if (pois == null) continue;
            for (TDNode poi : pois) {
              if (debugStrings) {
                StringBuilder sb = new StringBuilder();
                sb.append(DEBUG_STRING_POI_HEAD).append(poi.getId()).append(DEBUG_STRING_POI_TAIL);
                tileBuffer.put(sb.toString().getBytes());
                // append whitespace so that the block has 32 bytes
                appendWhitespace(32 - sb.toString().getBytes().length, tileBuffer);
              }

              // write poi features to the file
              tileBuffer.putInt(poi.getLatitude());
              tileBuffer.putInt(poi.getLongitude());

              // write byte with layer and tag amount info
              tileBuffer.put(
                  buildLayerTagAmountByte(
                      poi.getLayer(), poi.getTags() == null ? 0 : (short) poi.getTags().size()));

              // write tag ids to the file
              if (poi.getTags() != null) {
                for (PoiEnum poiEnum : poi.getTags()) {
                  tileBuffer.putShort((short) poiEnum.ordinal());
                }
              }

              // write byte with bits set to 1 if the poi has a name, an elevation
              // or a housenumber
              tileBuffer.put(
                  buildInfoByteForPOI(poi.getName(), poi.getElevation(), poi.getHouseNumber()));

              if (poi.getName() != null && poi.getName().length() > 0) {
                writeUTF8(poi.getName(), tileBuffer);
              }
              if (poi.getElevation() != 0) {
                tileBuffer.putShort(poi.getElevation());
              }
              if (poi.getHouseNumber() != null && poi.getHouseNumber().length() > 0) {
                writeUTF8(poi.getHouseNumber(), tileBuffer);
              }
            }
          } // end for loop over POIs

          // write offset to first way in the tile header
          tileBuffer.putInt(fileIndexStartWayContainer, tileBuffer.position() - tileContainerStart);

          // ************* WAYS ************
          // write ways
          for (byte zoomlevel = minZoomCurrentInterval;
              zoomlevel <= maxZoomCurrentInterval;
              zoomlevel++) {
            List<TDWay> ways = waysByZoomlevel.get(zoomlevel);
            if (ways == null) continue;

            // use executor service to parallelize computation of subtile bitmasks
            // for all ways in the current tile
            short[] bitmaskComputationResults = computeSubtileBitmasks(ways, currentTileCoordinate);
            assert bitmaskComputationResults.length == ways.size();
            // needed to access bitmask computation results in the foreach loop
            int i = 0;
            for (TDWay way : ways) {
              // // INNER WAY
              // // inner ways will be written as part of the outer way
              // if (way.isInnerWay())
              // continue;
              int startIndexWay = tileBuffer.position();

              WayNodePreprocessingResult wayNodePreprocessingResult =
                  preprocessWayNodes(
                      way,
                      waynodeCompression,
                      pixelCompression,
                      polygonClipping,
                      maxZoomCurrentInterval,
                      minZoomCurrentInterval,
                      currentTileCoordinate);

              if (wayNodePreprocessingResult == null) {
                continue;
              }
              if (debugStrings) {
                StringBuilder sb = new StringBuilder();
                sb.append(DEBUG_STRING_WAY_HEAD).append(way.getId()).append(DEBUG_STRING_WAY_TAIL);
                tileBuffer.put(sb.toString().getBytes());
                // append whitespace so that the block has 32 bytes
                appendWhitespace(32 - sb.toString().getBytes().length, tileBuffer);
              }

              // skip 4 bytes to reserve space for way size
              int startIndexWaySize = tileBuffer.position();
              tileBuffer.position(startIndexWaySize + 4);

              // write way features
              // short bitmask = GeoUtils.computeBitmask(way,
              // currentTileCoordinate);
              // short bitmask = (short) 0xffff;
              tileBuffer.putShort(bitmaskComputationResults[i++]);

              // write byte with layer and tag amount
              tileBuffer.put(
                  buildLayerTagAmountByte(
                      way.getLayer(), way.getTags() == null ? 0 : (short) way.getTags().size()));

              // set type of the way node compression
              int compressionType = wayNodePreprocessingResult.getCompressionType();

              // write byte with amount of tags which are rendered
              tileBuffer.put(buildRenderTagWayNodeCompressionByte(way.getTags(), compressionType));

              // write tag bitmap
              tileBuffer.put(buildTagBitmapByte(way.getTags()));
              // file.writeByte((byte) 0xff);

              // write tag ids
              if (way.getTags() != null) {
                for (WayEnum wayEnum : way.getTags()) {
                  tileBuffer.putShort((short) wayEnum.ordinal());
                }
              }
              // write the amount of way nodes to the file
              tileBuffer.putShort(
                  (short) (wayNodePreprocessingResult.getWaynodesAsList().size() / 2));

              // write the way nodes:
              // the first node is always stored with four bytes
              // the remaining way node differences are stored according to the
              // compression type
              writeWayNodes(
                  wayNodePreprocessingResult.getWaynodesAsList(),
                  wayNodePreprocessingResult.getCompressionType(),
                  tileBuffer);

              // write a byte with name, label and way type information
              tileBuffer.put(buildInfoByteForWay(way.getName(), way.getWaytype(), way.getRef()));

              // // if the way has a name, write it to the file
              if (way.getName() != null && way.getName().length() > 0) {
                writeUTF8(way.getName(), tileBuffer);
              }

              // if the way has a ref, write it to the file
              if (way.getRef() != null && way.getRef().length() > 0) {
                writeUTF8(way.getRef(), tileBuffer);
              }
              //
              // // // if the way has a label position write it to the file
              // // if (labelPositionLatitude != 0 && labelPositionLongitude != 0)
              // {
              // // raf.writeInt(labelPositionLatitude);
              // // raf.writeInt(labelPositionLongitude);
              // // }
              //
              // *********MULTIPOLYGON PROCESSING***********
              if (way.getWaytype() == 3
                  && dataStore.getInnerWaysOfMultipolygon(way.getId()) != null) {
                List<TDWay> innerways = dataStore.getInnerWaysOfMultipolygon(way.getId());

                if (innerways == null) {
                  tileBuffer.put((byte) 0);
                } else {
                  tileBuffer.put((byte) innerways.size());
                  for (TDWay innerway : innerways) {
                    WayNodePreprocessingResult innerWayNodePreprocessingResult =
                        preprocessWayNodes(
                            innerway,
                            waynodeCompression,
                            pixelCompression,
                            false,
                            maxZoomCurrentInterval,
                            minZoomCurrentInterval,
                            currentTileCoordinate);
                    // write the amount of way nodes to the file
                    tileBuffer.putShort(
                        (short) (innerWayNodePreprocessingResult.getWaynodesAsList().size() / 2));
                    writeWayNodes(
                        innerWayNodePreprocessingResult.getWaynodesAsList(),
                        wayNodePreprocessingResult.getCompressionType(),
                        tileBuffer);
                  }
                }
              }
              // write the size of the way to the file
              tileBuffer.putInt(startIndexWaySize, tileBuffer.position() - startIndexWay);
            }
          } // end for loop over ways
        } // end if clause checking if tile is empty or not
        long tileSize = tileBuffer.position() - currentTileOffsetInBuffer;
        currentSubfileOffset += tileSize;

        // if necessary, allocate new buffer
        if (tileBuffer.remaining() < MIN_TILE_BUFFER_SIZE)
          tileBuffer =
              randomAccessFile
                  .getChannel()
                  .map(
                      MapMode.READ_WRITE,
                      startPositionSubfile + currentSubfileOffset,
                      TILE_BUFFER_SIZE);

        tilesProcessed++;
        if (tilesProcessed % fivePercentOfTilesToProcess == 0) {
          logger.info(
              "written " + (tilesProcessed / fivePercentOfTilesToProcess) * 5 + "% of file");
        }
      } // end for loop over tile columns
    } // end for loop over tile rows

    // return size of sub file in bytes
    return currentSubfileOffset;
  }
Example #28
  private long writeContainerHeader(
      long date,
      int version,
      short tilePixel,
      String comment,
      boolean debugStrings,
      boolean waynodeCompression,
      boolean polygonClipping,
      boolean pixelCompression,
      GeoCoordinate mapStartPosition)
      throws IOException {

    // get metadata for the map file
    int numberOfZoomIntervals = dataStore.getZoomIntervalConfiguration().getNumberOfZoomIntervals();

    logger.fine("writing header");

    MappedByteBuffer containerHeaderBuffer =
        randomAccessFile.getChannel().map(MapMode.READ_WRITE, 0, HEADER_BUFFER_SIZE);

    // write file header
    // magic byte
    byte[] magicBytes = MAGIC_BYTE.getBytes();
    containerHeaderBuffer.put(magicBytes);

    // write container header size
    int headerSizePosition = containerHeaderBuffer.position();
    containerHeaderBuffer.position(headerSizePosition + 4);

    // version number of the binary file format
    containerHeaderBuffer.putInt(version);

    // meta info byte
    containerHeaderBuffer.put(
        buildMetaInfoByte(
            debugStrings,
            mapStartPosition != null,
            pixelCompression,
            polygonClipping,
            waynodeCompression));

    // amount of map files inside this file
    containerHeaderBuffer.put((byte) numberOfZoomIntervals);

    // projection type
    writeUTF8(PROJECTION, containerHeaderBuffer);

    // width and height of a tile in pixel
    containerHeaderBuffer.putShort(tilePixel);

    logger.fine(
        "Bounding box for file: "
            + dataStore.getBoundingBox().maxLatitudeE6
            + ", "
            + dataStore.getBoundingBox().minLongitudeE6
            + ", "
            + dataStore.getBoundingBox().minLatitudeE6
            + ", "
            + dataStore.getBoundingBox().maxLongitudeE6);
    // upper left corner of the bounding box
    containerHeaderBuffer.putInt(dataStore.getBoundingBox().maxLatitudeE6);
    containerHeaderBuffer.putInt(dataStore.getBoundingBox().minLongitudeE6);
    containerHeaderBuffer.putInt(dataStore.getBoundingBox().minLatitudeE6);
    containerHeaderBuffer.putInt(dataStore.getBoundingBox().maxLongitudeE6);

    if (mapStartPosition != null) {
      containerHeaderBuffer.putInt(mapStartPosition.getLatitudeE6());
      containerHeaderBuffer.putInt(mapStartPosition.getLongitudeE6());
    }

    // date of the map data
    containerHeaderBuffer.putLong(date);

    // store the mapping of tags to tag ids
    containerHeaderBuffer.putShort((short) PoiEnum.values().length);
    for (PoiEnum poiEnum : PoiEnum.values()) {
      writeUTF8(poiEnum.toString(), containerHeaderBuffer);
      containerHeaderBuffer.putShort((short) poiEnum.ordinal());
    }
    containerHeaderBuffer.putShort((short) WayEnum.values().length);
    for (WayEnum wayEnum : WayEnum.values()) {
      writeUTF8(wayEnum.toString(), containerHeaderBuffer);
      containerHeaderBuffer.putShort((short) wayEnum.ordinal());
    }

    // comment
    if (comment != null && !comment.equals("")) {
      writeUTF8(comment, containerHeaderBuffer);
    } else {
      writeUTF8("", containerHeaderBuffer);
    }

    // initialize buffer for writing zoom interval configurations
    bufferZoomIntervalConfig =
        randomAccessFile
            .getChannel()
            .map(
                MapMode.READ_WRITE,
                containerHeaderBuffer.position(),
                SIZE_ZOOMINTERVAL_CONFIGURATION * numberOfZoomIntervals);

    containerHeaderBuffer.position(
        containerHeaderBuffer.position() + SIZE_ZOOMINTERVAL_CONFIGURATION * numberOfZoomIntervals);

    // subtract the 4 bytes of the header-size field itself
    int headerSize = containerHeaderBuffer.position() - headerSizePosition - 4;
    containerHeaderBuffer.putInt(headerSizePosition, headerSize);

    return containerHeaderBuffer.position();
  }
Example #29
 private void writeRecordDataToMappedBuffer(RecordHeader header, byte[] data) {
   mappedDataBuffer.position(header.getPosition() + header.getSize());
   mappedDataBuffer.put(data);
 }
Example #30
  public static void main(String[] argv) throws Exception {
    // Create a temp file and get a channel connected to it
    File tempFile = File.createTempFile("mmaptest", null);
    RandomAccessFile file = new RandomAccessFile(tempFile, "rw");
    FileChannel channel = file.getChannel();

    // write strings to file by channel
    ByteBuffer temp = ByteBuffer.allocate(100);
    temp.put("This is the file content".getBytes());
    temp.flip();
    channel.write(temp, 0);

    temp.clear();
    temp.put("This is more file content".getBytes());
    temp.flip();
    channel.write(temp, 8192);

    // readonly
    MappedByteBuffer ro = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
    // read and write
    MappedByteBuffer rw = channel.map(FileChannel.MapMode.READ_WRITE, 0, channel.size());
    // copy on write
    MappedByteBuffer cow = channel.map(FileChannel.MapMode.PRIVATE, 0, channel.size());

    System.out.println("Begin");

    // ro, rw and cow are identical at this point
    showBuffers(ro, rw, cow);

    // Modify the copy-on-write buffer; copy-on-write does not affect the underlying file
    cow.position(8);
    cow.put("COW".getBytes());
    System.out.println("Change to COW buffer");
    showBuffers(ro, rw, cow);

    // Modify the read/write buffer; read/write does affect the underlying file
    rw.position(9);
    rw.put(" R/W ".getBytes());
    rw.position(8194);
    rw.put(" R/W ".getBytes());
    rw.force();
    System.out.println("Change to R/W buffer");
    showBuffers(ro, rw, cow);

    // Write to the file through the channel; hit both pages
    temp.clear();
    temp.put("Channel write ".getBytes());
    temp.flip();
    channel.write(temp, 0);
    temp.rewind();
    channel.write(temp, 8202);
    System.out.println("Write on channel");
    showBuffers(ro, rw, cow);

    // Modify the copy-on-write buffer again
    cow.position(8207);
    cow.put(" COW2 ".getBytes());
    System.out.println("Second change to COW buffer");
    showBuffers(ro, rw, cow);

    // Modify the read/write buffer
    rw.position(0);
    rw.put(" R/W2 ".getBytes());
    rw.position(8210);
    rw.put(" R/W2 ".getBytes());
    rw.force();
    System.out.println("Second change to R/W buffer");
    showBuffers(ro, rw, cow);
  }
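showBuffers() is not included in the example; a hedged sketch that dumps a short slice of each mapping around the two write sites (offsets 0 and 8192) so the effect of the different map modes can be compared:

  // Hypothetical showBuffers(): prints a slice of each mapping near the offsets
  // written by the demo above, so differences between RO, RW and COW are visible.
  private static void showBuffers(MappedByteBuffer ro, MappedByteBuffer rw, MappedByteBuffer cow) {
    System.out.println("RO:  " + slice(ro, 0) + " | " + slice(ro, 8192));
    System.out.println("RW:  " + slice(rw, 0) + " | " + slice(rw, 8192));
    System.out.println("COW: " + slice(cow, 0) + " | " + slice(cow, 8192));
    System.out.println();
  }

  private static String slice(MappedByteBuffer buffer, int offset) {
    ByteBuffer view = buffer.duplicate(); // do not disturb the original position/limit
    view.position(offset);
    view.limit(Math.min(view.capacity(), offset + 40)); // 40 bytes covers the demo text
    byte[] bytes = new byte[view.remaining()];
    view.get(bytes);
    return new String(bytes).replace('\0', '.');
  }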