public static String decompress(byte[] data) {
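    // The input begins with a 4-byte big-endian length header, followed by a zlib-compressed payload.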
   int length = ByteBuffer.wrap(Arrays.copyOfRange(data, 0, 4)).getInt();
    if (length < 0 || length > 100000) {
      // This is a sanity check: more than 100 kB of password settings makes no sense.
      System.out.println("Decompression error: The transferred length is invalid or too big.");
      return "";
    }
   }
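    // Skip the 4-byte header and inflate the remaining payload into a buffer of the advertised size.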
   Inflater inflater = new Inflater();
   inflater.setInput(data, 4, data.length - 4);
   byte[] decompressedBytes = new byte[length];
    try {
      if (inflater.inflate(decompressedBytes) != length) {
        throw new AssertionError("Decompressed size does not match the transferred length.");
      }
    } catch (DataFormatException e) {
      System.out.println("Decompression error: The data is not a valid zlib stream.");
      return "";
    } finally {
      inflater.end();
    }
    // UTF-8 is guaranteed to be supported by every JVM, so decoding cannot fail here.
    return new String(decompressedBytes, StandardCharsets.UTF_8);
 }
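  /**
   * Illustrative sketch only (not part of the original code): a compress counterpart that produces
   * the format decompress expects, i.e. a 4-byte big-endian length header followed by a zlib
   * stream of the UTF-8 encoded text.
   */
  public static byte[] compress(String text) throws IOException {
    byte[] input = text.getBytes(StandardCharsets.UTF_8);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // Write the uncompressed length as a 4-byte big-endian header.
    out.write(ByteBuffer.allocate(4).putInt(input.length).array());
    // DeflaterOutputStream emits a standard zlib stream, which Inflater's default mode accepts.
    try (DeflaterOutputStream deflaterOut = new DeflaterOutputStream(out)) {
      deflaterOut.write(input);
    }
    return out.toByteArray();
  }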
  /** Helper function that parses out and decompresses the data part of a FileBlock. */
  FileBlock parseData(byte[] buf) throws InvalidProtocolBufferException {
   FileBlock out = FileBlock.newInstance(type, indexdata);
   Fileformat.Blob blob = Fileformat.Blob.parseFrom(buf);
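    // The Blob carries either raw (uncompressed) bytes or zlib-compressed bytes; getRawSize() is the uncompressed length.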
   if (blob.hasRaw()) {
     out.data = blob.getRaw();
    } else if (blob.hasZlibData()) {
      byte[] buf2 = new byte[blob.getRawSize()];
      Inflater decompresser = new Inflater();
      decompresser.setInput(blob.getZlibData().toByteArray());
      try {
        decompresser.inflate(buf2);
        if (!decompresser.finished()) {
          throw new Error("Zlib data did not decompress to the declared raw size.");
        }
      } catch (DataFormatException e) {
        throw new Error(e);
      } finally {
        decompresser.end();
      }
      out.data = ByteString.copyFrom(buf2);
    }
   return out;
 }
  @Override
  public CompressedChunkMessage decode(ChannelBuffer buffer) throws IOException {
    int x = buffer.readInt();
    int z = buffer.readInt();
    boolean contiguous = buffer.readByte() == 1;

    short primaryBitMap = buffer.readShort();
    short addBitMap = buffer.readShort();
    int compressedSize = buffer.readInt();
    int unused = buffer.readInt();
    byte[] compressedData = new byte[compressedSize];
    buffer.readBytes(compressedData);

    boolean[] hasAdditionalData = new boolean[MAX_SECTIONS];
    byte[][] data = new byte[MAX_SECTIONS][];

    int size = 0;
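     // Determine which sections the bitmaps mark as present and how many uncompressed bytes they account for.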
    for (int i = 0; i < MAX_SECTIONS; ++i) {
       if ((primaryBitMap & 1 << i) != 0) { // This section is present; allocate its data.
        int sectionSize = SIXTEEN_CUBED * 5 / 2;
        if ((addBitMap & 1 << i) != 0) {
          hasAdditionalData[i] = true;
          sectionSize += SIXTEEN_CUBED / 2;
        }

        data[i] = new byte[sectionSize];
        size += sectionSize;
      }
    }

    if (contiguous) {
      size += Chunk.CHUNK_SIZE * Chunk.CHUNK_SIZE;
    }

    byte[] uncompressedData = new byte[size];

     Inflater inflater = new Inflater();
     inflater.setInput(compressedData);
     try {
       int uncompressed = inflater.inflate(uncompressedData);
       if (uncompressed != size) {
         throw new IOException("Not all bytes were uncompressed.");
       }
     } catch (DataFormatException e) {
       throw new IOException("Bad compressed data.", e);
     } finally {
       inflater.end();
     }

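     // Copy the inflated buffer back into the per-section arrays, in bitmap order.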
     size = 0;
     for (byte[] sectionData : data) {
       if (sectionData != null && size + sectionData.length <= uncompressedData.length) {
         System.arraycopy(uncompressedData, size, sectionData, 0, sectionData.length);
         size += sectionData.length;
       }
     }
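     // Contiguous chunks also carry a trailing biome array of CHUNK_SIZE * CHUNK_SIZE bytes.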
    byte[] biomeData = new byte[Chunk.CHUNK_SIZE * Chunk.CHUNK_SIZE];

    if (contiguous) {
      System.arraycopy(uncompressedData, size, biomeData, 0, biomeData.length);
      size += biomeData.length;
    }

    return new CompressedChunkMessage(x, z, contiguous, hasAdditionalData, unused, data, biomeData);
  }