/** * inflater 解壓縮 * * @param value * @return */ public static byte[] inflater(byte[] value) { byte[] result = new byte[0]; Inflater inflater = null; ByteArrayOutputStream out = null; try { inflater = new Inflater(); inflater.setInput(value); // out = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; while (!inflater.finished()) { int count = inflater.inflate(buffer); out.write(buffer, 0, count); } // result = out.toByteArray(); } catch (Exception ex) { ex.printStackTrace(); } finally { if (inflater != null) { inflater.end(); // 不end也不會有oom,不過還是關了吧 } IoHelper.close(out); } return result; }
/**
 * Renders the captured systrace/atrace output as a self-contained HTML page
 * (header + escaped trace body + footer).
 *
 * <p>Returns the empty string when no systrace section was found
 * ({@code mSystraceIndex < 0}). When {@code mUncompress} is set, the payload
 * starting at {@code mSystraceIndex} is inflated in memory first.
 *
 * <p>NOTE(review): {@code new String(buf, 0, n)} uses the platform default
 * charset — presumably the trace is ASCII/UTF-8; confirm before relying on it.
 */
public String getSystraceHtml() {
    if (mSystraceIndex < 0) {
        return "";
    }
    String trace = "";
    if (mUncompress) {
        Inflater decompressor = new Inflater();
        // The compressed systrace payload occupies mAtraceOutput[mSystraceIndex..mAtraceLength).
        decompressor.setInput(mAtraceOutput, mSystraceIndex, mAtraceLength - mSystraceIndex);
        byte[] buf = new byte[4096];
        int n;
        StringBuilder sb = new StringBuilder(1000);
        try {
            // inflate() returning 0 terminates the loop (stream finished or starved).
            while ((n = decompressor.inflate(buf)) > 0) {
                sb.append(new String(buf, 0, n));
            }
        } catch (DataFormatException e) {
            throw new RuntimeException(e);
        }
        decompressor.end();
        trace = sb.toString();
    } else {
        trace = new String(mAtraceOutput, mSystraceIndex, mAtraceLength - mSystraceIndex);
    }
    // each line should end with the characters \n\ followed by a newline
    String html_out = trace.replaceAll("\n", "\\\\n\\\\\n");
    String header = String.format(mHtmlPrefix, mCss, mJs, "");
    String footer = mHtmlSuffix;
    return header + html_out + footer;
}
private final byte[] uncompress(final byte[] input) throws IOException { Inflater decompressor = new Inflater(); decompressor.setInput(input); // Create an expandable byte array to hold the decompressed data ByteArrayOutputStream bos = new ByteArrayOutputStream(input.length); // Decompress the data byte[] buf = new byte[1024]; while (!decompressor.finished()) { try { int count = decompressor.inflate(buf); bos.write(buf, 0, count); } catch (DataFormatException e) { // this will happen if the field is not compressed IOException newException = new IOException("field data are in wrong format: " + e.toString()); newException.initCause(e); throw newException; } } decompressor.end(); // Get the decompressed data return bos.toByteArray(); }
/**
 * Sends a POST whose body is deflate-compressed and dumps the servlet's
 * response for manual inspection.
 */
@Test
public void testDecompressDeflateRequest() throws Exception {
    // Build the request the way a deflate-capable client would.
    request.setMethod("POST");
    request.setURI("/banner");
    request.setHeader(HttpHeaders.CONTENT_ENCODING, "deflate");
    request.setHeader(HttpHeaders.CONTENT_TYPE, PLAIN_TEXT_UTF_8);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DeflaterOutputStream deflate = new DeflaterOutputStream(baos)) {
        Resources.copy(Resources.getResource("assets/new-banner.txt"), deflate);
    }
    byte[] output = baos.toByteArray();
    request.setContent(output);
    // Decompress the bytes locally to sanity-check that the fixture round-trips.
    Inflater decompresser = new Inflater();
    decompresser.setInput(output);
    byte[] result = new byte[4096];
    int resultLength = decompresser.inflate(result);
    decompresser.end();
    // Decode the bytes into a String
    System.out.println(new String(result, 0, resultLength, "UTF-8"));
    HttpTester.Response response = HttpTester.parseResponse(servletTester.getResponses(request.generate()));
    // NOTE(review): the response is only printed, never asserted — this test
    // cannot fail on wrong output; consider asserting status/content.
    System.out.println(response.getStatus());
    System.out.println(response.getContent());
}
public static String decompress(byte[] data) { int length = ByteBuffer.wrap(Arrays.copyOfRange(data, 0, 4)).getInt(); if (length > 100000) { // This is a sanity check. More than 100kb of password settings make no sense. System.out.println("Decompression error: The trasferred length is too big."); return ""; } Inflater inflater = new Inflater(); inflater.setInput(data, 4, data.length - 4); byte[] decompressedBytes = new byte[length]; try { if (inflater.inflate(decompressedBytes) != length) { throw new AssertionError(); } } catch (DataFormatException e) { e.printStackTrace(); } inflater.end(); try { return new String(decompressedBytes, "UTF-8"); } catch (UnsupportedEncodingException e) { System.out.println( "Decompression error: UTF-8 is not supported. " + "Using default encoding."); return new String(decompressedBytes); } }
/**
 * Closes this input stream and releases any system resources associated with
 * the stream. Calling close() on an already-closed stream is a no-op.
 *
 * @exception IOException if an I/O error has occurred
 */
public void close() throws IOException {
    if (closed) {
        return; // idempotent: nothing left to release
    }
    // Only an inflater this stream created itself is ended here; a
    // caller-supplied inflater stays the caller's responsibility.
    if (usesDefaultInflater) {
        inf.end();
    }
    in.close();
    closed = true;
}
/**
 * Inflates one zlib stream starting at the current {@code fileChannel}
 * position and leaves the channel positioned at the first byte after the
 * stream (using the inflater's consumed-byte count).
 *
 * @param compressed_size or use null if not known
 * @param uncompressed_size or use null if not known
 * @return uncompressed ByteArrayOutputStream
 * @throws IOException
 * @throws DataFormatException
 */
private ByteArrayOutputStream unCompress(Integer compressed_size, Integer uncompressed_size)
        throws IOException, DataFormatException {
    byte[] uncompressed_data = null;
    byte[] input_data = null;
    ByteArrayOutputStream ret = new ByteArrayOutputStream();
    Inflater decompresser = new Inflater(false);
    // Remember where the stream started so we can seek past exactly the
    // bytes zlib actually consumed once inflation finishes.
    long first_seek = fileChannel.position();
    Boolean uncompressing = true;
    while (uncompressing) {
        if (decompresser.needsInput()) {
            // Read another input chunk; fall back to 1 KiB when the
            // compressed size is unknown.
            input_data = new byte[(compressed_size != null) ? compressed_size.intValue() : 1024];
            fileChannel.read(ByteBuffer.wrap(input_data));
            decompresser.setInput(input_data, 0, input_data.length);
            // NOTE(review): the channel read count is ignored — at EOF the
            // inflater is fed zero bytes; confirm callers never hit EOF mid-stream.
        }
        uncompressed_data = new byte[(uncompressed_size != null) ? uncompressed_size.intValue() : (input_data.length * 4)];
        decompresser.inflate(uncompressed_data);
        // getBytesWritten() is cumulative, so subtract what was already copied
        // into ret to get only this iteration's output.
        int op = (int) (decompresser.getBytesWritten() - (long) ret.size());
        if (op > 0) ret.write(uncompressed_data, 0, op);
        if (decompresser.finished()) uncompressing = false;
    }
    fileChannel.position((first_seek + decompresser.getBytesRead())); // move file pointer to start of next stream
    decompresser.end();
    return ret;
}
/**
 * Reads a chunk-data packet: coordinates, section bitmasks and a
 * zlib-compressed payload that is inflated into {@code chunkData}.
 * On decompression failure {@code chunkData} is left null.
 */
@Override
public void readData(DataInputStream in) throws IOException {
    x = in.readInt();
    z = in.readInt();
    biomes = in.readBoolean();
    bitmask = in.readShort();
    additionalBitmask = in.readShort();
    int tempLength = in.readInt();
    byte[] compressedChunkData = new byte[tempLength];
    in.readFully(compressedChunkData, 0, tempLength);
    // Count the chunk sections present in the bitmask.
    int i = 0;
    for (int j = 0; j < 16; j++) i += bitmask >> j & 1;
    // 12288 bytes per section (presumably block ids + nibble arrays — confirm),
    // plus 256 biome bytes when the biomes flag is set.
    int k = 12288 * i;
    if (biomes) k += 256;
    chunkData = new byte[k];
    Inflater inflater = new Inflater();
    inflater.setInput(compressedChunkData, 0, tempLength);
    try {
        inflater.inflate(chunkData);
    } catch (DataFormatException dataformatexception) {
        chunkData = null;
    } catch (OutOfMemoryError error) {
        // Low-memory fallback: free what we can and retry the inflation once
        // with a fresh inflater.
        System.gc();
        try {
            inflater.end();
            inflater = new Inflater();
            inflater.setInput(compressedChunkData, 0, tempLength);
            inflater.inflate(chunkData);
        } catch (DataFormatException dataformatexception) {
            chunkData = null;
        } catch (OutOfMemoryError error2) {
            chunkData = null;
        }
    } finally {
        // Ends whichever inflater is current (the retry path reassigns it).
        inflater.end();
    }
}
/**
 * Finishes writing uncompressed data to the output stream without closing the
 * underlying stream. Use this method when applying multiple filters in
 * succession to the same output stream.
 *
 * <p>NOTE(review): this also ends the internally-created inflater, so the
 * stream cannot be written to afterwards — confirm that matches caller
 * expectations.
 *
 * @throws IOException if an I/O error occurs or this stream is already closed
 */
public void finish() throws IOException {
    ensureOpen();
    // Finish decompressing and writing pending output data
    flush();
    if (usesDefaultInflater) {
        // Only release the inflater if this stream created it internally.
        inf.end();
    }
}
/** Abstract. Reads the raw packet data from the data stream. */
public void readPacketData(DataInputStream par1DataInputStream) throws IOException {
    short short1 = par1DataInputStream.readShort(); // number of chunks in this bulk packet
    this.dataLength = par1DataInputStream.readInt();
    this.skyLightSent = par1DataInputStream.readBoolean();
    this.chunkPostX = new int[short1];
    this.chunkPosZ = new int[short1];
    this.field_73590_a = new int[short1];
    this.field_73588_b = new int[short1];
    this.field_73584_f = new byte[short1][];
    // Reuse the shared static buffer, growing it only when needed.
    if (chunkDataNotCompressed.length < this.dataLength) {
        chunkDataNotCompressed = new byte[this.dataLength];
    }
    par1DataInputStream.readFully(chunkDataNotCompressed, 0, this.dataLength);
    // 196864 bytes per chunk — presumably the worst-case uncompressed chunk
    // size; confirm against the protocol spec.
    byte[] abyte = new byte[196864 * short1];
    Inflater inflater = new Inflater();
    inflater.setInput(chunkDataNotCompressed, 0, this.dataLength);
    try {
        inflater.inflate(abyte);
    } catch (DataFormatException dataformatexception) {
        throw new IOException("Bad compressed data format");
    } finally {
        inflater.end();
    }
    int i = 0;
    for (int j = 0; j < short1; ++j) {
        this.chunkPostX[j] = par1DataInputStream.readInt();
        this.chunkPosZ[j] = par1DataInputStream.readInt();
        this.field_73590_a[j] = par1DataInputStream.readShort();
        this.field_73588_b[j] = par1DataInputStream.readShort();
        // Count the bits set in each of the two per-chunk bitmasks.
        int k = 0;
        int l = 0;
        int i1;
        for (i1 = 0; i1 < 16; ++i1) {
            k += this.field_73590_a[j] >> i1 & 1;
            l += this.field_73588_b[j] >> i1 & 1;
        }
        // Per-chunk slice size, larger when sky light data is included.
        i1 = 2048 * 4 * k + 256;
        i1 += 2048 * l;
        if (this.skyLightSent) {
            i1 += 2048 * k;
        }
        this.field_73584_f[j] = new byte[i1];
        System.arraycopy(abyte, i, this.field_73584_f[j], 0, i1);
        i += i1;
    }
}
/**
 * Release an inflater previously obtained from this cache.
 *
 * @param i the inflater to return. May be null, in which case this method does nothing.
 */
public static void release(final Inflater i) {
    if (i == null) {
        return;
    }
    if (openInflaterCount == SZ) {
        // Cache already full — free the native zlib state instead of pooling.
        i.end();
    } else {
        // Reset before handing back so the next borrower gets a clean inflater;
        // done outside the synchronized releaseImpl() to keep the lock short.
        i.reset();
        releaseImpl(i);
    }
}
/** TIFF Adobe ZIP support contributed by Jason Newton. */
public byte[] zipUncompress(byte[] input) {
    final ByteArrayOutputStream sink = new ByteArrayOutputStream();
    final byte[] chunk = new byte[1024];
    final Inflater inflater = new Inflater();
    inflater.setInput(input);
    try {
        // Drain the zlib stream chunk by chunk until it reports completion.
        while (!inflater.finished()) {
            final int produced = inflater.inflate(chunk);
            sink.write(chunk, 0, produced);
        }
    } catch (DataFormatException e) {
        // Log through ImageJ and fall through with whatever inflated so far.
        IJ.log(e.toString());
    }
    inflater.end();
    return sink.toByteArray();
}
/**
 * Decompresses a zlib/deflate-compressed byte array.
 *
 * <p>Bug fix: the original swallowed {@link DataFormatException} with an empty
 * catch inside the loop; since {@code finished()} never becomes true for
 * corrupt input, the loop spun forever. Malformed or truncated input now
 * fails fast with an {@link IOException}. {@code end()} also moved to a
 * finally block so native zlib memory is always released.
 *
 * @param compressedData the compressed bytes
 * @return the decompressed bytes
 * @throws IOException if the input is not a valid, complete compressed stream
 */
public static byte[] decompress(byte[] compressedData) throws IOException {
    Inflater inflater = new Inflater();
    inflater.setInput(compressedData);
    ByteArrayOutputStream bos = new ByteArrayOutputStream(compressedData.length);
    try {
        byte[] buf = new byte[1024];
        while (!inflater.finished()) {
            int count = inflater.inflate(buf);
            if (count == 0 && inflater.needsInput()) {
                // No more input but the stream is not finished: truncated data.
                throw new IOException("Unexpected end of compressed data");
            }
            bos.write(buf, 0, count);
        }
    } catch (DataFormatException e) {
        throw new IOException("Invalid compressed data format", e);
    } finally {
        inflater.end();
    }
    return bos.toByteArray();
}
/**
 * Decoding and deflating the encoded AuthReq.
 *
 * <p>First tries to treat the base64-decoded payload as a raw DEFLATE stream
 * (redirect binding, no zlib header); when that throws
 * {@link DataFormatException} it falls back to a zlib-wrapped
 * {@link InflaterInputStream}.
 *
 * <p>NOTE(review): the fast path uses a fixed 5000-byte buffer — any request
 * that inflates larger hits the RuntimeException below; confirm the limit.
 *
 * @param encodedStr encoded AuthReq
 * @return decoded AuthReq
 */
public static String decode(String encodedStr) throws SAMLSSOException {
    try {
        org.apache.commons.codec.binary.Base64 base64Decoder = new org.apache.commons.codec.binary.Base64();
        byte[] xmlBytes = encodedStr.getBytes("UTF-8");
        byte[] base64DecodedByteArray = base64Decoder.decode(xmlBytes);
        try {
            // TODO if the request came in POST, inflating is wrong
            // nowrap=true: raw DEFLATE without the zlib header/trailer.
            Inflater inflater = new Inflater(true);
            inflater.setInput(base64DecodedByteArray);
            byte[] xmlMessageBytes = new byte[5000];
            int resultLength = inflater.inflate(xmlMessageBytes);
            if (inflater.getRemaining() > 0) {
                throw new RuntimeException("didn't allocate enough space to hold " + "decompressed data");
            }
            inflater.end();
            String decodedString = new String(xmlMessageBytes, 0, resultLength, "UTF-8");
            if (log.isDebugEnabled()) {
                log.debug("Request message " + decodedString);
            }
            return decodedString;
        } catch (DataFormatException e) {
            // Fallback: payload may be a zlib-wrapped stream; inflate it via stream.
            ByteArrayInputStream bais = new ByteArrayInputStream(base64DecodedByteArray);
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            InflaterInputStream iis = new InflaterInputStream(bais);
            byte[] buf = new byte[1024];
            int count = iis.read(buf);
            while (count != -1) {
                baos.write(buf, 0, count);
                count = iis.read(buf);
            }
            iis.close();
            String decodedStr = new String(baos.toByteArray(), Charset.forName("UTF-8"));
            if (log.isDebugEnabled()) {
                log.debug("Request message " + decodedStr);
            }
            return decodedStr;
        }
    } catch (IOException e) {
        throw new SAMLSSOException("Error when decoding the SAML Request.", e);
    }
}
/**
 * Sanity-checks raw (nowrap) inflation of a known DEFLATE fragment captured
 * from pywebsocket.
 */
@Test
public void testInflateBasics() throws Exception {
    // should result in "info:" text if properly inflated
    byte rawbuf[] = TypeUtil.fromHexString("CaCc4bCbB70200"); // what pywebsocket produces
    // byte rawbuf[] = TypeUtil.fromHexString("CbCc4bCbB70200"); // what java produces
    // nowrap=true: raw DEFLATE with no zlib header, as WebSocket compression uses.
    Inflater inflater = new Inflater(true);
    inflater.reset();
    inflater.setInput(rawbuf, 0, rawbuf.length);
    byte outbuf[] = new byte[64];
    int len = inflater.inflate(outbuf);
    inflater.end();
    Assert.assertThat("Inflated length", len, greaterThan(4));
    String actual = StringUtil.toUTF8String(outbuf, 0, len);
    Assert.assertThat("Inflated text", actual, is("info:"));
}
/** Reads the raw packet data from the data stream. */
public void readPacketData(PacketBuffer data) throws IOException {
    this.field_149284_a = data.readInt();
    this.field_149282_b = data.readInt();
    this.field_149279_g = data.readBoolean();
    this.field_149283_c = data.readShort();
    this.field_149280_d = data.readShort();
    this.field_149285_h = data.readInt();
    // Reuse the shared static buffer, growing it only when needed.
    if (field_149286_i.length < this.field_149285_h) {
        field_149286_i = new byte[this.field_149285_h];
    }
    data.readBytes(field_149286_i, 0, this.field_149285_h);
    int i = 0;
    int j;
    int msb = 0;
    // BugFix: MC does not read the MSB array from the packet properly, causing issues for
    // servers that use blocks > 256
    // NOTE(review): both counters sum bits of field_149283_c; if msb is meant
    // to count the add/MSB bitmask, field_149280_d looks intended — confirm.
    for (j = 0; j < 16; ++j) {
        i += this.field_149283_c >> j & 1;
        msb += this.field_149283_c >> j & 1;
    }
    // Buffer size: 12288 bytes per section plus 2048 per MSB section,
    // plus 256 biome bytes for full-chunk packets.
    j = 12288 * i;
    j += 2048 * msb;
    if (this.field_149279_g) {
        j += 256;
    }
    this.field_149278_f = new byte[j];
    Inflater inflater = new Inflater();
    inflater.setInput(field_149286_i, 0, this.field_149285_h);
    try {
        inflater.inflate(this.field_149278_f);
    } catch (DataFormatException dataformatexception) {
        throw new IOException("Bad compressed data format");
    } finally {
        inflater.end();
    }
}
/** Abstract. Reads the raw packet data from the data stream. */
public void readPacketData(DataInputStream par1DataInputStream) throws IOException {
    this.xCh = par1DataInputStream.readInt();
    this.zCh = par1DataInputStream.readInt();
    this.includeInitialize = par1DataInputStream.readBoolean();
    this.yChMin = par1DataInputStream.readShort();
    this.yChMax = par1DataInputStream.readShort();
    this.tempLength = par1DataInputStream.readInt();
    // Grow the shared scratch buffer only when this packet needs more room.
    if (temp.length < this.tempLength) {
        temp = new byte[this.tempLength];
    }
    par1DataInputStream.readFully(temp, 0, this.tempLength);
    // Count the chunk sections flagged in the yChMin bitmask.
    int var2 = 0;
    int var3;
    for (var3 = 0; var3 < 16; ++var3) {
        var2 += this.yChMin >> var3 & 1;
    }
    // 12288 bytes per section; plus 256 biome bytes on full-chunk packets.
    var3 = 12288 * var2;
    if (this.includeInitialize) {
        var3 += 256;
    }
    this.field_73596_g = new byte[var3];
    Inflater var4 = new Inflater();
    var4.setInput(temp, 0, this.tempLength);
    try {
        var4.inflate(this.field_73596_g);
    } catch (DataFormatException var9) {
        throw new IOException("Bad compressed data format");
    } finally {
        var4.end();
    }
    // Spout Start
    SpoutClient.getInstance()
        .getPacketManager()
        .sendSpoutPacket(new PacketCustomBlockChunkOverride(xCh, zCh));
    // Spout End
}
/**
 * Inflates a deflate/zlib-compressed byte array. (The method name's
 * "defalte" typo is kept — it is part of the public interface.)
 *
 * <p>Fixes: the old javadoc documented a {@code len} parameter that does not
 * exist; the body called {@code reset()} on a brand-new Inflater (a no-op);
 * and the swallowed {@code close()} is gone — ByteArrayOutputStream.close()
 * does nothing.
 *
 * @param data the compressed bytes; the whole array is decompressed
 * @return the decompressed bytes
 * @throws java.util.zip.DataFormatException if {@code data} is not valid
 *     compressed data
 */
public static byte[] defalteDecompress(byte[] data) throws DataFormatException {
    ByteArrayOutputStream o = new ByteArrayOutputStream(data.length);
    Inflater decompresser = new Inflater();
    try {
        decompresser.setInput(data, 0, data.length);
        byte[] buf = new byte[1024];
        while (!decompresser.finished()) {
            int i = decompresser.inflate(buf);
            o.write(buf, 0, i);
        }
        return o.toByteArray();
    } finally {
        // Always release native zlib memory, even on a DataFormatException.
        decompresser.end();
    }
}
/**
 * Decompress the data bytes in the given message (in place).
 *
 * <p>Fix: {@code decompresser.end()} previously ran only on the success path,
 * leaking native zlib memory whenever a {@link DataFormatException} was
 * thrown; it now runs in a finally block.
 *
 * @param msg message whose FLAGS_COMPRESSED flag must be set; on success its
 *     data is replaced with the inflated bytes and the flag is cleared
 * @throws IllegalArgumentException if the message is not marked compressed
 * @throws PyroException if the compressed payload is malformed
 */
private void _decompressMessageData(Message msg) {
    if ((msg.flags & Message.FLAGS_COMPRESSED) == 0) {
        throw new IllegalArgumentException("message data is not compressed");
    }
    Inflater decompresser = new Inflater();
    decompresser.setInput(msg.data);
    ByteArrayOutputStream bos = new ByteArrayOutputStream(msg.data.length);
    byte[] buffer = new byte[8192];
    try {
        while (!decompresser.finished()) {
            int size = decompresser.inflate(buffer);
            bos.write(buffer, 0, size);
        }
        msg.data = bos.toByteArray();
        // Clear the flag only after the data was successfully replaced.
        msg.flags &= ~Message.FLAGS_COMPRESSED;
    } catch (DataFormatException e) {
        throw new PyroException("invalid compressed data: ", e);
    } finally {
        decompresser.end();
    }
}
/**
 * Inflates a zlib-compressed file name into a String.
 *
 * <p>NOTE(review): the {@code inflate()} return value (the actual byte count)
 * is ignored and a fixed 1024-byte buffer is used, so names longer than 1024
 * bytes would be silently truncated. The non-zero scan below also assumes the
 * encoded name contains no 0x00 bytes — confirm for the charset in use.
 */
@Deprecated
private static synchronized String unzipFileName(byte[] zippedFileName)
        throws DataFormatException, IOException {
    Inflater inflater = new Inflater();
    inflater.setInput(zippedFileName);
    byte[] buffer = new byte[1024];
    inflater.inflate(buffer);
    inflater.end();
    // Recover the string length by counting non-zero bytes in the buffer.
    int count = 0;
    for (int i = 0; i < buffer.length; i++) {
        if (buffer[i] != 0) count++;
    }
    byte[] result = new byte[count];
    for (int i = 0; i < result.length; i++) {
        result[i] = buffer[i];
    }
    return new String(result, PanboxConstants.STANDARD_CHARSET);
}
/**
 * Decompresses a zlib/deflate-compressed byte array.
 *
 * <p>Fixes: {@code end()} now runs in a finally block (it was skipped when
 * {@code inflate()} threw), and a truncated stream no longer spins forever —
 * it fails with an {@code EOFException} instead.
 *
 * @param data the compressed bytes
 * @return the decompressed bytes, or null when {@code data} is empty
 *     (preserved for existing callers)
 * @throws Exception if the data is not a valid, complete compressed stream
 */
public static byte[] decompress(byte[] data) throws Exception {
    if (data.length == 0) return null;
    Inflater inf = new Inflater();
    inf.setInput(data);
    ByteArrayOutputStream baos = new ByteArrayOutputStream(data.length);
    try {
        byte[] buf = new byte[1024];
        while (!inf.finished()) {
            int decompressed = inf.inflate(buf);
            if (decompressed == 0 && inf.needsInput()) {
                // Out of input but not finished: the stream is truncated.
                throw new java.io.EOFException("Unexpected end of compressed data");
            }
            baos.write(buf, 0, decompressed);
        }
    } finally {
        inf.end();
    }
    // ByteArrayOutputStream needs no close(); return the accumulated bytes.
    return baos.toByteArray();
}
/**
 * Reads a map-chunk packet (obfuscated/CraftBukkit variant): chunk
 * coordinates, section bitmasks, and a zlib payload that is inflated into
 * {@code inflatedBuffer}.
 */
public void a(DataInputStream datainputstream) throws IOException { // CraftBukkit - throws IOException
    this.a = datainputstream.readInt();
    this.b = datainputstream.readInt();
    this.e = datainputstream.readBoolean();
    this.c = datainputstream.readShort();
    this.d = datainputstream.readShort();
    this.size = datainputstream.readInt();
    // Reuse the shared static buffer, growing it only when necessary.
    if (buildBuffer.length < this.size) {
        buildBuffer = new byte[this.size];
    }
    datainputstream.readFully(buildBuffer, 0, this.size);
    // Count the sections present in the primary bitmask (field c).
    int i = 0;
    int j;
    for (j = 0; j < 16; ++j) {
        i += this.c >> j & 1;
    }
    // 12288 bytes per section, plus 256 biome bytes when flag e is set.
    j = 12288 * i;
    if (this.e) {
        j += 256;
    }
    this.inflatedBuffer = new byte[j];
    Inflater inflater = new Inflater();
    inflater.setInput(buildBuffer, 0, this.size);
    try {
        inflater.inflate(this.inflatedBuffer);
    } catch (DataFormatException dataformatexception) {
        throw new IOException("Bad compressed data format");
    } finally {
        inflater.end();
    }
}
/** Parse out and decompress the data part of a fileblock helper function. */
FileBlock parseData(byte buf[]) throws InvalidProtocolBufferException {
    FileBlock out = FileBlock.newInstance(type, indexdata);
    Fileformat.Blob blob = Fileformat.Blob.parseFrom(buf);
    if (blob.hasRaw()) {
        // Uncompressed blob: use the bytes as-is.
        out.data = blob.getRaw();
    } else if (blob.hasZlibData()) {
        // getRawSize() declares the uncompressed size, so one inflate() call
        // into an exact-size buffer suffices.
        byte buf2[] = new byte[blob.getRawSize()];
        Inflater decompresser = new Inflater();
        decompresser.setInput(blob.getZlibData().toByteArray());
        // decompresser.getRemaining();
        try {
            decompresser.inflate(buf2);
        } catch (DataFormatException e) {
            // Corrupt zlib payload is unrecoverable here; abort loudly.
            e.printStackTrace();
            throw new Error(e);
        }
        assert (decompresser.finished());
        decompresser.end();
        out.data = ByteString.copyFrom(buf2);
    }
    return out;
}
/**
 * Reads a bulk chunk packet (legacy obfuscated names): per-chunk coordinates
 * and bitmasks plus one shared zlib payload that is inflated once and sliced
 * into per-chunk byte arrays.
 */
public void func_73267_a(DataInputStream p_73267_1_) throws IOException {
    short word0 = p_73267_1_.readShort(); // number of chunks in this packet
    field_73585_g = p_73267_1_.readInt(); // compressed payload length
    field_73589_c = new int[word0];
    field_73586_d = new int[word0];
    field_73590_a = new int[word0];
    field_73588_b = new int[word0];
    field_73584_f = new byte[word0][];
    // Grow the shared static buffer only when needed.
    if (field_73591_h.length < field_73585_g) {
        field_73591_h = new byte[field_73585_g];
    }
    p_73267_1_.readFully(field_73591_h, 0, field_73585_g);
    // 0x30100 (196864) bytes per chunk — presumably the worst-case
    // uncompressed chunk size; confirm against the protocol.
    byte abyte0[] = new byte[0x30100 * word0];
    Inflater inflater = new Inflater();
    inflater.setInput(field_73591_h, 0, field_73585_g);
    try {
        inflater.inflate(abyte0);
    } catch (DataFormatException dataformatexception) {
        throw new IOException("Bad compressed data format");
    } finally {
        inflater.end();
    }
    int i = 0;
    for (int j = 0; j < word0; j++) {
        field_73589_c[j] = p_73267_1_.readInt();
        field_73586_d[j] = p_73267_1_.readInt();
        field_73590_a[j] = p_73267_1_.readShort();
        field_73588_b[j] = p_73267_1_.readShort();
        // Count the sections present in the primary bitmask.
        int k = 0;
        for (int l = 0; l < 16; l++) {
            k += field_73590_a[j] >> l & 1;
        }
        int i1 = 2048 * (5 * k) + 256;
        field_73584_f[j] = new byte[i1];
        System.arraycopy(abyte0, i, field_73584_f[j], 0, i1);
        i += i1;
    }
}
/**
 * Parses one Plucker PDB record. Record uid 1 carries the document
 * compression version (1 = Doc compression, 2 = zlib, per the branches
 * below); every other record is dispatched on its type byte (text, image,
 * tables, multi-image, ...).
 *
 * @param recordSize total size in bytes of this record, including headers
 */
private void readRecord(int recordSize) throws IOException {
    int uid = PdbUtil.readShort(myStream);
    if (uid == 1) {
        myCompressionVersion = (short) PdbUtil.readShort(myStream);
    } else {
        int paragraphs = PdbUtil.readShort(myStream);
        int size = PdbUtil.readShort(myStream); // TODO ??????
        int type = myStream.read();
        int flags = myStream.read();
        switch (type) {
            case 0: // text (TODO: found sample file and test this code)
            case 1: // compressed text
            {
                // Collect per-paragraph sizes; the 2 skipped bytes per entry
                // are unused here.
                ArrayList /*<Integer>*/ pars = new ArrayList();
                for (int i = 0; i < paragraphs; ++i) {
                    int pSize = PdbUtil.readShort(myStream);
                    pars.add(pSize);
                    myStream.skip(2);
                }
                boolean doProcess = false;
                if (type == 0) { // ?
                    byte[] buf = new byte[size];
                    doProcess = myStream.read(buf, 0, (int) size) == size;
                    if (doProcess) {
                        // TODO: use encoding!!!!
                        // TODO: don't create any new objects!!!!
                        myCharBuffer = new String(buf).toCharArray();
                    }
                } else if (myCompressionVersion == 1) {
                    byte[] buf = new byte[size];
                    doProcess = DocDecompressor.decompress(myStream, buf, recordSize - 8 - 4 * paragraphs) == size;
                    if (doProcess) {
                        myCharBuffer = new String(buf).toCharArray();
                    }
                } else if (myCompressionVersion == 2) {
                    // zlib-compressed text: payload = record minus 10 header
                    // bytes and 4 bytes per paragraph entry.
                    byte input[] = new byte[(int) (recordSize - 10 - 4 * paragraphs)];
                    final int inputSize = myStream.read(input);
                    Inflater decompressor = new Inflater();
                    decompressor.setInput(input, 0, inputSize);
                    byte output[] = new byte[size];
                    try {
                        doProcess = decompressor.inflate(output) == size;
                        decompressor.end();
                        myCharBuffer = new String(output, 0, size).toCharArray();
                    } catch (DataFormatException e) {
                        // TODO Auto-generated catch block
                        // e.printStackTrace();
                        System.out.println(e.getMessage());
                    }
                    // doProcess =
                    // ZLZDecompressor(recordSize - 10 - 4 * paragraphs).
                    // decompress(myStream, myCharBuffer, size) == size;
                }
                if (doProcess) {
                    // Register the record and parse its paragraphs into the model.
                    addHyperlinkLabel(fromNumber(uid));
                    myParagraphMap.put(uid, new ArrayList());
                    myParagraphVector = (ArrayList) myParagraphMap.get(uid);
                    processTextRecord(size, pars);
                    if ((flags & 0x1) == 0) {
                        // insertEndOfTextParagraph();
                        // setNewTextModel();
                    }
                }
                break;
            }
            case 2: // image
            case 3: // compressed image
            {
                final String mime = "image/palm";
                ZLImage image = null;
                if (type == 2) {
                    System.out.println("non-compressed image");
                    image = new PluckerFileImage(mime, myFile, myStream.offset(), recordSize - 8);
                } else if (myCompressionVersion == 1) {
                    System.out.println("DocCompressedImage");
                    image = new DocCompressedFileImage(mime, myFile, myStream.offset(), recordSize - 8);
                } else if (myCompressionVersion == 2) {
                    System.out.println("ZCompressedImage");
                    image = new ZCompressedFileImage(mime, myFile, myStream.offset() + 2, recordSize - 10);
                }
                if (image != null) {
                    addImage(fromNumber(uid), image);
                }
                break;
            }
            case 9: // category record is ignored
                break;
            case 10:
                short typeCode = (short) PdbUtil.readShort(myStream);
                break;
            case 11: // style sheet record is ignored
                break;
            case 12: // font page record is ignored
                break;
            case 13: // TODO: process tables
            case 14: // TODO: process tables
                break;
            case 15: // multiimage
            {
                short columns = (short) PdbUtil.readShort(myStream);
                short rows = (short) PdbUtil.readShort(myStream);
                System.out.println("multiimage");
                /*PluckerMultiImage image = new PluckerMultiImage(rows, columns, Model.getImageMap());
                for (int i = 0; i < size / 2 - 2; ++i) {
                    short us = (short)myStream.read();
                    PdbUtil.readShort(myStream, us);
                    image.addId(fromNumber(us));
                }
                addImage(fromNumber(uid), image); */
                break;
            }
            default: // std::cerr << "type = " << (int)type << "\n";
                break;
        }
    }
}
/**
 * Stores a returned inflater back into the cache, or frees it when the cache
 * is already at capacity. Synchronized because the cache array and its
 * counter are shared across threads.
 */
private static synchronized void releaseImpl(final Inflater i) {
    if (openInflaterCount == SZ) {
        // Cache full — dispose of the native zlib state instead of pooling it.
        i.end();
    } else {
        inflaterCache[openInflaterCount++] = i;
    }
}
@Override public ServerMessage inbound(Object message) throws Exception { Message messageSend = (Message) message; ServerMessageImpl coreMessage = new ServerMessageImpl(-1, messageSend.getSize()); String type = messageSend.getType(); if (type != null) { coreMessage.putStringProperty(new SimpleString("JMSType"), new SimpleString(type)); } coreMessage.setDurable(messageSend.isPersistent()); coreMessage.setExpiration(messageSend.getExpiration()); coreMessage.setPriority(messageSend.getPriority()); coreMessage.setTimestamp(messageSend.getTimestamp()); byte coreType = toCoreType(messageSend.getDataStructureType()); coreMessage.setType(coreType); ActiveMQBuffer body = coreMessage.getBodyBuffer(); ByteSequence contents = messageSend.getContent(); if (contents == null && coreType == org.apache.activemq.artemis.api.core.Message.TEXT_TYPE) { body.writeNullableString(null); } else if (contents != null) { boolean messageCompressed = messageSend.isCompressed(); if (messageCompressed) { coreMessage.putBooleanProperty(AMQ_MSG_COMPRESSED, messageCompressed); } switch (coreType) { case org.apache.activemq.artemis.api.core.Message.TEXT_TYPE: InputStream tis = new ByteArrayInputStream(contents); if (messageCompressed) { tis = new InflaterInputStream(tis); } DataInputStream tdataIn = new DataInputStream(tis); String text = MarshallingSupport.readUTF8(tdataIn); tdataIn.close(); body.writeNullableSimpleString(new SimpleString(text)); break; case org.apache.activemq.artemis.api.core.Message.MAP_TYPE: InputStream mis = new ByteArrayInputStream(contents); if (messageCompressed) { mis = new InflaterInputStream(mis); } DataInputStream mdataIn = new DataInputStream(mis); Map<String, Object> map = MarshallingSupport.unmarshalPrimitiveMap(mdataIn); mdataIn.close(); TypedProperties props = new TypedProperties(); loadMapIntoProperties(props, map); props.encode(body); break; case org.apache.activemq.artemis.api.core.Message.OBJECT_TYPE: if (messageCompressed) { try (InputStream ois = new 
InflaterInputStream(new ByteArrayInputStream(contents)); org.apache.activemq.util.ByteArrayOutputStream decompressed = new org.apache.activemq.util.ByteArrayOutputStream()) { byte[] buf = new byte[1024]; int n = ois.read(buf); while (n != -1) { decompressed.write(buf, 0, n); n = ois.read(); } // read done contents = decompressed.toByteSequence(); } } body.writeInt(contents.length); body.writeBytes(contents.data, contents.offset, contents.length); break; case org.apache.activemq.artemis.api.core.Message.STREAM_TYPE: InputStream sis = new ByteArrayInputStream(contents); if (messageCompressed) { sis = new InflaterInputStream(sis); } DataInputStream sdis = new DataInputStream(sis); int stype = sdis.read(); while (stype != -1) { switch (stype) { case MarshallingSupport.BOOLEAN_TYPE: body.writeByte(DataConstants.BOOLEAN); body.writeBoolean(sdis.readBoolean()); break; case MarshallingSupport.BYTE_TYPE: body.writeByte(DataConstants.BYTE); body.writeByte(sdis.readByte()); break; case MarshallingSupport.BYTE_ARRAY_TYPE: body.writeByte(DataConstants.BYTES); int slen = sdis.readInt(); byte[] sbytes = new byte[slen]; sdis.read(sbytes); body.writeInt(slen); body.writeBytes(sbytes); break; case MarshallingSupport.CHAR_TYPE: body.writeByte(DataConstants.CHAR); char schar = sdis.readChar(); body.writeShort((short) schar); break; case MarshallingSupport.DOUBLE_TYPE: body.writeByte(DataConstants.DOUBLE); double sdouble = sdis.readDouble(); body.writeLong(Double.doubleToLongBits(sdouble)); break; case MarshallingSupport.FLOAT_TYPE: body.writeByte(DataConstants.FLOAT); float sfloat = sdis.readFloat(); body.writeInt(Float.floatToIntBits(sfloat)); break; case MarshallingSupport.INTEGER_TYPE: body.writeByte(DataConstants.INT); body.writeInt(sdis.readInt()); break; case MarshallingSupport.LONG_TYPE: body.writeByte(DataConstants.LONG); body.writeLong(sdis.readLong()); break; case MarshallingSupport.SHORT_TYPE: body.writeByte(DataConstants.SHORT); body.writeShort(sdis.readShort()); break; 
case MarshallingSupport.STRING_TYPE: body.writeByte(DataConstants.STRING); String sstring = sdis.readUTF(); body.writeNullableString(sstring); break; case MarshallingSupport.BIG_STRING_TYPE: body.writeByte(DataConstants.STRING); String sbigString = MarshallingSupport.readUTF8(sdis); body.writeNullableString(sbigString); break; case MarshallingSupport.NULL: body.writeByte(DataConstants.STRING); body.writeNullableString(null); break; default: // something we don't know, ignore break; } stype = sdis.read(); } sdis.close(); break; case org.apache.activemq.artemis.api.core.Message.BYTES_TYPE: if (messageCompressed) { Inflater inflater = new Inflater(); try (org.apache.activemq.util.ByteArrayOutputStream decompressed = new org.apache.activemq.util.ByteArrayOutputStream()) { int length = ByteSequenceData.readIntBig(contents); contents.offset = 0; byte[] data = Arrays.copyOfRange(contents.getData(), 4, contents.getLength()); inflater.setInput(data); byte[] buffer = new byte[length]; int count = inflater.inflate(buffer); decompressed.write(buffer, 0, count); contents = decompressed.toByteSequence(); } catch (Exception e) { throw new IOException(e); } finally { inflater.end(); } } body.writeBytes(contents.data, contents.offset, contents.length); break; default: if (messageCompressed) { try (org.apache.activemq.util.ByteArrayOutputStream decompressed = new org.apache.activemq.util.ByteArrayOutputStream(); OutputStream os = new InflaterOutputStream(decompressed)) { os.write(contents.data, contents.offset, contents.getLength()); contents = decompressed.toByteSequence(); } catch (Exception e) { throw new IOException(e); } } body.writeBytes(contents.data, contents.offset, contents.length); break; } } // amq specific coreMessage.putLongProperty(AMQ_MSG_ARRIVAL, messageSend.getArrival()); coreMessage.putLongProperty(AMQ_MSG_BROKER_IN_TIME, messageSend.getBrokerInTime()); BrokerId[] brokers = messageSend.getBrokerPath(); if (brokers != null) { StringBuilder builder = new 
StringBuilder(); for (int i = 0; i < brokers.length; i++) { builder.append(brokers[i].getValue()); if (i != (brokers.length - 1)) { builder.append(","); // is this separator safe? } } coreMessage.putStringProperty(AMQ_MSG_BROKER_PATH, builder.toString()); } BrokerId[] cluster = messageSend.getCluster(); if (cluster != null) { StringBuilder builder = new StringBuilder(); for (int i = 0; i < cluster.length; i++) { builder.append(cluster[i].getValue()); if (i != (cluster.length - 1)) { builder.append(","); // is this separator safe? } } coreMessage.putStringProperty(AMQ_MSG_CLUSTER, builder.toString()); } coreMessage.putIntProperty(AMQ_MSG_COMMAND_ID, messageSend.getCommandId()); String corrId = messageSend.getCorrelationId(); if (corrId != null) { coreMessage.putStringProperty("JMSCorrelationID", corrId); } DataStructure ds = messageSend.getDataStructure(); if (ds != null) { ByteSequence dsBytes = marshaller.marshal(ds); dsBytes.compact(); coreMessage.putBytesProperty(AMQ_MSG_DATASTRUCTURE, dsBytes.data); } String groupId = messageSend.getGroupID(); if (groupId != null) { coreMessage.putStringProperty(AMQ_MSG_GROUP_ID, groupId); } coreMessage.putIntProperty(AMQ_MSG_GROUP_SEQUENCE, messageSend.getGroupSequence()); MessageId messageId = messageSend.getMessageId(); ByteSequence midBytes = marshaller.marshal(messageId); midBytes.compact(); coreMessage.putBytesProperty(AMQ_MSG_MESSAGE_ID, midBytes.data); ProducerId producerId = messageSend.getProducerId(); if (producerId != null) { ByteSequence producerIdBytes = marshaller.marshal(producerId); producerIdBytes.compact(); coreMessage.putBytesProperty(AMQ_MSG_PRODUCER_ID, producerIdBytes.data); } ByteSequence propBytes = messageSend.getMarshalledProperties(); if (propBytes != null) { propBytes.compact(); coreMessage.putBytesProperty(AMQ_MSG_MARSHALL_PROP, propBytes.data); // unmarshall properties to core so selector will work Map<String, Object> props = messageSend.getProperties(); // Map<String, Object> props = 
MarshallingSupport.unmarshalPrimitiveMap(new // DataInputStream(new ByteArrayInputStream(propBytes))); for (Entry<String, Object> ent : props.entrySet()) { Object value = ent.getValue(); try { coreMessage.putObjectProperty(ent.getKey(), value); } catch (ActiveMQPropertyConversionException e) { coreMessage.putStringProperty(ent.getKey(), value.toString()); } } } ActiveMQDestination replyTo = messageSend.getReplyTo(); if (replyTo != null) { ByteSequence replyToBytes = marshaller.marshal(replyTo); replyToBytes.compact(); coreMessage.putBytesProperty(AMQ_MSG_REPLY_TO, replyToBytes.data); } ConsumerId consumerId = messageSend.getTargetConsumerId(); String userId = messageSend.getUserID(); if (userId != null) { coreMessage.putStringProperty(AMQ_MSG_USER_ID, userId); } coreMessage.putBooleanProperty(AMQ_MSG_DROPPABLE, messageSend.isDroppable()); ActiveMQDestination origDest = messageSend.getOriginalDestination(); if (origDest != null) { ByteSequence origDestBytes = marshaller.marshal(origDest); origDestBytes.compact(); coreMessage.putBytesProperty(AMQ_MSG_ORIG_DESTINATION, origDestBytes.data); } return coreMessage; }
/**
 * Releases the native zlib resources held by this stream's inflater. The
 * inflater must not be used after this call.
 * NOTE(review): no underlying stream is closed here — confirm callers close
 * any wrapped stream separately.
 */
public void close() throws IOException {
    _inflater.end();
}
@Override public CompressedChunkMessage decode(ChannelBuffer buffer) throws IOException { int x = buffer.readInt(); int z = buffer.readInt(); boolean contiguous = buffer.readByte() == 1; short primaryBitMap = buffer.readShort(); short addBitMap = buffer.readShort(); int compressedSize = buffer.readInt(); int unused = buffer.readInt(); byte[] compressedData = new byte[compressedSize]; buffer.readBytes(compressedData); boolean[] hasAdditionalData = new boolean[MAX_SECTIONS]; byte[][] data = new byte[MAX_SECTIONS][]; int size = 0; for (int i = 0; i < MAX_SECTIONS; ++i) { if ((primaryBitMap & 1 << i) != 0) { // This chunk exists! Let's initialize the data for it. int sectionSize = SIXTEEN_CUBED * 5 / 2; if ((addBitMap & 1 << i) != 0) { hasAdditionalData[i] = true; sectionSize += SIXTEEN_CUBED / 2; } data[i] = new byte[sectionSize]; size += sectionSize; } } if (contiguous) { size += Chunk.CHUNK_SIZE * Chunk.CHUNK_SIZE; } byte[] uncompressedData = new byte[size]; Inflater inflater = new Inflater(); inflater.setInput(compressedData); inflater.getRemaining(); try { int uncompressed = inflater.inflate(uncompressedData); if (uncompressed == 0) { throw new IOException("Not all bytes uncompressed."); } } catch (DataFormatException e) { e.printStackTrace(); throw new IOException("Bad compressed data."); } finally { inflater.end(); } size = 0; for (byte[] sectionData : data) { if (sectionData != null && sectionData.length + size < uncompressedData.length) { System.arraycopy(uncompressedData, size, sectionData, 0, sectionData.length); size += sectionData.length; } } byte[] biomeData = new byte[Chunk.CHUNK_SIZE * Chunk.CHUNK_SIZE]; if (contiguous) { System.arraycopy(uncompressedData, size, biomeData, 0, biomeData.length); size += biomeData.length; } return new CompressedChunkMessage(x, z, contiguous, hasAdditionalData, unused, data, biomeData); }
/**
 * Releases the native zlib resources held by the wrapped inflater. The
 * inflater must not be used after this call.
 */
public void end() {
    inflater.end();
}