/**
 * Encode the uncompressed source image associated with this compressor
 * instance and return a buffer containing a YUV planar image.  See
 * {@link #encodeYUV(byte[], int)} for more detail.
 *
 * @param flags the bitwise OR of one or more of {@link TJ TJ.FLAG_*}
 *
 * @return a buffer containing a YUV planar image
 */
public byte[] encodeYUV(int flags) throws Exception {
  if (srcWidth < 1 || srcHeight < 1)
    throw new Exception(NO_ASSOC_ERROR);
  if (subsamp < 0)
    throw new Exception("Subsampling level not set");
  byte[] buf = new byte[TJ.bufSizeYUV(srcWidth, srcHeight, subsamp)];
  encodeYUV(buf, flags);
  return buf;
}
/**
 * Encode the uncompressed source image associated with this compressor
 * instance and output a YUV planar image to the given destination buffer.
 * This method uses the accelerated color conversion routines in TurboJPEG's
 * underlying codec to produce a planar YUV image that is suitable for direct
 * video display.  Specifically, if the chrominance components are subsampled
 * along the horizontal dimension, then the width of the luminance plane is
 * padded to the nearest multiple of 2 in the output image (same goes for the
 * height of the luminance plane, if the chrominance components are subsampled
 * along the vertical dimension.)  Also, each line of each plane in the output
 * image is padded to 4 bytes.  Although this will work with any subsampling
 * option, it is really only useful in combination with {@link TJ#SAMP_420},
 * which produces an image compatible with the I420 (AKA "YUV420P") format.
 *
 * @param dstBuf buffer that will receive the YUV planar image.  Use
 * {@link TJ#bufSizeYUV} to determine the appropriate size for this buffer
 * based on the image width, height, and level of chrominance subsampling.
 *
 * @param flags the bitwise OR of one or more of {@link TJ TJ.FLAG_*}
 */
public void encodeYUV(byte[] dstBuf, int flags) throws Exception {
  if (dstBuf == null || flags < 0)
    throw new Exception("Invalid argument in encodeYUV()");
  if (srcBuf == null)
    throw new Exception(NO_ASSOC_ERROR);
  if (subsamp < 0)
    throw new Exception("Subsampling level not set");
  encodeYUV(srcBuf, srcWidth, srcPitch, srcHeight, srcPixelFormat, dstBuf,
            subsamp, flags);
  compressedSize = TJ.bufSizeYUV(srcWidth, srcHeight, subsamp);
}
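/*
 * Usage sketch (illustration only, not part of the library): encode a
 * packed-RGB buffer to an I420-compatible YUV image using the two
 * encodeYUV() overloads above.  This assumes the companion setSourceImage()
 * and setSubsamp() setters on this class; rgbBuf, width, and height are
 * hypothetical caller-supplied values.
 */
private static byte[] exampleEncodeRGBToI420(byte[] rgbBuf, int width,
                                             int height) throws Exception {
  TJCompressor tjc = new TJCompressor();
  try {
    tjc.setSourceImage(rgbBuf, width, 0, height, TJ.PF_RGB);
    tjc.setSubsamp(TJ.SAMP_420);
    // Caller-managed destination buffer, sized with TJ.bufSizeYUV() ...
    byte[] dstBuf = new byte[TJ.bufSizeYUV(width, height, TJ.SAMP_420)];
    tjc.encodeYUV(dstBuf, 0);
    // ... or, equivalently, let the convenience overload allocate it:
    // byte[] dstBuf = tjc.encodeYUV(0);
    return dstBuf;
  } finally {
    tjc.close();
  }
}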
/**
 * Encode the uncompressed source image stored in <code>srcImage</code> and
 * return a buffer containing a YUV planar image.  See
 * {@link #encodeYUV(byte[], int)} for more detail.
 *
 * @param srcImage a <code>BufferedImage</code> instance containing RGB or
 * grayscale pixels to be encoded
 *
 * @param flags the bitwise OR of one or more of {@link TJ TJ.FLAG_*}
 *
 * @return a buffer containing a YUV planar image
 */
public byte[] encodeYUV(BufferedImage srcImage, int flags) throws Exception {
  if (subsamp < 0)
    throw new Exception("Subsampling level not set");
  int width = srcImage.getWidth();
  int height = srcImage.getHeight();
  byte[] buf = new byte[TJ.bufSizeYUV(width, height, subsamp)];
  encodeYUV(srcImage, buf, flags);
  return buf;
}
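/*
 * Usage sketch (illustration only): encode a BufferedImage to a YUV planar
 * buffer via the overload above.  A setSubsamp() setter on this class is
 * assumed; img is a hypothetical caller-supplied RGB or grayscale image.
 */
private static byte[] exampleEncodeImageToYUV(BufferedImage img)
    throws Exception {
  TJCompressor tjc = new TJCompressor();
  try {
    tjc.setSubsamp(TJ.SAMP_420);
    return tjc.encodeYUV(img, 0);
  } finally {
    tjc.close();
  }
}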
/**
 * Decompress the JPEG source image associated with this decompressor
 * instance and return a buffer containing a YUV planar image.  See
 * {@link #decompressToYUV(byte[], int)} for more detail.
 *
 * @param flags the bitwise OR of one or more of {@link TJ TJ.FLAG_*}
 *
 * @return a buffer containing a YUV planar image
 */
public byte[] decompressToYUV(int flags) throws Exception {
  if (flags < 0)
    throw new Exception("Invalid argument in decompressToYUV()");
  if (jpegWidth < 1 || jpegHeight < 1 || jpegSubsamp < 0)
    throw new Exception(NO_ASSOC_ERROR);
  if (jpegSubsamp >= TJ.NUMSAMP)
    throw new Exception("JPEG header information is invalid");
  byte[] buf = new byte[TJ.bufSizeYUV(jpegWidth, jpegHeight, jpegSubsamp)];
  decompressToYUV(buf, flags);
  return buf;
}
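/*
 * Usage sketch (illustration only): decompress a JPEG image directly to a
 * YUV planar buffer using the overload above.  This assumes the
 * TJDecompressor(byte[]) constructor from this class; jpegBuf is a
 * hypothetical caller-supplied JPEG image.
 */
private static byte[] exampleDecompressToYUV(byte[] jpegBuf) throws Exception {
  TJDecompressor tjd = new TJDecompressor(jpegBuf);
  try {
    // The returned buffer is sized internally with TJ.bufSizeYUV(), using
    // the width, height, and subsampling level from the JPEG header.
    return tjd.decompressToYUV(0);
  } finally {
    tjd.close();
  }
}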
/**
 * Encode the uncompressed source image stored in <code>srcImage</code> and
 * output a YUV planar image to the given destination buffer.  See
 * {@link #encodeYUV(byte[], int)} for more detail.
 *
 * @param srcImage a <code>BufferedImage</code> instance containing RGB or
 * grayscale pixels to be encoded
 *
 * @param dstBuf buffer that will receive the YUV planar image.  Use
 * {@link TJ#bufSizeYUV} to determine the appropriate size for this buffer
 * based on the image width, height, and level of chrominance subsampling.
 *
 * @param flags the bitwise OR of one or more of {@link TJ TJ.FLAG_*}
 */
public void encodeYUV(BufferedImage srcImage, byte[] dstBuf, int flags)
    throws Exception {
  if (srcImage == null || dstBuf == null || flags < 0)
    throw new Exception("Invalid argument in encodeYUV()");
  int width = srcImage.getWidth();
  int height = srcImage.getHeight();
  int pixelFormat;
  boolean intPixels = false;
  if (byteOrder == null)
    byteOrder = ByteOrder.nativeOrder();
  switch (srcImage.getType()) {
    case BufferedImage.TYPE_3BYTE_BGR:
      pixelFormat = TJ.PF_BGR;
      break;
    case BufferedImage.TYPE_4BYTE_ABGR:
    case BufferedImage.TYPE_4BYTE_ABGR_PRE:
      pixelFormat = TJ.PF_XBGR;
      break;
    case BufferedImage.TYPE_BYTE_GRAY:
      pixelFormat = TJ.PF_GRAY;
      break;
    case BufferedImage.TYPE_INT_BGR:
      if (byteOrder == ByteOrder.BIG_ENDIAN)
        pixelFormat = TJ.PF_XBGR;
      else
        pixelFormat = TJ.PF_RGBX;
      intPixels = true;
      break;
    case BufferedImage.TYPE_INT_RGB:
    case BufferedImage.TYPE_INT_ARGB:
    case BufferedImage.TYPE_INT_ARGB_PRE:
      if (byteOrder == ByteOrder.BIG_ENDIAN)
        pixelFormat = TJ.PF_XRGB;
      else
        pixelFormat = TJ.PF_BGRX;
      intPixels = true;
      break;
    default:
      throw new Exception("Unsupported BufferedImage format");
  }
  WritableRaster wr = srcImage.getRaster();
  if (subsamp < 0)
    throw new Exception("Subsampling level not set");
  if (intPixels) {
    SinglePixelPackedSampleModel sm =
      (SinglePixelPackedSampleModel)srcImage.getSampleModel();
    int pitch = sm.getScanlineStride();
    DataBufferInt db = (DataBufferInt)wr.getDataBuffer();
    int[] buf = db.getData();
    encodeYUV(buf, width, pitch, height, pixelFormat, dstBuf, subsamp, flags);
  } else {
    ComponentSampleModel sm = (ComponentSampleModel)srcImage.getSampleModel();
    int pixelSize = sm.getPixelStride();
    if (pixelSize != TJ.getPixelSize(pixelFormat))
      throw new Exception("Inconsistency between pixel format and pixel size in BufferedImage");
    int pitch = sm.getScanlineStride();
    DataBufferByte db = (DataBufferByte)wr.getDataBuffer();
    byte[] buf = db.getData();
    encodeYUV(buf, width, pitch, height, pixelFormat, dstBuf, subsamp, flags);
  }
  compressedSize = TJ.bufSizeYUV(width, height, subsamp);
}
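/*
 * Illustration (not part of the class) of the byte-order mapping in the
 * switch statement above: a TYPE_INT_RGB pixel is a packed 0x00RRGGBB int,
 * so its in-memory byte layout is B, G, R, X on a little-endian machine
 * (hence TJ.PF_BGRX) and X, R, G, B on a big-endian machine (hence
 * TJ.PF_XRGB).
 */
private static void printIntRGBByteLayout() {
  int pixel = 0x00112233;  // R = 0x11, G = 0x22, B = 0x33
  java.nio.ByteBuffer bb =
    java.nio.ByteBuffer.allocate(4).order(ByteOrder.nativeOrder());
  bb.putInt(pixel);
  // Prints "33 22 11 00" (B G R X) on little-endian hardware.
  for (byte b : bb.array())
    System.out.printf("%02x ", b);
  System.out.println();
}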
// Round-trip test: compress and decompress a w x h image in each of the
// given pixel formats, using the specified level of chrominance subsampling.
private static void doTest(int w, int h, int[] formats, int subsamp,
                           String baseName) throws Exception {
  TJCompressor tjc = null;
  TJDecompressor tjd = null;
  int size;
  byte[] dstBuf;

  // Allocate a destination buffer large enough for either a YUV planar image
  // or a worst-case JPEG image, depending on the test mode.
  if (yuv == YUVENCODE)
    dstBuf = new byte[TJ.bufSizeYUV(w, h, subsamp)];
  else
    dstBuf = new byte[TJ.bufSize(w, h, subsamp)];

  try {
    tjc = new TJCompressor();
    tjd = new TJDecompressor();

    for (int pf : formats) {
      // First pass is top-down; second pass repeats the test bottom-up
      // (except in YUV decode mode, which only needs one pass).
      for (int i = 0; i < 2; i++) {
        int flags = 0;
        if (subsamp == TJ.SAMP_422 || subsamp == TJ.SAMP_420 ||
            subsamp == TJ.SAMP_440)
          flags |= TJ.FLAG_FASTUPSAMPLE;
        if (i == 1) {
          if (yuv == YUVDECODE) {
            tjc.close();
            tjd.close();
            return;
          } else
            flags |= TJ.FLAG_BOTTOMUP;
        }
        size = compTest(tjc, dstBuf, w, h, pf, baseName, subsamp, 100, flags);
        decompTest(tjd, dstBuf, size, w, h, pf, baseName, subsamp, flags);
        // For the 4-byte pixel formats, also decompress to the corresponding
        // alpha-enabled format (e.g. RGBX -> RGBA).
        if (pf >= TJ.PF_RGBX && pf <= TJ.PF_XRGB && !bi)
          decompTest(tjd, dstBuf, size, w, h, pf + (TJ.PF_RGBA - TJ.PF_RGBX),
                     baseName, subsamp, flags);
      }
    }
  } catch (Exception e) {
    if (tjc != null) tjc.close();
    if (tjd != null) tjd.close();
    throw e;
  }
  if (tjc != null) tjc.close();
  if (tjd != null) tjd.close();
}