/** Create a lookup table to be used in the case of byte data. */
private void createLUT() {
    // Allocate memory for the data array references.
    int numBands = abscissas.length;
    byte[][] data = new byte[numBands][];

    // Generate the data for each band.
    for (int band = 0; band < numBands; band++) {
        // Allocate memory for this band.
        data[band] = new byte[256];

        // Cache the references to avoid extra indexing.
        byte[] table = data[band];
        float[] x = abscissas[band];
        float[] a = slopes[band];
        float[] b = intercepts[band];
        float yL = minOrdinates[band];
        float yH = maxOrdinates[band];

        // Initialize the lookup table data.
        for (int value = 0; value < 256; value++) {
            table[value] = ImageUtil.clampRoundByte(binarySearch(x, yL, yH, a, b, value));
        }
    }

    // Construct the lookup table.
    lut = new LookupTableJAI(data);
}
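// A minimal sketch (an assumption, not part of the original source) of how the byte
// compute path could consume the table built above: the per-pixel binarySearch is
// replaced by a single table lookup through LookupTableJAI. The method name
// computeRectByte and its Raster-based signature are illustrative only.
private void computeRectByte(Raster source, WritableRaster dest, Rectangle destRect) {
    // One 256-entry lookup per band replaces the piecewise evaluation per pixel.
    lut.lookup(source, dest, destRect);
}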
// Package accessible for SubsampleBinaryToGrayOpImage4x4, etc.
static ImageLayout layoutHelper(
        RenderedImage source, float scaleX, float scaleY, ImageLayout il, Map config) {
    ImageLayout layout = (il == null) ? new ImageLayout() : (ImageLayout) il.clone();

    // Compute dWidth and dHeight.
    // fTol, dWi and dHi must be the same as in computeDestInfo(..);
    // because this is a static method, a few lines of code are repeated.
    int srcWidth = source.getWidth();
    int srcHeight = source.getHeight();
    float f_dw = scaleX * srcWidth;
    float f_dh = scaleY * srcHeight;
    float fTol = .1F * Math.min(scaleX / (f_dw + 1.0F), scaleY / (f_dh + 1.0F));

    int dWi = (int) f_dw;
    int dHi = (int) f_dh;

    // Snap to an integer size in the almost-integer case, especially when the
    // size is truly integral but float calculation errors have crept in.
    if (Math.abs(Math.round(f_dw) - f_dw) < fTol) {
        dWi = Math.round(f_dw);
    }
    if (Math.abs(Math.round(f_dh) - f_dh) < fTol) {
        dHi = Math.round(f_dh);
    }

    // Set the top-left coordinate of the destination.
    layout.setMinX((int) (scaleX * source.getMinX()));
    layout.setMinY((int) (scaleY * source.getMinY()));

    layout.setWidth(dWi);
    layout.setHeight(dHi);

    // Sample model.
    SampleModel sm = layout.getSampleModel(null);
    if (sm == null
            || sm.getDataType() != DataBuffer.TYPE_BYTE
            || !(sm instanceof PixelInterleavedSampleModel
                    || sm instanceof SinglePixelPackedSampleModel && sm.getNumBands() == 1)) {
        // Width and height will be corrected in OpImage.layoutHelper.
        sm = new PixelInterleavedSampleModel(DataBuffer.TYPE_BYTE, 1, 1, 1, 1, new int[] {0});
    }
    layout.setSampleModel(sm);

    ColorModel cm = layout.getColorModel(null);
    if (cm == null || !JDKWorkarounds.areCompatibleDataModels(sm, cm)) {
        layout.setColorModel(ImageUtil.getCompatibleColorModel(sm, config));
    }

    return layout;
}
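// Worked example of the near-integer snap above (illustrative numbers, not from the
// original source): with scaleX = scaleY = 0.7f and a 300x300 source, float rounding
// gives f_dw ~= 209.9999964, so the plain cast would truncate to 209. Here
// fTol ~= 0.1f * (0.7f / 211) ~= 3.3e-4 while |Math.round(f_dw) - f_dw| ~= 3.6e-6,
// which is well inside the tolerance, so dWi snaps to the intended 210.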
private synchronized void initByteTable() {
    if (byteTable != null) {
        return;
    }

    /* Initialize byteTable. */
    byteTable = new byte[0x100][0x100];
    for (int j = 0; j < 0x100; j++) {
        byte[] t = byteTable[j];
        for (int i = 0; i < 0x100; i++) {
            t[i] = ImageUtil.clampBytePositive(j + i);
        }
    }
}
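// A minimal sketch (an assumption, not part of the original source) of the inner byte
// loop that byteTable serves: the clamped sum of two unsigned byte samples becomes a
// single two-dimensional table access, since ImageUtil.clampBytePositive was already
// applied when the table was filled. The variable names mirror the ushort loops below.
for (int w = 0; w < dstWidth; w++) {
    d[dstPixelOffset] = byteTable[d[dstPixelOffset] & 0xff][s[srcPixelOffset] & 0xff];

    dstPixelOffset += dstPixelStride;
    srcPixelOffset += srcPixelStride;
}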
private void computeRectUShort(RasterAccessor src, RasterAccessor dst) {
    int dstWidth = dst.getWidth();
    int dstHeight = dst.getHeight();
    int dstBands = dst.getNumBands();

    int dstLineStride = dst.getScanlineStride();
    int dstPixelStride = dst.getPixelStride();
    int[] dstBandOffsets = dst.getBandOffsets();
    short[][] dstData = dst.getShortDataArrays();

    int srcLineStride = src.getScanlineStride();
    int srcPixelStride = src.getPixelStride();
    int[] srcBandOffsets = src.getBandOffsets();
    short[][] srcData = src.getShortDataArrays();

    for (int b = 0; b < dstBands; b++) {
        short[] d = dstData[b];
        short[] s = srcData[b];

        int dstLineOffset = dstBandOffsets[b];
        int srcLineOffset = srcBandOffsets[b];

        // Cache the references to avoid extra indexing.
        float[] x = abscissas[b];
        float[] gain = slopes[b];
        float[] bias = intercepts[b];
        float yL = minOrdinates[b];
        float yH = maxOrdinates[b];

        for (int h = 0; h < dstHeight; h++) {
            int dstPixelOffset = dstLineOffset;
            int srcPixelOffset = srcLineOffset;

            dstLineOffset += dstLineStride;
            srcLineOffset += srcLineStride;

            for (int w = 0; w < dstWidth; w++) {
                d[dstPixelOffset] = ImageUtil.clampRoundUShort(
                        binarySearch(x, yL, yH, gain, bias, s[srcPixelOffset] & 0xFFFF));

                dstPixelOffset += dstPixelStride;
                srcPixelOffset += srcPixelStride;
            }
        }
    }
}
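// A hedged sketch (an assumption about the helper's contract, not the original
// implementation) of what binarySearch is expected to compute in createLUT and
// computeRectUShort above: find the piecewise-linear segment of the breakpoint array x
// that contains the input sample, apply that segment's slope and intercept, and fall
// back to the ordinate extremes yL/yH outside the breakpoint range.
private static float binarySearchSketch(
        float[] x, float yL, float yH, float[] gain, float[] bias, float value) {
    if (value < x[0]) {
        return yL;
    } else if (value >= x[x.length - 1]) {
        return yH;
    }

    // Standard binary search for the segment [x[lo], x[lo + 1]) containing value.
    int lo = 0;
    int hi = x.length - 1;
    while (hi - lo > 1) {
        int mid = (lo + hi) >>> 1;
        if (value < x[mid]) {
            hi = mid;
        } else {
            lo = mid;
        }
    }

    return gain[lo] * value + bias[lo];
}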
private void computeRectUShort(RasterAccessor[] srcs, RasterAccessor dst) {
    int dstWidth = dst.getWidth();
    int dstHeight = dst.getHeight();
    int dstBands = dst.getNumBands();

    int dstLineStride = dst.getScanlineStride();
    int dstPixelStride = dst.getPixelStride();
    int[] dstBandOffsets = dst.getBandOffsets();
    short[][] dstData = dst.getShortDataArrays();

    int numSrcs = getNumSources();

    for (int i = 0; i < numSrcs; i++) {
        RasterAccessor src = srcs[i];
        int srcLineStride = src.getScanlineStride();
        int srcPixelStride = src.getPixelStride();
        int[] srcBandOffsets = src.getBandOffsets();
        short[][] srcData = src.getShortDataArrays();

        for (int b = 0; b < dstBands; b++) {
            int dstLineOffset = dstBandOffsets[b];
            int srcLineOffset = srcBandOffsets[b];

            short[] d = dstData[b];
            short[] s = srcData[b];

            for (int h = 0; h < dstHeight; h++) {
                int dstPixelOffset = dstLineOffset;
                int srcPixelOffset = srcLineOffset;

                dstLineOffset += dstLineStride;
                srcLineOffset += srcLineStride;

                for (int w = 0; w < dstWidth; w++) {
                    d[dstPixelOffset] = ImageUtil.clampUShortPositive(
                            (d[dstPixelOffset] & 0xffff) + (s[srcPixelOffset] & 0xffff));

                    dstPixelOffset += dstPixelStride;
                    srcPixelOffset += srcPixelStride;
                }
            }
        }
    }
}
/**
 * Gets a RenderedImage that represents a rendering of this image using a given RenderContext.
 * This is the most general way to obtain a rendering of a RenderableImage.
 *
 * <p>This method does not validate the sources and parameters supplied in the <code>
 * ParameterBlock</code> given at construction against the specification of the operation this
 * node represents. It is the caller's responsibility to ensure that the data in the <code>
 * ParameterBlock</code> are suitable for this operation. Otherwise, some kind of exception or
 * error will occur. Invoking this method will cause any <code>DeferredData</code> parameters to
 * be evaluated.
 *
 * <p>The <code>RenderContext</code> may contain a <code>Shape</code> that represents the
 * area-of-interest (aoi). If the aoi is specified, it is still legal to return an image that is
 * larger than this aoi. Therefore, by default, the aoi, if specified, is ignored during
 * rendering.
 *
 * <p>Any hints in the <code>RenderContext</code> will be merged with any set on the node via
 * <code>setRenderingHints()</code>, with the hints in the <code>RenderContext</code> taking
 * precedence.
 *
 * @param renderContext the RenderContext to use to produce the rendering.
 * @return a RenderedImage containing the rendered data.
 */
public RenderedImage createRendering(RenderContext renderContext) {
    findCRIF();

    // Clone the original ParameterBlock; if the ParameterBlock
    // contains RenderableImage sources, they will be replaced by
    // RenderedImages.
    ParameterBlock nodePB = nodeSupport.getParameterBlock();
    Vector nodeParams = ImageUtil.evaluateParameters(nodePB.getParameters());
    ParameterBlock renderedPB =
            new ParameterBlock((Vector) nodePB.getSources().clone(), nodeParams);
    Vector sources = getRenderableSources();

    try {
        // This assumes that if there is no renderable source, then there
        // is a rendered source in the ParameterBlock.

        // If there are any hints set on the node, create a new
        // RenderContext which merges them with those in the RenderContext
        // passed in, with the passed-in hints taking precedence.
        RenderContext rcIn = renderContext;
        RenderingHints nodeHints = nodeSupport.getRenderingHints();
        if (nodeHints != null) {
            RenderingHints hints = renderContext.getRenderingHints();
            RenderingHints mergedHints = JAI.mergeRenderingHints(nodeHints, hints);
            if (mergedHints != hints) {
                rcIn = new RenderContext(
                        renderContext.getTransform(),
                        renderContext.getAreaOfInterest(),
                        mergedHints);
            }
        }

        if (sources != null) {
            Vector renderedSources = new Vector();
            for (int i = 0; i < sources.size(); i++) {
                RenderContext rcOut = crif.mapRenderContext(i, rcIn, renderedPB, this);
                RenderableImage src = (RenderableImage) sources.elementAt(i);
                RenderedImage renderedImage = src.createRendering(rcOut);
                if (renderedImage == null) {
                    return null;
                }

                // Add this rendered image to the ParameterBlock's
                // list of RenderedImages.
                renderedSources.addElement(renderedImage);
            }

            if (renderedSources.size() > 0) {
                renderedPB.setSources(renderedSources);
            }
        }

        RenderedImage rendering = crif.create(rcIn, renderedPB);

        // Replace with the actual rendering if a RenderedOp.
        if (rendering instanceof RenderedOp) {
            rendering = ((RenderedOp) rendering).getRendering();
        }

        // Copy properties to the rendered node.
        if (rendering != null && rendering instanceof WritablePropertySource) {
            String[] propertyNames = getPropertyNames();
            if (propertyNames != null) {
                WritablePropertySource wps = (WritablePropertySource) rendering;

                // Save the names of rendered properties.
                HashSet wpsNameSet = null;
                String[] wpsNames = wps.getPropertyNames();
                if (wpsNames != null) {
                    wpsNameSet = new HashSet();
                    for (int j = 0; j < wpsNames.length; j++) {
                        wpsNameSet.add(new CaselessStringKey(wpsNames[j]));
                    }
                }

                // Copy any properties not already defined by the rendering.
                for (int j = 0; j < propertyNames.length; j++) {
                    String name = propertyNames[j];
                    if (wpsNameSet == null || !wpsNameSet.contains(new CaselessStringKey(name))) {
                        Object value = getProperty(name);
                        if (value != null && value != java.awt.Image.UndefinedProperty) {
                            wps.setProperty(name, value);
                        }
                    }
                }
            }
        }

        return rendering;
    } catch (ArrayIndexOutOfBoundsException e) {
        // This should never happen.
        return null;
    }
}
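// A minimal usage sketch (an assumption, not part of the original source) showing how a
// caller typically exercises createRendering: build a renderable chain, then request a
// concrete rendering at a chosen device size via a RenderContext. The "invert" operation
// and the 512-pixel target width are illustrative only; renderableSource stands for any
// existing RenderableImage.
ParameterBlock pb = new ParameterBlock();
pb.addSource(renderableSource);
RenderableOp op = JAI.createRenderable("invert", pb);

// Map the rendering-independent coordinates onto a 512-pixel-wide device grid; any hints
// set on the node via setRenderingHints() are merged with those in the RenderContext.
double scale = 512.0 / op.getWidth();
AffineTransform usr2dev = AffineTransform.getScaleInstance(scale, scale);
RenderedImage rendering = op.createRendering(new RenderContext(usr2dev));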
/** Gets the minimum Y coordinate of the rendering-independent image data. */
public float getMinY() {
    findCRIF();
    ParameterBlock paramBlock = ImageUtil.evaluateParameters(nodeSupport.getParameterBlock());
    Rectangle2D boundingBox = crif.getBounds2D(paramBlock);
    return (float) boundingBox.getY();
}