/**
 * Initialises this object from the given line string and transform, then caches the
 * effective per-axis scale factors implied by the transform.
 *
 * @param ls the source line string
 * @param at the affine transform that will be applied to the geometry
 * @param generalize whether the geometry should be generalized
 * @param maxDistance the maximum deviation allowed when generalizing
 */
public void init(LineString ls, AffineTransform at, boolean generalize, float maxDistance) {
    _init(ls, at, generalize, maxDistance);
    // The effective scale along each axis is the length of the transformed basis
    // vector: sqrt(m00^2 + m01^2) for x and sqrt(m11^2 + m10^2) for y.
    double m00 = at.getScaleX();
    double m01 = at.getShearX();
    double m10 = at.getShearY();
    double m11 = at.getScaleY();
    xScale = (float) Math.sqrt((m00 * m00) + (m01 * m01));
    yScale = (float) Math.sqrt((m11 * m11) + (m10 * m10));
}
public void setTextTransform(AffineTransform affineTransform) { // look to see if we have shear and thus text that has been rotated, if so we insert a page // break if (previousTextTransform != null && currentLine != null) { // hard round as we're just looking for a 90 degree shift in writing direction. // if found we clear the current work so we can start a new word. if ((previousTextTransform.getShearX() < 0 && (int) affineTransform.getShearX() > 0) || (previousTextTransform.getShearX() > 0 && (int) affineTransform.getShearX() < 0) || (previousTextTransform.getShearY() < 0 && (int) affineTransform.getShearY() > 0) || (previousTextTransform.getShearY() > 0 && (int) affineTransform.getShearY() < 0)) { currentLine.clearCurrentWord(); } } previousTextTransform = affineTransform; }
/** * Cuando soltamos el botón del ratón desplazamos la imagen a la posición de destino calculando el * extent nuevamente. */ public void mouseReleased(MouseEvent e) throws BehaviorException { if (!isActiveTool()) return; if (e.getButton() == MouseEvent.BUTTON1 && isMoveable) { FLyrRasterSE lyr = grBehavior.getLayer(); if (lyr == null) return; ViewPort vp = grBehavior.getMapControl().getMapContext().getViewPort(); ptoFin = vp.toMapPoint(e.getPoint()); // Asignamos la nueva matriz de transformación a la capa AffineTransform atOld = lyr.getAffineTransform(); AffineTransform atNew = null; double distX = ptoFin.getX() - ptoIni.getX(); double distY = ptoFin.getY() - ptoIni.getY(); // La nueva matriz de transformación es la vieja más la distancia desplazada atNew = new AffineTransform( atOld.getScaleX(), atOld.getShearY(), atOld.getShearX(), atOld.getScaleY(), atOld.getTranslateX() + distX, atOld.getTranslateY() + distY); lyr.setAffineTransform(atNew); grBehavior.getMapControl().getMapContext().invalidate(); isMoveable = false; super.mouseReleased(e); } }
/** * Renders an image on the device * * @param tx the image location on the screen, x coordinate * @param ty the image location on the screen, y coordinate * @param img the image * @param rotation the image rotatation */ private static void renderImage( Graphics2D graphics, double x, double y, Image image, double rotation, float opacity) { AffineTransform temp = graphics.getTransform(); AffineTransform markAT = new AffineTransform(); Point2D mapCentre = new java.awt.geom.Point2D.Double(x, y); Point2D graphicCentre = new java.awt.geom.Point2D.Double(); temp.transform(mapCentre, graphicCentre); markAT.translate(graphicCentre.getX(), graphicCentre.getY()); double shearY = temp.getShearY(); double scaleY = temp.getScaleY(); double originalRotation = Math.atan(shearY / scaleY); markAT.rotate(rotation); graphics.setTransform(markAT); graphics.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, opacity)); // we moved the origin to the centre of the image. graphics.drawImage(image, -image.getWidth(null) / 2, -image.getHeight(null) / 2, null); graphics.setTransform(temp); return; }
/**
 * Creates a paint context for tiling a texture of the given size.
 *
 * <p>The supplied transform maps texture space to device space; it is inverted
 * here so that device pixels can be stepped back into texture space. The
 * per-step increments (across a row, and down to the next row) are split into
 * integer parts and fixed-point fractional error terms for the fill loop.
 *
 * @param cm the color model of the destination
 * @param xform the texture-to-device transform (not modified unless singular)
 * @param bWidth the texture tile width
 * @param bHeight the texture tile height
 * @param maxw the maximum scanline width this context will be asked to fill
 */
TexturePaintContext(ColorModel cm, AffineTransform xform, int bWidth, int bHeight, int maxw) {
    this.colorModel = getInternedColorModel(cm);
    this.bWidth = bWidth;
    this.bHeight = bHeight;
    this.maxWidth = maxw;
    try {
        // Invert so we can walk from device coordinates back into texture space.
        xform = xform.createInverse();
    } catch (NoninvertibleTransformException e) {
        // Singular transform: collapse everything to the texture origin rather
        // than failing — every device pixel maps to (translateX, translateY).
        xform.setToScale(0, 0);
    }
    // Per-pixel steps, wrapped into the tile via mod() so they stay in range.
    this.incXAcross = mod(xform.getScaleX(), bWidth);
    this.incYAcross = mod(xform.getShearY(), bHeight);
    this.incXDown = mod(xform.getShearX(), bWidth);
    this.incYDown = mod(xform.getScaleY(), bHeight);
    this.xOrg = xform.getTranslateX();
    this.yOrg = xform.getTranslateY();
    // Split each increment into an integer step plus a fixed-point fractional
    // error term (accumulated during the fill to decide when to bump the step).
    this.colincx = (int) incXAcross;
    this.colincy = (int) incYAcross;
    this.colincxerr = fractAsInt(incXAcross);
    this.colincyerr = fractAsInt(incYAcross);
    this.rowincx = (int) incXDown;
    this.rowincy = (int) incYDown;
    this.rowincxerr = fractAsInt(incXDown);
    this.rowincyerr = fractAsInt(incYDown);
}
/** Asigna las coordenadas temporales en el dialogo. */ public void assignTransformToDialog() { AffineTransform atNew = null; double distX = ptoFin.getX() - ptoIni.getX(); double distY = ptoFin.getY() - ptoIni.getY(); AffineTransform atOld = grBehavior.getLayer().getAffineTransform(); // La nueva matriz de transformación es la vieja más la distancia desplazada atNew = new AffineTransform( atOld.getScaleX(), atOld.getShearY(), atOld.getShearX(), atOld.getScaleY(), atOld.getTranslateX() + distX, atOld.getTranslateY() + distY); trIO.loadTransform(atNew); }
/**
 * Zooms the plot while dragging: the scale factor is the ratio of the drag
 * point's distance from the view origin to the previous drag point's distance,
 * applied multiplicatively to the plot transform's scale and translation terms.
 *
 * @param e the drag event; only handled when its modifiers match {@code modifiers}
 */
public void mouseDragged(MouseEvent e) {
    // NOTE(review): MouseEvent.getModifiers() is deprecated in newer JDKs in
    // favour of getModifiersEx() — confirm which the rest of this class uses.
    if (e.getModifiers() == modifiers) {
        Point2D pEndScale = e.getPoint();
        double scale = pEndScale.distance(viewOrigin) / pStartScale.distance(viewOrigin);
        AffineTransform transform = super.plot.getTransform();
        // setTransform argument order is (m00, m10, m01, m11, m02, m12): scale
        // the diagonal and the translation, keep the shear terms unchanged.
        // NOTE(review): the shear terms are NOT multiplied by the scale — this
        // is only a uniform zoom if the plot transform is shear-free; confirm.
        transform.setTransform(
                transform.getScaleX() * scale,
                transform.getShearY(),
                transform.getShearX(),
                transform.getScaleY() * scale,
                transform.getTranslateX() * scale,
                transform.getTranslateY() * scale);
        // Use the current point as the baseline for the next drag increment.
        pStartScale = pEndScale;
        super.plot.setTransform(transform);
    }
}
void setModelTransformation(final AffineTransform gridToCRS) { // See pag 28 of the spec for an explanation final double[] modelTransformation = new double[16]; modelTransformation[0] = gridToCRS.getScaleX(); modelTransformation[1] = gridToCRS.getShearX(); modelTransformation[2] = 0; modelTransformation[3] = gridToCRS.getTranslateX(); modelTransformation[4] = gridToCRS.getShearY(); modelTransformation[5] = gridToCRS.getScaleY(); modelTransformation[6] = 0; modelTransformation[7] = gridToCRS.getTranslateY(); modelTransformation[8] = 0; modelTransformation[9] = 0; modelTransformation[10] = 0; modelTransformation[11] = 0; modelTransformation[12] = 0; modelTransformation[13] = 0; modelTransformation[14] = 0; modelTransformation[15] = 1; nTransform = createTiffField(getModelTransformationTag()); final Node nValues = createTiffDoubles(modelTransformation); nTransform.appendChild(nValues); }
/**
 * Applies this op's affine transform ({@code at}) to {@code src}, writing the
 * result into {@code dst} via the native IPP routine when the raster layout is
 * supported; otherwise delegates to {@link #slowFilter}.
 *
 * <p>NOTE(review): {@code at} and {@code iType} are fields of the enclosing op —
 * presumably the configured transform and interpolation type; confirm.
 *
 * @param src the source raster
 * @param dst the destination raster
 * @param imageType the {@code BufferedImage} type constant describing the rasters
 * @return the native call's status code, or -1 if the data buffer type is unknown
 */
@SuppressWarnings("unused")
private int ippFilter(Raster src, WritableRaster dst, int imageType) {
    int srcStride, dstStride;
    boolean skipChannel = false;
    int channels;
    int offsets[] = null;
    switch (imageType) {
        case BufferedImage.TYPE_INT_RGB:
        case BufferedImage.TYPE_INT_BGR:
        {
            // 4 bytes per pixel but the fourth (unused) channel is skipped.
            channels = 4;
            srcStride = src.getWidth() * 4;
            dstStride = dst.getWidth() * 4;
            skipChannel = true;
            break;
        }
        case BufferedImage.TYPE_INT_ARGB:
        case BufferedImage.TYPE_INT_ARGB_PRE:
        case BufferedImage.TYPE_4BYTE_ABGR:
        case BufferedImage.TYPE_4BYTE_ABGR_PRE:
        {
            channels = 4;
            srcStride = src.getWidth() * 4;
            dstStride = dst.getWidth() * 4;
            break;
        }
        case BufferedImage.TYPE_BYTE_GRAY:
        case BufferedImage.TYPE_BYTE_INDEXED:
        {
            channels = 1;
            srcStride = src.getWidth();
            dstStride = dst.getWidth();
            break;
        }
        case BufferedImage.TYPE_3BYTE_BGR:
        {
            channels = 3;
            srcStride = src.getWidth() * 3;
            dstStride = dst.getWidth() * 3;
            break;
        }
        case BufferedImage.TYPE_USHORT_GRAY: // TODO - could be done in native code?
        case BufferedImage.TYPE_USHORT_565_RGB:
        case BufferedImage.TYPE_USHORT_555_RGB:
        case BufferedImage.TYPE_BYTE_BINARY:
        {
            // No IPP path for these layouts.
            return slowFilter(src, dst);
        }
        default:
        {
            SampleModel srcSM = src.getSampleModel();
            SampleModel dstSM = dst.getSampleModel();
            if (srcSM instanceof PixelInterleavedSampleModel
                    && dstSM instanceof PixelInterleavedSampleModel) {
                // Check PixelInterleavedSampleModel: only byte data is supported.
                if (srcSM.getDataType() != DataBuffer.TYPE_BYTE
                        || dstSM.getDataType() != DataBuffer.TYPE_BYTE) {
                    return slowFilter(src, dst);
                }
                channels = srcSM.getNumBands(); // Have IPP functions for 1, 3 and 4 channels
                if (channels != 1 && channels != 3 && channels != 4) {
                    return slowFilter(src, dst);
                }
                int dataTypeSize = DataBuffer.getDataTypeSize(srcSM.getDataType()) / 8;
                srcStride = ((ComponentSampleModel) srcSM).getScanlineStride() * dataTypeSize;
                dstStride = ((ComponentSampleModel) dstSM).getScanlineStride() * dataTypeSize;
            } else if (srcSM instanceof SinglePixelPackedSampleModel
                    && dstSM instanceof SinglePixelPackedSampleModel) {
                // Check SinglePixelPackedSampleModel
                SinglePixelPackedSampleModel sppsm1 = (SinglePixelPackedSampleModel) srcSM;
                SinglePixelPackedSampleModel sppsm2 = (SinglePixelPackedSampleModel) dstSM;
                // No IPP function for this type
                if (sppsm1.getDataType() == DataBuffer.TYPE_USHORT) {
                    return slowFilter(src, dst);
                }
                channels = sppsm1.getNumBands();
                // Have IPP functions for 1, 3 and 4 channels
                if (channels != 1 && channels != 3 && channels != 4) {
                    return slowFilter(src, dst);
                }
                // Check compatibility of sample models
                if (sppsm1.getDataType() != sppsm2.getDataType()
                        || !Arrays.equals(sppsm1.getBitOffsets(), sppsm2.getBitOffsets())
                        || !Arrays.equals(sppsm1.getBitMasks(), sppsm2.getBitMasks())) {
                    return slowFilter(src, dst);
                }
                // Every band must be exactly 8 bits wide for the native path.
                for (int i = 0; i < channels; i++) {
                    if (sppsm1.getSampleSize(i) != 8) {
                        return slowFilter(src, dst);
                    }
                }
                // Packed 3-channel data occupies 4 bytes per pixel natively.
                if (channels == 3) {
                    channels = 4;
                }
                int dataTypeSize = DataBuffer.getDataTypeSize(sppsm1.getDataType()) / 8;
                srcStride = sppsm1.getScanlineStride() * dataTypeSize;
                dstStride = sppsm2.getScanlineStride() * dataTypeSize;
            } else {
                return slowFilter(src, dst);
            }
            // Fill offsets if there's a child raster: translate from the shared
            // data buffer's origin to each raster's own origin.
            if (src.getParent() != null || dst.getParent() != null) {
                if (src.getSampleModelTranslateX() != 0
                        || src.getSampleModelTranslateY() != 0
                        || dst.getSampleModelTranslateX() != 0
                        || dst.getSampleModelTranslateY() != 0) {
                    offsets = new int[4];
                    offsets[0] = -src.getSampleModelTranslateX() + src.getMinX();
                    offsets[1] = -src.getSampleModelTranslateY() + src.getMinY();
                    offsets[2] = -dst.getSampleModelTranslateX() + dst.getMinX();
                    offsets[3] = -dst.getSampleModelTranslateY() + dst.getMinY();
                }
            }
        }
    }
    // Flatten the transform into the six affine coefficients for the native call.
    double m00 = at.getScaleX();
    double m01 = at.getShearX();
    double m02 = at.getTranslateX();
    double m10 = at.getShearY();
    double m11 = at.getScaleY();
    double m12 = at.getTranslateY();
    Object srcData, dstData;
    AwtImageBackdoorAccessor dbAccess = AwtImageBackdoorAccessor.getInstance();
    try {
        srcData = dbAccess.getData(src.getDataBuffer());
        dstData = dbAccess.getData(dst.getDataBuffer());
    } catch (IllegalArgumentException e) {
        return -1; // Unknown data buffer type
    }
    return ippAffineTransform(
            m00, m01, m02, m10, m11, m12,
            srcData, src.getWidth(), src.getHeight(), srcStride,
            dstData, dst.getWidth(), dst.getHeight(), dstStride,
            iType, channels, skipChannel, offsets);
}
/**
 * Executes the raster to vector process.
 *
 * @param coverage the input grid coverage
 * @param band the coverage band to process; defaults to 0 if {@code null}
 * @param insideEdges whether boundaries between raster regions with data values (ie. not NODATA)
 *     should be returned; defaults to {@code true} if {@code null}
 * @param roi optional polygonal {@code Geometry} to define a sub-area within which vectorizing
 *     will be done
 * @param noDataValues optional list of values to treat as NODATA; regions with these values will
 *     not be represented in the returned features; if {@code null}, 0 is used as the single
 *     NODATA value; ignored if {@code classificationRanges} is provided
 * @param classificationRanges optional list of {@code Range} objects to pre-classify the input
 *     coverage prior to vectorizing; values not included in the list will be treated as NODATA;
 *     values in the first {@code Range} are classified to 1, those in the second {@code Range} to
 *     2 etc.
 * @param progressListener an optional listener
 * @return a feature collection where each feature has a {@code Polygon} ("the_geom") and an
 *     attribute "value" with value of the corresponding region in either {@code coverage} or the
 *     classified coverage (when {@code classificationRanges} is used)
 * @throws ProcessException if the coverage is missing or the band index is invalid
 */
@DescribeResult(name = "result", description = "The polygon feature collection")
public SimpleFeatureCollection execute(
        @DescribeParameter(name = "data", description = "The raster to be used as the source")
                GridCoverage2D coverage,
        @DescribeParameter(
                        name = "band",
                        description = "(Integer, default=0) the source image band to process",
                        min = 0)
                Integer band,
        @DescribeParameter(
                        name = "insideEdges",
                        description = "(Boolean, default=true) whether to vectorize boundaries between adjacent regions with non-outside values",
                        min = 0)
                Boolean insideEdges,
        @DescribeParameter(
                        name = "roi",
                        description = "The geometry used to delineate the area of interest in model space",
                        min = 0)
                Geometry roi,
        // NOTE(review): the description string below is missing its opening '(' —
        // left untouched here since it is runtime-visible metadata.
        @DescribeParameter(
                        name = "nodata",
                        description = "Collection<Number>, default={0}) values to treat as NODATA",
                        collectionType = Number.class,
                        min = 0)
                Collection<Number> noDataValues,
        @DescribeParameter(
                        name = "ranges",
                        description =
                                "The list of ranges to be applied. \n"
                                        + "Each range is expressed as 'OPEN START ; END CLOSE'\n"
                                        + "where 'OPEN:=(|[, CLOSE=)|]',\n "
                                        + "START is the low value, or nothing to imply -INF,\n"
                                        + "CLOSE is the biggest value, or nothing to imply +INF",
                        collectionType = Range.class,
                        min = 0)
                List<Range> classificationRanges,
        ProgressListener progressListener)
        throws ProcessException {
    //
    // initial checks
    //
    if (coverage == null) {
        throw new ProcessException("Invalid input, source grid coverage should be not null");
    }
    if (band == null) {
        band = 0;
    } else if (band < 0 || band >= coverage.getNumSampleDimensions()) {
        throw new ProcessException("Invalid input, invalid band number:" + band);
    }
    // do we have classification ranges?
    boolean hasClassificationRanges =
            classificationRanges != null && classificationRanges.size() > 0;
    // apply the classification by setting 0 as the default value and using 1, ..., numClasses for
    // the other classes.
    // we use 0 also as the noData for the resulting coverage.
    if (hasClassificationRanges) {
        final RangeLookupProcess lookup = new RangeLookupProcess();
        coverage = lookup.execute(coverage, band, classificationRanges, progressListener);
    }
    // Use noDataValues to set the "outsideValues" parameter of the Vectorize
    // operation unless classificationRanges are in use, in which case the
    // noDataValues arg is ignored.
    List<Number> outsideValues = new ArrayList<Number>();
    if (noDataValues != null && !hasClassificationRanges) {
        outsideValues.addAll(noDataValues);
    } else {
        outsideValues.add(0);
    }
    //
    // GRID TO WORLD preparation
    //
    final AffineTransform mt2D =
            (AffineTransform)
                    coverage.getGridGeometry().getGridToCRS2D(PixelOrientation.UPPER_LEFT);
    // get the rendered image
    final RenderedImage raster = coverage.getRenderedImage();
    // perform jai operation
    ParameterBlockJAI pb = new ParameterBlockJAI("Vectorize");
    pb.setSource("source0", raster);
    if (roi != null) {
        pb.setParameter("roi", CoverageUtilities.prepareROI(roi, mt2D));
    }
    pb.setParameter("band", band);
    pb.setParameter("outsideValues", outsideValues);
    if (insideEdges != null) {
        pb.setParameter("insideEdges", insideEdges);
    }
    // pb.setParameter("removeCollinear", false);
    final RenderedOp dest = JAI.create("Vectorize", pb);
    @SuppressWarnings("unchecked")
    final Collection<Polygon> prop =
            (Collection<Polygon>) dest.getProperty(VectorizeDescriptor.VECTOR_PROPERTY_NAME);
    // wrap as a feature collection and return
    final SimpleFeatureType featureType =
            CoverageUtilities.createFeatureType(coverage, Polygon.class);
    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(featureType);
    int i = 0;
    final ListFeatureCollection featureCollection = new ListFeatureCollection(featureType);
    // JTS AffineTransformation ctor order is (m00, m01, m02, m10, m11, m12):
    // used to map each vectorized polygon from grid space into model space.
    final AffineTransformation jtsTransformation =
            new AffineTransformation(
                    mt2D.getScaleX(),
                    mt2D.getShearX(),
                    mt2D.getTranslateX(),
                    mt2D.getShearY(),
                    mt2D.getScaleY(),
                    mt2D.getTranslateY());
    for (Polygon polygon : prop) {
        // get value
        Double value = (Double) polygon.getUserData();
        polygon.setUserData(null);
        // filter coordinates in place
        polygon.apply(jtsTransformation);
        // create feature and add to list
        builder.set("the_geom", polygon);
        builder.set("value", value);
        featureCollection.add(builder.buildFeature(String.valueOf(i++)));
    }
    // return value
    return featureCollection;
}
@DescribeResult(name = "result", description = "The contours feature collection") public SimpleFeatureCollection execute( @DescribeParameter(name = "data", description = "The raster to be used as the source") GridCoverage2D gc2d, @DescribeParameter( name = "band", description = "The source image band to process", min = 0, max = 1) Integer band, @DescribeParameter(name = "levels", description = "Values for which to generate contours") double[] levels, @DescribeParameter( name = "interval", description = "Interval between contour values (ignored if levels arg is supplied)", min = 0) Double interval, @DescribeParameter( name = "simplify", description = "Values for which to generate contours", min = 0) Boolean simplify, @DescribeParameter( name = "smooth", description = "Values for which to generate contours", min = 0) Boolean smooth, @DescribeParameter( name = "roi", description = "The geometry used to delineate the area of interest in model space", min = 0) Geometry roi, ProgressListener progressListener) throws ProcessException { // // initial checks // if (gc2d == null) { throw new ProcessException("Invalid input, source grid coverage should be not null"); } if (band != null && (band < 0 || band >= gc2d.getNumSampleDimensions())) { throw new ProcessException("Invalid input, invalid band number:" + band); } boolean hasValues = !(levels == null || levels.length == 0); if (!hasValues && interval == null) { throw new ProcessException("One between interval and values must be valid"); } // switch to geophisics if necessary gc2d = gc2d.view(ViewType.GEOPHYSICS); // // GRID TO WORLD preparation // final AffineTransform mt2D = (AffineTransform) gc2d.getGridGeometry().getGridToCRS2D(PixelOrientation.CENTER); // get the list of nodata, if any List<Object> noDataList = new ArrayList<Object>(); for (GridSampleDimension sd : gc2d.getSampleDimensions()) { // grab all the explicit nodata final double[] sdNoData = sd.getNoDataValues(); if (sdNoData != null) { for (double nodata : 
sdNoData) { noDataList.add(nodata); } } // handle also readers setting up nodata in a category with a specific name if (sd.getCategories() != null) { for (Category cat : sd.getCategories()) { if (cat.getName().equals(NO_DATA)) { final NumberRange<? extends Number> catRange = cat.getRange(); if (catRange.getMinimum() == catRange.getMaximum()) { noDataList.add(catRange.getMinimum()); } else { Range<Double> noData = new Range<Double>( catRange.getMinimum(), catRange.isMinIncluded(), catRange.getMaximum(), catRange.isMaxIncluded()); noDataList.add(noData); } } } } } // get the rendered image final RenderedImage raster = gc2d.getRenderedImage(); // perform jai operation ParameterBlockJAI pb = new ParameterBlockJAI("Contour"); pb.setSource("source0", raster); if (roi != null) { pb.setParameter("roi", CoverageUtilities.prepareROI(roi, mt2D)); } if (band != null) { pb.setParameter("band", band); } if (interval != null) { pb.setParameter("interval", interval); } else { final ArrayList<Double> elements = new ArrayList<Double>(levels.length); for (double level : levels) elements.add(level); pb.setParameter("levels", elements); } if (simplify != null) { pb.setParameter("simplify", simplify); } if (smooth != null) { pb.setParameter("smooth", smooth); } if (noDataList != null) { pb.setParameter("nodata", noDataList); } final RenderedOp dest = JAI.create("Contour", pb); @SuppressWarnings("unchecked") final Collection<LineString> prop = (Collection<LineString>) dest.getProperty(ContourDescriptor.CONTOUR_PROPERTY_NAME); // wrap as a feature collection and return final SimpleFeatureType schema = CoverageUtilities.createFeatureType(gc2d, LineString.class); final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(schema); int i = 0; final ListFeatureCollection featureCollection = new ListFeatureCollection(schema); final AffineTransformation jtsTransformation = new AffineTransformation( mt2D.getScaleX(), mt2D.getShearX(), mt2D.getTranslateX(), mt2D.getShearY(), mt2D.getScaleY(), 
mt2D.getTranslateY()); for (LineString line : prop) { // get value Double value = (Double) line.getUserData(); line.setUserData(null); // filter coordinates in place line.apply(jtsTransformation); // create feature and add to list builder.set("the_geom", line); builder.set("value", value); featureCollection.add(builder.buildFeature(String.valueOf(i++))); } // return value return featureCollection; }
/** * This method is responsible fro creating a world file to georeference an image given the image * bounding box and the image geometry. The name of the file is composed by the name of the image * file with a proper extension, depending on the format (see WorldImageFormat). The projection is * in the world file. * * @param imageFile * @param baseFile Basename and path for this image. * @param ext * @throws IOException In case we cannot create the world file. * @throws TransformException */ private void createWorldFile( final AffineTransform transform, final String ext, final String baseFile) throws IOException { // ///////////////////////////////////////////////////////////////////// // // CRS information // // //////////////////////////////////////////////////////////////////// // final AffineTransform gridToWorld = (AffineTransform) // gc.getGridGeometry ().getGridToCRS (); final boolean lonFirst = (XAffineTransform.getSwapXY(transform) != -1); // ///////////////////////////////////////////////////////////////////// // // World File values // It is worthwhile to note that we have to keep into account the fact // that the axis could be swapped (LAT,lon) therefore when getting // xPixSize and yPixSize we need to look for it a the right place // inside the grid to world transform. // // //////////////////////////////////////////////////////////////////// final double xPixelSize = (lonFirst) ? transform.getScaleX() : transform.getShearY(); final double rotation1 = (lonFirst) ? transform.getShearX() : transform.getScaleX(); final double rotation2 = (lonFirst) ? transform.getShearY() : transform.getScaleY(); final double yPixelSize = (lonFirst) ? 
transform.getScaleY() : transform.getShearX(); final double xLoc = transform.getTranslateX(); final double yLoc = transform.getTranslateY(); // ///////////////////////////////////////////////////////////////////// // // writing world file // // //////////////////////////////////////////////////////////////////// final StringBuffer buff = new StringBuffer(baseFile); // looking for another extension if (ext.substring(0, 4).equalsIgnoreCase(".tif")) { buff.append(".tfw"); } else if (ext.substring(0, 4).equalsIgnoreCase(".png")) { buff.append(".pgw"); } else if (ext.substring(0, 4).equalsIgnoreCase(".jpg") || ext.substring(0, 4).equalsIgnoreCase("jpeg")) { buff.append(".jpw"); } else if (ext.substring(0, 4).equalsIgnoreCase(".gif")) { buff.append(".gfw"); } else if (ext.substring(0, 4).equalsIgnoreCase(".bmp")) { buff.append(".bpw"); } else { buff.append(".tffw"); } final File worldFile = new File(buff.toString()); LOG.debug("Writing world file: " + worldFile); final PrintWriter out = new PrintWriter(new FileOutputStream(worldFile)); try { out.println(xPixelSize); out.println(rotation1); out.println(rotation2); out.println(yPixelSize); out.println(xLoc); out.println(yLoc); out.flush(); } finally { out.close(); } }