@Override
  public int print(Graphics g, PageFormat pf, int page) throws PrinterException {
    if (page > 0) {
      return NO_SUCH_PAGE;
    }

    // get the size of the page
    double pageWidth = pf.getImageableWidth();
    double pageHeight = pf.getImageableHeight();
    double myWidth = this.getWidth(); // - borderWidth * 2;
    double myHeight = this.getHeight(); // - borderWidth * 2;
    double scaleX = pageWidth / myWidth;
    double scaleY = pageHeight / myHeight;
    // Use the smaller scale factor so the whole component fits on the page
    // while preserving its aspect ratio.
    double minScale = Math.min(scaleX, scaleY);

    Graphics2D g2d = (Graphics2D) g;
    g2d.translate(pf.getImageableX(), pf.getImageableY());
    g2d.scale(minScale, minScale);

    drawPlot(g2d); // draw through the translated and scaled graphics context

    return PAGE_EXISTS;
  }
Example #2
  /**
   * The printing interface.
   *
   * @param g the graphic context.
   * @param pf the page format.
   * @param page the page number.
   * @return PAGE_EXISTS if the page has to be printed.
   * @throws PrinterException if a printing error occurs.
   */
  public int print(Graphics g, PageFormat pf, int page) throws PrinterException {
    int npages = 0;

    // This might be explained as follows:
    // 1 - The Java printing system normally works with an internal
    // resolution which is 72 dpi (probably inspired by Postscript).
    // 2 - To have a sufficient resolution, this is increased 16 times by
    // using the scale method of the graphics object associated with the
    // printer. This gives a 72 dpi * 16 = 1152 dpi resolution.
    // 3 - The 0.127 mm pitch used in FidoCadJ corresponds to a 200 dpi
    // resolution. Calculating 1152 dpi / 200 dpi gives the 5.76 constant.

    double xscale = 1.0 / 16; // Use 1152 logical units per inch,
    double yscale = 1.0 / 16; // since the standard resolution is 72 dpi.
    double zoom = 5.76; // Draw at 1:1 scale in the 1152 dpi space.

    Graphics2D g2d = (Graphics2D) g;

    // User (0,0) is typically outside the imageable area, so we must
    // translate by the X and Y values in the PageFormat to avoid clipping

    if (printMirror) {
      g2d.translate(pf.getImageableX() + pf.getImageableWidth(), pf.getImageableY());
      g2d.scale(-xscale, yscale);

    } else {
      g2d.translate(pf.getImageableX(), pf.getImageableY());
      g2d.scale(xscale, yscale);
    }

    int printerWidth = (int) pf.getImageableWidth() * 16;

    // Perform an adjustment if we need to fit the drawing to the page.
    if (printFitToPage) {
      MapCoordinates zoomm =
          DrawingSize.calculateZoomToFit(
              cc.dmp, (int) pf.getImageableWidth() * 16, (int) pf.getImageableHeight() * 16, false);
      zoom = zoomm.getXMagnitude();
    }

    MapCoordinates m = new MapCoordinates();

    m.setMagnitudes(zoom, zoom);

    PointG o = new PointG(0, 0);

    int imageWidth = DrawingSize.getImageSize(cc.dmp, zoom, false, o).width;
    npages = (int) Math.floor((imageWidth - 1) / (double) printerWidth);

    // Check if we need more than one page
    if (printerWidth < imageWidth) {
      g2d.translate(-(printerWidth * page), 0);
    }

    // Check if printing is finished.
    if (page > npages) {
      return NO_SUCH_PAGE;
    }
    // Now we perform our rendering
    cc.drawingAgent.draw(new Graphics2DSwing(g2d), m);

    /* tell the caller that this page is part of the printed document */
    return PAGE_EXISTS;
  }
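
For context, a Printable implementation such as the two print() methods above is normally handed to a java.awt.print.PrinterJob, which calls print() repeatedly with increasing page indices until NO_SUCH_PAGE is returned. Below is a minimal sketch of that hand-off; the PrintLauncher class and the drawing parameter are illustrative names, not part of the original code.

import java.awt.print.PageFormat;
import java.awt.print.Printable;
import java.awt.print.PrinterException;
import java.awt.print.PrinterJob;

public class PrintLauncher {
  /** Sends the given Printable to a printer chosen by the user. */
  public static void printDrawing(Printable drawing) {
    PrinterJob job = PrinterJob.getPrinterJob();
    PageFormat pf = job.defaultPage();   // default paper size, margins and orientation
    job.setPrintable(drawing, pf);       // the job will call drawing.print(g, pf, page)
    if (job.printDialog()) {             // let the user pick and configure a printer
      try {
        job.print();                     // invokes print() once per page until NO_SUCH_PAGE
      } catch (PrinterException ex) {
        ex.printStackTrace();            // a real application should report this to the user
      }
    }
  }
}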
  @Override
  public void run() {
    amIActive = true;

    String shapefile = null;
    String inputFieldsString = null;
    String[] fieldNames = null;
    double z;
    int numFields;
    int progress = 0;
    int lastProgress = 0;
    int row;
    int a, i, j;
    double[] fieldAverages;
    double[] fieldTotals;
    boolean standardizedPCA = false;
    int numberOfComponentsOutput = 0;

    if (args.length <= 0) {
      showFeedback("Plugin parameters have not been set.");
      return;
    }

    // read the input parameters

    inputFieldsString = args[0];
    standardizedPCA = Boolean.parseBoolean(args[1]);
    if (args[2].toLowerCase().contains("not")) { // not specified
      numberOfComponentsOutput = 0;
    } else {
      numberOfComponentsOutput = Integer.parseInt(args[2]);
    }

    try {
      // deal with the input fields
      String[] inputs = inputFieldsString.split(";");
      shapefile = inputs[0];
      numFields = inputs.length - 1;
      fieldNames = new String[numFields];
      System.arraycopy(inputs, 1, fieldNames, 0, numFields);

      // read the appropriate field from the dbf file into an array
      AttributeTable table = new AttributeTable(shapefile.replace(".shp", ".dbf"));
      int numRecs = table.getNumberOfRecords();
      DBFField[] fields = table.getAllFields();
      ArrayList<Integer> PCAFields = new ArrayList<>();
      for (j = 0; j < fieldNames.length; j++) {
        for (i = 0; i < fields.length; i++) {
          if (fields[i].getName().equals(fieldNames[j])
              && (fields[i].getDataType() == DBFField.DBFDataType.NUMERIC
                  || fields[i].getDataType() == DBFField.DBFDataType.FLOAT)) {
            PCAFields.add(i);
          }
        }
      }

      if (numFields != PCAFields.size()) {
        showFeedback(
            "Not all of the specified database fields were found in the file or "
                + "a field of a non-numerical type was selected.");
        return;
      }

      double[][] fieldArray = new double[numRecs][numFields];
      Object[] rec;
      for (i = 0; i < numRecs; i++) {
        rec = table.getRecord(i);
        for (j = 0; j < numFields; j++) {
          fieldArray[i][j] = (Double) (rec[PCAFields.get(j)]);
        }
        if (cancelOp) {
          cancelOperation();
          return;
        }
        progress = (int) (100f * i / (numRecs - 1));
        if (progress != lastProgress) {
          updateProgress("Reading data:", progress);
        }
        lastProgress = progress;
      }

      fieldAverages = new double[numFields];
      fieldTotals = new double[numFields];

      // Calculate the means
      for (row = 0; row < numRecs; row++) {
        for (i = 0; i < numFields; i++) {
          fieldTotals[i] += fieldArray[row][i];
        }
      }

      for (i = 0; i < numFields; i++) {
        fieldAverages[i] = fieldTotals[i] / numRecs;
      }

      // Calculate the covariance matrix and total deviations
      double[] fieldTotalDeviation = new double[numFields];
      double[][] covariances = new double[numFields][numFields];
      double[][] correlationMatrix = new double[numFields][numFields];

      for (row = 0; row < numRecs; row++) {
        for (i = 0; i < numFields; i++) {
          fieldTotalDeviation[i] +=
              (fieldArray[row][i] - fieldAverages[i]) * (fieldArray[row][i] - fieldAverages[i]);
          for (a = 0; a < numFields; a++) {
            covariances[i][a] +=
                (fieldArray[row][i] - fieldAverages[i]) * (fieldArray[row][a] - fieldAverages[a]);
          }
        }
        if (cancelOp) {
          cancelOperation();
          return;
        }
        progress = (int) (100f * row / (numRecs - 1));
        if (progress != lastProgress) {
          updateProgress("Calculating covariances:", progress);
        }
        lastProgress = progress;
      }

      for (i = 0; i < numFields; i++) {
        for (a = 0; a < numFields; a++) {
          correlationMatrix[i][a] =
              covariances[i][a] / (Math.sqrt(fieldTotalDeviation[i] * fieldTotalDeviation[a]));
        }
      }

      for (i = 0; i < numFields; i++) {
        for (a = 0; a < numFields; a++) {
          covariances[i][a] = covariances[i][a] / (numRecs - 1);
        }
      }

      // Calculate the eigenvalues and eigenvectors
      Matrix cov = null;
      if (!standardizedPCA) {
        cov = new Matrix(covariances);
      } else {
        cov = new Matrix(correlationMatrix);
      }
      EigenvalueDecomposition eigen = cov.eig();
      double[] eigenvalues;
      Matrix eigenvectors;
      SortedSet<PrincipalComponent> principalComponents;
      eigenvalues = eigen.getRealEigenvalues();
      eigenvectors = eigen.getV();

      double[][] vecs = eigenvectors.getArray();
      int numComponents = eigenvectors.getColumnDimension(); // same as num rows.
      principalComponents = new TreeSet<>();
      for (i = 0; i < numComponents; i++) {
        double[] eigenvector = new double[numComponents];
        for (j = 0; j < numComponents; j++) {
          eigenvector[j] = vecs[j][i];
        }
        principalComponents.add(new PrincipalComponent(eigenvalues[i], eigenvector));
      }

      double totalEigenvalue = 0;
      for (i = 0; i < numComponents; i++) {
        totalEigenvalue += eigenvalues[i];
      }

      double[][] explainedVarianceArray = new double[numComponents][2]; // percent and cum. percent
      j = 0;
      for (PrincipalComponent pc : principalComponents) {
        explainedVarianceArray[j][0] = pc.eigenValue / totalEigenvalue * 100.0;
        if (j == 0) {
          explainedVarianceArray[j][1] = explainedVarianceArray[j][0];
        } else {
          explainedVarianceArray[j][1] =
              explainedVarianceArray[j][0] + explainedVarianceArray[j - 1][1];
        }
        j++;
      }

      DecimalFormat df1 = new DecimalFormat("0.00");
      DecimalFormat df2 = new DecimalFormat("0.0000");
      DecimalFormat df3 = new DecimalFormat("0.000000");
      DecimalFormat df4 = new DecimalFormat("0.000");
      String ret = "Principal Component Analysis Report:\n\n";
      ret += "Component\tExplained Var.\tCum. %\tEigenvalue\tEigenvector\n";
      j = 0;
      for (PrincipalComponent pc : principalComponents) {

        String explainedVariance = df1.format(explainedVarianceArray[j][0]);
        String explainedCumVariance = df1.format(explainedVarianceArray[j][1]);
        double[] eigenvector = pc.eigenVector.clone();
        ret +=
            (j + 1)
                + "\t"
                + explainedVariance
                + "\t"
                + explainedCumVariance
                + "\t"
                + df2.format(pc.eigenValue)
                + "\t";
        String eigenvec = "[";
        for (i = 0; i < numComponents; i++) {
          if (i < numComponents - 1) {
            eigenvec += df3.format(eigenvector[i]) + ", ";
          } else {
            eigenvec += df3.format(eigenvector[i]);
          }
        }
        eigenvec += "]";
        ret += eigenvec + "\n";

        if (j < numberOfComponentsOutput) {
          DBFField field = new DBFField();
          field.setName("COMP" + (j + 1));
          field.setDataType(DBFField.DBFDataType.NUMERIC);
          field.setFieldLength(10);
          field.setDecimalCount(4);
          table.addField(field);

          for (row = 0; row < numRecs; row++) {
            z = 0;
            for (i = 0; i < numFields; i++) {
              z += fieldArray[row][i] * eigenvector[i];
            }

            Object[] recData = table.getRecord(row);
            recData[recData.length - 1] = Double.valueOf(z);
            table.updateRecord(row, recData);

            if (cancelOp) {
              cancelOperation();
              return;
            }
            progress = (int) (100f * row / (numRecs - 1));
            if (progress != lastProgress) {
              updateProgress("Outputing Component " + (j + 1) + ":", progress);
            }
            lastProgress = progress;
          }
        }
        j++;
      }

      // calculate the factor loadings.
      ret += "\nFactor Loadings:\n";
      ret += "\t\tComponent\n\t";
      for (i = 0; i < numComponents; i++) {
        ret += (i + 1) + "\t";
      }
      ret += "\n";
      double loading = 0;
      if (!standardizedPCA) {
        for (i = 0; i < numFields; i++) {
          ret += "field " + (i + 1) + "\t";
          for (PrincipalComponent pc : principalComponents) {
            double[] eigenvector = pc.eigenVector.clone();
            double ev = pc.eigenValue;
            loading = (eigenvector[i] * Math.sqrt(ev)) / Math.sqrt(covariances[i][i]);
            ret += df4.format(loading) + "\t";
          }
          ret += "\n";
        }
      } else {
        for (i = 0; i < numFields; i++) {
          ret += "field " + (i + 1) + "\t";
          for (PrincipalComponent pc : principalComponents) {
            double[] eigenvector = pc.eigenVector.clone();
            double ev = pc.eigenValue;
            loading = (eigenvector[i] * Math.sqrt(ev));
            ret += df4.format(loading) + "\t";
          }
          ret += "\n";
        }
      }

      ret += "\n";
      for (i = 0; i < numFields; i++) {
        ret += "field " + (i + 1) + "\t" + fieldNames[i] + "\n";
      }

      returnData(ret);

      if (numberOfComponentsOutput > 0) {
        returnData(table.getFileName());
      }

      ScreePlot plot = new ScreePlot(explainedVarianceArray);
      returnData(plot);

    } catch (OutOfMemoryError oe) {
      myHost.showFeedback("An out-of-memory error has occurred during operation.");
    } catch (Exception e) {
      myHost.showFeedback("An error has occurred during operation. See log file for details.");
      myHost.logException("Error in " + getDescriptiveName(), e);
    } finally {
      updateProgress("Progress: ", 0);
      // tells the main application that this process is completed.
      amIActive = false;
      myHost.pluginComplete();
    }
  }
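
The eigendecomposition step in the method above appears to use the JAMA linear-algebra library (Jama.Matrix and Jama.EigenvalueDecomposition). The following is a minimal, self-contained sketch of just that step on a hand-built 2x2 covariance matrix; the EigenDemo class and the matrix values are illustrative only.

import Jama.EigenvalueDecomposition;
import Jama.Matrix;

public class EigenDemo {
  public static void main(String[] args) {
    // Illustrative 2x2 covariance matrix (not taken from any real data set).
    double[][] covariances = {
      {4.0, 2.0},
      {2.0, 3.0}
    };

    Matrix cov = new Matrix(covariances);
    EigenvalueDecomposition eigen = cov.eig();

    double[] eigenvalues = eigen.getRealEigenvalues(); // variance along each principal axis
    double[][] vecs = eigen.getV().getArray();         // eigenvectors are stored column-wise

    for (int i = 0; i < eigenvalues.length; i++) {
      System.out.printf("Component %d: eigenvalue = %.4f, eigenvector = [%.4f, %.4f]%n",
          i + 1, eigenvalues[i], vecs[0][i], vecs[1][i]);
    }
  }
}

JAMA does not document any particular ordering for the returned eigenvalues, which is presumably why the plugin above collects the components into a SortedSet before reporting them.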