Example no. 1
0
 /**
  * Tracks frames-per-second: counts invocations and, each time the wall-clock
  * second changes, publishes the tally into {@code lastFPS} and restarts counting.
  */
 private void chkFPS() {
   long nowSeconds = System.currentTimeMillis() / 1000;
   // First invocation ever: remember the starting second and begin the count.
   if (fpsCount == 0) {
     fpsTime = nowSeconds;
     fpsCount = 1;
     return;
   }
   fpsCount++;
   // A new second has begun: publish the tally (including this frame) and reset.
   if (nowSeconds != fpsTime) {
     lastFPS = fpsCount;
     fpsCount = 1;
     fpsTime = nowSeconds;
   }
 }
 /** Asks the user to confirm, then terminates the JVM if they accept. */
 public void actionPerformed(ActionEvent e) {
   final String prompt = "Would you like to exit this application?";
   int choice = JOptionPane.showConfirmDialog(OpenMapCFrame.this, prompt);
   if (choice == JOptionPane.OK_OPTION) {
     System.exit(0);
   }
 }
Example no. 3
0
  /**
   * Loads the burglary case data, packs each case's coordinates into an OpenCV
   * matrix, trains the Gaussian-mixture model on it, and loads precinct / RPA
   * geometry from the KML parser.
   *
   * @throws FileNotFoundException if the burglary data file cannot be opened
   */
  private void initializeData() throws FileNotFoundException {
    nClusters = Integer.parseInt(cookies.getInstance().getString("nClusters"));
    map = pastCrime.burglaryCases("burglarywithprecinct.txt");
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // One matrix row per crime: column 0 = latitude, column 1 = longitude.
    Mat sample = new Mat(map.size(), 2, CvType.CV_64F);
    int row = 0;
    for (crimeCase c : map.values()) {
      sample.put(row, 0, c.getLa());
      sample.put(row, 1, c.getlo());
      row++;
    }

    myModel = new gausianMixture(sample, nClusters, 1000, "burgulary");
    precincts = kmlParser.getPrecinct();
    rpaAggregate = new RpaAggregate(kmlParser.getRPAs());
  }
  /**
   * Performs a principal component analysis on numeric fields of a shapefile's DBF table.
   *
   * <p>Parameters are read from {@code args}: {@code args[0]} holds
   * "shapefile;field1;field2;..."; {@code args[1]} is the standardized-PCA flag (decompose the
   * correlation matrix instead of the covariance matrix); {@code args[2]} is the number of
   * component scores to append to the attribute table, or contains "not" when unspecified.
   * Produces a text report, optionally writes component scores back to the table, and
   * returns a scree plot.
   */
  @Override
  public void run() {
    amIActive = true;

    String shapefile = null;
    String inputFieldsString = null;
    String[] fieldNames = null;
    double z;
    int numFields;
    int progress = 0;
    int lastProgress = 0;
    int row;
    int a, i, j;
    double[] fieldAverages;
    double[] fieldTotals;
    boolean standardizedPCA = false;
    int numberOfComponentsOutput = 0;

    if (args.length == 0) {
      showFeedback("Plugin parameters have not been set.");
      return;
    }

    // read the input parameters
    inputFieldsString = args[0];
    standardizedPCA = Boolean.parseBoolean(args[1]);
    if (args[2].toLowerCase().contains("not")) { // not specified
      numberOfComponentsOutput = 0;
    } else {
      numberOfComponentsOutput = Integer.parseInt(args[2]);
    }

    try {
      // deal with the input fields
      String[] inputs = inputFieldsString.split(";");
      shapefile = inputs[0];
      numFields = inputs.length - 1;
      fieldNames = new String[numFields];
      System.arraycopy(inputs, 1, fieldNames, 0, numFields);

      // read the appropriate field from the dbf file into an array
      AttributeTable table = new AttributeTable(shapefile.replace(".shp", ".dbf"));
      int numRecs = table.getNumberOfRecords();
      // FIX: with a single record the old "numRecs - 1" denominator divided by zero.
      int progressDenom = Math.max(numRecs - 1, 1);
      DBFField[] fields = table.getAllFields();
      ArrayList<Integer> PCAFields = new ArrayList<>();
      for (j = 0; j < fieldNames.length; j++) {
        for (i = 0; i < fields.length; i++) {
          if (fields[i].getName().equals(fieldNames[j])
              && (fields[i].getDataType() == DBFField.DBFDataType.NUMERIC
                  || fields[i].getDataType() == DBFField.DBFDataType.FLOAT)) {
            PCAFields.add(i);
          }
        }
      }

      if (numFields != PCAFields.size()) {
        showFeedback(
            "Not all of the specified database fields were found in the file or "
                + "a field of a non-numerical type was selected.");
        return;
      }

      double[][] fieldArray = new double[numRecs][numFields];
      Object[] rec;
      for (i = 0; i < numRecs; i++) {
        rec = table.getRecord(i);
        for (j = 0; j < numFields; j++) {
          fieldArray[i][j] = (Double) (rec[PCAFields.get(j)]);
        }
        if (cancelOp) {
          cancelOperation();
          return;
        }
        progress = (int) (100f * i / progressDenom);
        if (progress != lastProgress) {
          updateProgress("Reading data:", progress);
        }
        lastProgress = progress;
      }

      fieldAverages = new double[numFields];
      fieldTotals = new double[numFields];

      // Calculate the means
      for (row = 0; row < numRecs; row++) {
        for (i = 0; i < numFields; i++) {
          fieldTotals[i] += fieldArray[row][i];
        }
      }

      for (i = 0; i < numFields; i++) {
        fieldAverages[i] = fieldTotals[i] / numRecs;
      }

      // Calculate the covariance matrix and total deviations
      double[] fieldTotalDeviation = new double[numFields];
      double[][] covariances = new double[numFields][numFields];
      double[][] correlationMatrix = new double[numFields][numFields];

      for (row = 0; row < numRecs; row++) {
        for (i = 0; i < numFields; i++) {
          fieldTotalDeviation[i] +=
              (fieldArray[row][i] - fieldAverages[i]) * (fieldArray[row][i] - fieldAverages[i]);
          for (a = 0; a < numFields; a++) {
            covariances[i][a] +=
                (fieldArray[row][i] - fieldAverages[i]) * (fieldArray[row][a] - fieldAverages[a]);
          }
        }
        if (cancelOp) {
          cancelOperation();
          return;
        }
        progress = (int) (100f * row / progressDenom);
        if (progress != lastProgress) {
          updateProgress("Calculating covariances:", progress);
        }
        lastProgress = progress;
      }

      for (i = 0; i < numFields; i++) {
        for (a = 0; a < numFields; a++) {
          correlationMatrix[i][a] =
              covariances[i][a] / (Math.sqrt(fieldTotalDeviation[i] * fieldTotalDeviation[a]));
        }
      }

      // Sample covariance: divide the cross-product sums by n - 1.
      for (i = 0; i < numFields; i++) {
        for (a = 0; a < numFields; a++) {
          covariances[i][a] = covariances[i][a] / (numRecs - 1);
        }
      }

      // Calculate the eigenvalues and eigenvectors. Standardized PCA decomposes
      // the correlation matrix; otherwise the covariance matrix is used.
      Matrix cov = standardizedPCA ? new Matrix(correlationMatrix) : new Matrix(covariances);
      EigenvalueDecomposition eigen = cov.eig();
      double[] eigenvalues = eigen.getRealEigenvalues();
      Matrix eigenvectors = eigen.getV();

      double[][] vecs = eigenvectors.getArray();
      int numComponents = eigenvectors.getColumnDimension(); // same as num rows.
      // TreeSet orders the components (by PrincipalComponent's compareTo) for reporting.
      SortedSet<PrincipalComponent> principalComponents = new TreeSet<>();
      for (i = 0; i < numComponents; i++) {
        double[] eigenvector = new double[numComponents];
        for (j = 0; j < numComponents; j++) {
          eigenvector[j] = vecs[j][i];
        }
        principalComponents.add(new PrincipalComponent(eigenvalues[i], eigenvector));
      }

      double totalEigenvalue = 0;
      for (i = 0; i < numComponents; i++) {
        totalEigenvalue += eigenvalues[i];
      }

      double[][] explainedVarianceArray = new double[numComponents][2]; // percent and cum. percent
      j = 0;
      for (PrincipalComponent pc : principalComponents) {
        explainedVarianceArray[j][0] = pc.eigenValue / totalEigenvalue * 100.0;
        if (j == 0) {
          explainedVarianceArray[j][1] = explainedVarianceArray[j][0];
        } else {
          explainedVarianceArray[j][1] =
              explainedVarianceArray[j][0] + explainedVarianceArray[j - 1][1];
        }
        j++;
      }

      DecimalFormat df1 = new DecimalFormat("0.00");
      DecimalFormat df2 = new DecimalFormat("0.0000");
      DecimalFormat df3 = new DecimalFormat("0.000000");
      DecimalFormat df4 = new DecimalFormat("0.000");
      // FIX: build the report with a StringBuilder instead of O(n^2) String "+=".
      StringBuilder ret = new StringBuilder("Principal Component Analysis Report:\n\n");
      ret.append("Component\tExplained Var.\tCum. %\tEigenvalue\tEigenvector\n");
      j = 0;
      for (PrincipalComponent pc : principalComponents) {

        String explainedVariance = df1.format(explainedVarianceArray[j][0]);
        String explainedCumVariance = df1.format(explainedVarianceArray[j][1]);
        double[] eigenvector = pc.eigenVector.clone();
        ret.append(j + 1)
            .append("\t")
            .append(explainedVariance)
            .append("\t")
            .append(explainedCumVariance)
            .append("\t")
            .append(df2.format(pc.eigenValue))
            .append("\t");
        StringBuilder eigenvec = new StringBuilder("[");
        for (i = 0; i < numComponents; i++) {
          if (i < numComponents - 1) {
            eigenvec.append(df3.format(eigenvector[i])).append(", ");
          } else {
            eigenvec.append(df3.format(eigenvector[i]));
          }
        }
        eigenvec.append("]");
        ret.append(eigenvec).append("\n");

        if (j < numberOfComponentsOutput) {
          // Append a new attribute column holding this component's score per record.
          // FIX: the original allocated a throwaway DBFField and immediately replaced it.
          DBFField field = new DBFField();
          field.setName("COMP" + (j + 1));
          field.setDataType(DBFField.DBFDataType.NUMERIC);
          field.setFieldLength(10);
          field.setDecimalCount(4);
          table.addField(field);

          for (row = 0; row < numRecs; row++) {
            z = 0;
            for (i = 0; i < numFields; i++) {
              z += fieldArray[row][i] * eigenvector[i];
            }

            Object[] recData = table.getRecord(row);
            // FIX: new Double(z) is deprecated boxing; use valueOf.
            recData[recData.length - 1] = Double.valueOf(z);
            table.updateRecord(row, recData);

            if (cancelOp) {
              cancelOperation();
              return;
            }
            progress = (int) (100f * row / progressDenom);
            if (progress != lastProgress) {
              updateProgress("Outputing Component " + (j + 1) + ":", progress);
            }
            lastProgress = progress;
          }
        }
        j++;
      }

      // calculate the factor loadings.
      ret.append("\nFactor Loadings:\n");
      ret.append("\t\tComponent\n\t");
      for (i = 0; i < numComponents; i++) {
        ret.append(i + 1).append("\t");
      }
      ret.append("\n");
      double loading = 0;
      // The two near-identical loops of the original are merged; only the loading
      // formula differs between standardized and unstandardized PCA.
      for (i = 0; i < numFields; i++) {
        ret.append("field ").append(i + 1).append("\t");
        for (PrincipalComponent pc : principalComponents) {
          double[] eigenvector = pc.eigenVector.clone();
          double ev = pc.eigenValue;
          if (!standardizedPCA) {
            loading = (eigenvector[i] * Math.sqrt(ev)) / Math.sqrt(covariances[i][i]);
          } else {
            loading = eigenvector[i] * Math.sqrt(ev);
          }
          ret.append(df4.format(loading)).append("\t");
        }
        ret.append("\n");
      }

      ret.append("\n");
      for (i = 0; i < numFields; i++) {
        ret.append("field ").append(i + 1).append("\t").append(fieldNames[i]).append("\n");
      }

      returnData(ret.toString());

      if (numberOfComponentsOutput > 0) {
        returnData(table.getFileName());
      }

      ScreePlot plot = new ScreePlot(explainedVarianceArray);
      returnData(plot);

    } catch (OutOfMemoryError oe) {
      myHost.showFeedback("An out-of-memory error has occurred during operation.");
    } catch (Exception e) {
      myHost.showFeedback("An error has occurred during operation. See log file for details.");
      myHost.logException("Error in " + getDescriptiveName(), e);
    } finally {
      updateProgress("Progress: ", 0);
      // tells the main application that this process is completed.
      amIActive = false;
      myHost.pluginComplete();
    }
  }
Example no. 5
0
  /**
   * Validates the requested attribute set against the print service and flavor, and
   * caches the values this job acts on (destination path, job name, copies, media,
   * orientation).
   *
   * @param flavor the document flavor the attribute values are validated against
   * @throws PrintException if fidelity is required and an attribute is unsupported,
   *     if the destination is not a writable {@code file:} URI, or on security denial
   */
  private void getAttributeValues(DocFlavor flavor) throws PrintException {

    // FIDELITY_TRUE means every requested attribute must be honored exactly.
    fidelity = reqAttrSet.get(Fidelity.class) == Fidelity.FIDELITY_TRUE;

    for (Attribute attr : reqAttrSet.toArray()) {
      // FIX: use Class<?> rather than the raw type Class.
      Class<?> category = attr.getCategory();
      if (fidelity) {
        // Under strict fidelity any unsupported category or value fails the job.
        if (!service.isAttributeCategorySupported(category)) {
          notifyEvent(PrintJobEvent.JOB_FAILED);
          throw new PrintJobAttributeException("unsupported category: " + category, category, null);
        } else if (!service.isAttributeValueSupported(attr, flavor, null)) {
          notifyEvent(PrintJobEvent.JOB_FAILED);
          throw new PrintJobAttributeException("unsupported attribute: " + attr, null, attr);
        }
      }
      if (category == Destination.class) {
        URI uri = ((Destination) attr).getURI();
        if (!"file".equals(uri.getScheme())) {
          notifyEvent(PrintJobEvent.JOB_FAILED);
          throw new PrintException("Not a file: URI");
        } else {
          try {
            mDestination = (new File(uri)).getPath();
          } catch (Exception e) {
            throw new PrintException(e);
          }
          // check write access
          SecurityManager security = System.getSecurityManager();
          if (security != null) {
            try {
              security.checkWrite(mDestination);
            } catch (SecurityException se) {
              notifyEvent(PrintJobEvent.JOB_FAILED);
              throw new PrintException(se);
            }
          }
        }
      } else if (category == JobName.class) {
        jobName = ((JobName) attr).getValue();
      } else if (category == Copies.class) {
        copies = ((Copies) attr).getValue();
      } else if (category == Media.class) {
        if (attr instanceof MediaSizeName) {
          mediaName = (MediaSizeName) attr;
          // If requested MediaSizeName is not supported,
          // get the corresponding media size - this will
          // be used to create a new PageFormat.
          if (!service.isAttributeValueSupported(attr, null, null)) {
            mediaSize = MediaSize.getMediaSizeForName(mediaName);
          }
        }
      } else if (category == OrientationRequested.class) {
        orient = (OrientationRequested) attr;
      }
    }
  }
Example no. 6
0
  /* There's some inefficiency here as the job set is created even though
   * it may never be requested.
   */
  /**
   * Builds the request and job attribute sets from the supplied request set and the
   * document's own attributes, fills in a job-originating user name, synthesizes a
   * job name when none was given, and freezes the job set as an unmodifiable view.
   *
   * @param doc the document being printed; its attributes may contribute to both sets
   * @param reqSet the caller-supplied request attributes; may be {@code null}
   */
  private synchronized void initializeAttributeSets(Doc doc, PrintRequestAttributeSet reqSet) {

    reqAttrSet = new HashPrintRequestAttributeSet();
    jobAttrSet = new HashPrintJobAttributeSet();

    Attribute[] attrs;
    if (reqSet != null) {
      reqAttrSet.addAll(reqSet);
      attrs = reqSet.toArray();
      for (int i = 0; i < attrs.length; i++) {
        if (attrs[i] instanceof PrintJobAttribute) {
          jobAttrSet.add(attrs[i]);
        }
      }
    }

    DocAttributeSet docSet = doc.getAttributes();
    if (docSet != null) {
      attrs = docSet.toArray();
      for (int i = 0; i < attrs.length; i++) {
        if (attrs[i] instanceof PrintRequestAttribute) {
          reqAttrSet.add(attrs[i]);
        }
        if (attrs[i] instanceof PrintJobAttribute) {
          jobAttrSet.add(attrs[i]);
        }
      }
    }

    /* add the user name to the job */
    String userName = "";
    try {
      userName = System.getProperty("user.name");
    } catch (SecurityException ignored) {
      // Best effort: fall through and try the RequestingUserName attribute below.
    }

    if (userName == null || userName.equals("")) {
      // FIX: reqSet may be null (it is treated as optional above); the original
      // dereferenced it unconditionally here and could throw NullPointerException.
      RequestingUserName ruName =
          (reqSet == null) ? null : (RequestingUserName) reqSet.get(RequestingUserName.class);
      if (ruName != null) {
        jobAttrSet.add(new JobOriginatingUserName(ruName.getValue(), ruName.getLocale()));
      } else {
        jobAttrSet.add(new JobOriginatingUserName("", null));
      }
    } else {
      jobAttrSet.add(new JobOriginatingUserName(userName, null));
    }

    /* if no job name supplied use doc name (if supplied), if none and
     * its a URL use that, else finally anything .. */
    if (jobAttrSet.get(JobName.class) == null) {
      JobName jobName;
      if (docSet != null && docSet.get(DocumentName.class) != null) {
        DocumentName docName = (DocumentName) docSet.get(DocumentName.class);
        jobName = new JobName(docName.getValue(), docName.getLocale());
        jobAttrSet.add(jobName);
      } else {
        String str = "JPS Job:" + doc;
        try {
          Object printData = doc.getPrintData();
          if (printData instanceof URL) {
            // FIX: reuse the value already fetched instead of calling getPrintData() again.
            str = ((URL) printData).toString();
          }
        } catch (IOException ignored) {
          // Best effort: keep the synthesized default job name if print data is unreadable.
        }
        jobName = new JobName(str, null);
        jobAttrSet.add(jobName);
      }
    }

    // Freeze the job attribute set so later readers cannot mutate it.
    jobAttrSet = AttributeSetUtilities.unmodifiableView(jobAttrSet);
  }