Example no. 1
  @Override
  public Map<String, Object> saveMap(Map<String, Object> object, Class clazz) throws Exception {

    StringBuilder sql = new StringBuilder();
    EntityInfo entityInfo = ClassUtils.getEntityInfoByClazz(clazz);

    sql.append("INSERT INTO ");
    sql.append(entityInfo.getTableName());
    sql.append("(");

    List<String> columns = new ArrayList<String>();
    List<Object> values = new ArrayList<Object>();
    Map<String, String> ptcMap = ClassUtils.propToColumnMap.get(entityInfo.getClazzName());
    for (Map.Entry<String, Object> entry : object.entrySet()) {
      columns.add(ptcMap.get(entry.getKey()));
      values.add(entry.getValue());
    }
    sql.append(StringUtils.join(columns, ","));
    sql.append(") VALUES(");
    String[] params = new String[values.size()];
    Arrays.fill(params, "?");
    sql.append(StringUtils.join(params, ","));
    sql.append(")");
    if (entityInfo.getStrategy().equals(GenerationType.IDENTITY)) {
      Long id = addReutrnId(sql.toString(), values);
      if (id != null) {
        object.put(entityInfo.getPkName(), id);
      }
    } else {
      add(sql.toString(), values);
    }
    return object;
  }
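The INSERT builder above leans on a small idiom: pre-size a String array, fill every slot with "?" via Arrays.fill, and join the result so there is one JDBC placeholder per bound value. A minimal, self-contained sketch of just that idiom (the table and values are illustrative, not taken from the snippet; plain String.join stands in for StringUtils.join):

import java.util.Arrays;

public class PlaceholderSketch {
  public static void main(String[] args) {
    Object[] values = {1L, "Alice", Boolean.TRUE};
    String[] params = new String[values.length];
    Arrays.fill(params, "?"); // one "?" per value to bind
    String sql = "INSERT INTO demo(a,b,c) VALUES(" + String.join(",", params) + ")";
    System.out.println(sql); // INSERT INTO demo(a,b,c) VALUES(?,?,?)
  }
}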
 public void setSwingDataCollection(Collection<ICFSecurityISOCountryObj> value) {
   final String S_ProcName = "setSwingDataCollection";
   swingDataCollection = value;
   if (swingDataCollection == null) {
     arrayOfISOCountry = new ICFSecurityISOCountryObj[0];
   } else {
     int len = value.size();
     arrayOfISOCountry = new ICFSecurityISOCountryObj[len];
     Iterator<ICFSecurityISOCountryObj> iter = swingDataCollection.iterator();
     int idx = 0;
     while (iter.hasNext() && (idx < len)) {
       arrayOfISOCountry[idx++] = iter.next();
     }
     if (idx < len) {
       throw CFLib.getDefaultExceptionFactory()
           .newRuntimeException(
               getClass(),
               S_ProcName,
               "Collection iterator did not fully populate the array copy");
     }
     if (iter.hasNext()) {
       throw CFLib.getDefaultExceptionFactory()
           .newRuntimeException(
               getClass(),
               S_ProcName,
               "Collection iterator had left over items when done populating array copy");
     }
     Arrays.sort(arrayOfISOCountry, compareISOCountryByQualName);
   }
   PickerTableModel tblDataModel = getDataModel();
   if (tblDataModel != null) {
     tblDataModel.fireTableDataChanged();
   }
 }
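The setter above copies the collection into an array by hand before sorting it with a named comparator. For reference, a minimal sketch of the same copy-and-sort step using Collection.toArray and Arrays.sort (the element type and comparator are illustrative placeholders, not the CFLib ones):

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class CopySortSketch {
  public static void main(String[] args) {
    List<String> names = List.of("beta", "Alpha", "gamma");
    String[] arr = names.toArray(new String[0]); // copy the collection into an array
    Arrays.sort(arr, Comparator.comparing(String::toLowerCase)); // sort with an explicit comparator
    System.out.println(Arrays.toString(arr)); // [Alpha, beta, gamma]
  }
}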
Example no. 3
  @Override
  public boolean removeMetadataBlob(
      UserPublicKey owner, byte[] encodedWritingPublicKey, byte[] writingKeySignedMapKeyPlusBlob) {
    UserPublicKey writingKey = new UserPublicKey(encodedWritingPublicKey);

    try {
      byte[] payload = writingKey.unsignMessage(writingKeySignedMapKeyPlusBlob);
      byte[] mapKey = Arrays.copyOfRange(payload, 0, 32);
      byte[] metadataBlob =
          Arrays.copyOfRange(
              payload,
              32,
              payload
                  .length); // will be zero length for now // TODO  change to the current blob hash
      MetadataBlob blob = new MetadataBlob(writingKey.getPublicKeys(), mapKey, metadataBlob);
      return blob.delete();
    } catch (TweetNaCl.InvalidSignatureException e) {
      System.err.println(
          "Invalid signature during removeMetadataBlob for owner: "
              + owner
              + " and sharer: "
              + writingKey);
      return false;
    }
  }
 public List<ICFAccTaxObj> readAllTax(boolean forceRead) {
   final String S_ProcName = "readAllTax";
   if ((allTax == null) || forceRead) {
     Map<CFAccTaxPKey, ICFAccTaxObj> map = new HashMap<CFAccTaxPKey, ICFAccTaxObj>();
     allTax = map;
     CFAccTaxBuff[] buffList =
         ((ICFAccSchema) schema.getBackingStore())
             .getTableTax()
             .readAllDerived(schema.getAuthorization());
     CFAccTaxBuff buff;
     ICFAccTaxObj obj;
     for (int idx = 0; idx < buffList.length; idx++) {
       buff = buffList[idx];
       obj = newInstance();
       obj.setPKey(((ICFAccSchema) schema.getBackingStore()).getFactoryTax().newPKey());
       obj.setBuff(buff);
       ICFAccTaxObj realized = (ICFAccTaxObj) obj.realize();
     }
   }
   Comparator<ICFAccTaxObj> cmp =
       new Comparator<ICFAccTaxObj>() {
         public int compare(ICFAccTaxObj lhs, ICFAccTaxObj rhs) {
           if (lhs == null) {
             if (rhs == null) {
               return (0);
             } else {
               return (-1);
             }
           } else if (rhs == null) {
             return (1);
           } else {
             CFAccTaxPKey lhsPKey = lhs.getPKey();
             CFAccTaxPKey rhsPKey = rhs.getPKey();
             int ret = lhsPKey.compareTo(rhsPKey);
             return (ret);
           }
         }
       };
   int len = allTax.size();
   ICFAccTaxObj arr[] = new ICFAccTaxObj[len];
   Iterator<ICFAccTaxObj> valIter = allTax.values().iterator();
   int idx = 0;
   while ((idx < len) && valIter.hasNext()) {
     arr[idx++] = valIter.next();
   }
   if (idx < len) {
     throw CFLib.getDefaultExceptionFactory()
         .newArgumentUnderflowException(getClass(), S_ProcName, 0, "idx", idx, len);
   } else if (valIter.hasNext()) {
     throw CFLib.getDefaultExceptionFactory()
         .newArgumentOverflowException(getClass(), S_ProcName, 0, "idx", idx, len);
   }
   Arrays.sort(arr, cmp);
   ArrayList<ICFAccTaxObj> arrayList = new ArrayList<ICFAccTaxObj>(len);
   for (idx = 0; idx < len; idx++) {
     arrayList.add(arr[idx]);
   }
   List<ICFAccTaxObj> sortedList = arrayList;
   return (sortedList);
 }
  private LogisticRegressionModel instantiateSparkModel() {
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", topologyConfig.getProperty("hdfs.url"));

    double[] sparkModelInfo = null;

    try {
      sparkModelInfo =
          getSparkModelInfoFromHDFS(
              new Path(topologyConfig.getProperty("hdfs.url") + "/tmp/sparkML_weights"), conf);
    } catch (Exception e) {
      LOG.error("Couldn't instantiate Spark model in prediction bolt: " + e.getMessage());
      e.printStackTrace();

      throw new RuntimeException(e);
    }

    // all numbers besides the last value are the weights
    double[] weights = Arrays.copyOfRange(sparkModelInfo, 0, sparkModelInfo.length - 1);

    // the last number in the array is the intercept
    double intercept = sparkModelInfo[sparkModelInfo.length - 1];

    org.apache.spark.mllib.linalg.Vector weightsV = (Vectors.dense(weights));
    return new LogisticRegressionModel(weightsV, intercept);
  }
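As the inline comments note, the model file is split into weights plus a trailing intercept with Arrays.copyOfRange, whose upper bound is exclusive. A tiny standalone illustration of that split (the numbers are made up):

import java.util.Arrays;

public class SplitSketch {
  public static void main(String[] args) {
    double[] modelInfo = {0.5, -1.2, 3.0, 0.25}; // three weights followed by the intercept
    double[] weights = Arrays.copyOfRange(modelInfo, 0, modelInfo.length - 1); // indices 0..length-2
    double intercept = modelInfo[modelInfo.length - 1]; // last element
    System.out.println(Arrays.toString(weights) + " intercept=" + intercept);
    // prints: [0.5, -1.2, 3.0] intercept=0.25
  }
}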
Example no. 6
 SqlDialect get(DataSource dataSource) {
   Connection connection = null;
   try {
     connection = dataSource.getConnection();
     DatabaseMetaData metaData = connection.getMetaData();
     String productName = metaData.getDatabaseProductName();
     String productVersion = metaData.getDatabaseProductVersion();
     List key = Arrays.asList(productName, productVersion);
     SqlDialect dialect = map.get(key);
     if (dialect == null) {
       final SqlDialect.DatabaseProduct product =
           SqlDialect.getProduct(productName, productVersion);
       dialect = new SqlDialect(product, productName, metaData.getIdentifierQuoteString());
       map.put(key, dialect);
     }
     connection.close();
     connection = null;
     return dialect;
   } catch (SQLException e) {
     throw new RuntimeException(e);
   } finally {
     if (connection != null) {
       try {
         connection.close();
       } catch (SQLException e) {
         // ignore
       }
     }
   }
 }
 public void loadData(boolean forceReload) {
   ICFFreeSwitchSchemaObj schemaObj = swingSchema.getSchema();
   if ((containingCluster == null) || forceReload) {
     CFSecurityAuthorization auth = schemaObj.getAuthorization();
     long containingClusterId = auth.getSecClusterId();
     containingCluster = schemaObj.getClusterTableObj().readClusterByIdIdx(containingClusterId);
   }
   if ((listOfTenant == null) || forceReload) {
     arrayOfTenant = null;
     listOfTenant =
         schemaObj
             .getTenantTableObj()
             .readTenantByClusterIdx(containingCluster.getRequiredId(), swingIsInitializing);
     if (listOfTenant != null) {
       Object objArray[] = listOfTenant.toArray();
       if (objArray != null) {
         int len = objArray.length;
         arrayOfTenant = new ICFSecurityTenantObj[len];
         for (int i = 0; i < len; i++) {
           arrayOfTenant[i] = (ICFSecurityTenantObj) objArray[i];
         }
         Arrays.sort(arrayOfTenant, compareTenantByQualName);
       }
     }
   }
 }
Example no. 8
 /**
  * Return Regions as Array
  *
  * @param ctx context
   * @return MRegion Array
  */
 public static MRegion[] getRegions(Ctx ctx) {
   if (s_regions.isEmpty()) loadAllRegions(ctx);
   MRegion[] retValue = new MRegion[s_regions.size()];
   s_regions.values().toArray(retValue);
   Arrays.sort(retValue, new MRegion(ctx, 0, null));
   return retValue;
 } //	getRegions
Example no. 9
 @Parameterized.Parameters(name = "{index} - {4}")
 public static Collection<Object[]> getData() {
   return Arrays.asList(
       new Object[][] {
         {null, "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1", "sa", "", "H2 test"},
         {new jdbcDriver(), "jdbc:hsqldb:mem:testmemdb", "SA", "", "HyperSQL DB test"}
       });
 }
 @BeforeClass
 public static void doTestSuiteSetup() throws Exception {
   /* List all of the object names being used in this entire class.
    * The objects are dropped with errors ignored, so it is OK if the
    * object does not exist for a particular test.
    */
   objDropList = new ArrayList<String>(Arrays.asList("table " + ATABLE_NAME));
   doBaseTestSuiteSetup();
 }
 private static Collection<?> foreach(
     ScanRanges scanRanges, int[] widths, KeyRange[] expectedSplits) {
   SkipScanFilter filter = new SkipScanFilter(scanRanges.getRanges(), buildSchema(widths));
   Scan scan =
       new Scan().setFilter(filter).setStartRow(KeyRange.UNBOUND).setStopRow(KeyRange.UNBOUND);
   List<Object> ret = Lists.newArrayList();
   ret.add(new Object[] {scan, scanRanges, Arrays.<KeyRange>asList(expectedSplits)});
   return ret;
 }
Example no. 12
  protected void maybeCreateTables(boolean dropTables) {
    boolean createTables = false;
    try {
      size();
    } catch (DataAccessException e) {
      createTables = true;
    }
    Connection connection = getConnection();
    try {
      startTransaction(connection);
      if (dropTables && !createTables) {
        // TODO: Logging....
        System.err.println("--- dropping tables:");
        List<String> tables = new ArrayList<String>(Arrays.asList(TABLES));
        Collections.reverse(tables);
        for (String table : tables) {
          System.err.print("Dropping table " + table);
          dropTable(table, connection);
          System.err.println("ok!");
        }
        createTables = true;
      }
      if (createTables) {
        System.err.println("--- creating " + TABLES.length + " tables:");

        for (String table : TABLES) {
          System.err.print("--- creating table " + table + "...");
          InputStream inputStream =
              getClass().getClassLoader().getResourceAsStream("ddl/" + table + ".ddl");
          if (inputStream == null) {
            System.err.println("Could not find DDL file for table " + table + "!");
            return;
          }
          try {
            String sql = IOUtils.toString(inputStream);
            createTable(sql, connection);
          } catch (IOException e) {
            System.err.println("Failed!");
            e.printStackTrace();
            return;
          } finally {
            IOUtils.closeQuietly(inputStream);
          }

          System.err.println("ok");
        }
      }
    } catch (DataAccessException e) {
      rollback(connection);
    } finally {
      endTransaction(connection);
      close(connection);
    }
  }
Example no. 13
  @Override
  public boolean setMetadataBlob(
      UserPublicKey owner, byte[] encodedWritingPublicKey, byte[] writingKeySignedMapKey) {
    UserPublicKey writingKey = new UserPublicKey(encodedWritingPublicKey);

    try {
      byte[] payload = writingKey.unsignMessage(writingKeySignedMapKey);
      byte[] mapKey = Arrays.copyOfRange(payload, 0, 32);
      byte[] metadataBlob = Arrays.copyOfRange(payload, 32, payload.length);
      MetadataBlob blob = new MetadataBlob(writingKey.getPublicKeys(), mapKey, metadataBlob);
      return blob.insert();
    } catch (TweetNaCl.InvalidSignatureException e) {
      System.err.println(
          "Invalid signature during setMetadataBlob for owner: "
              + owner
              + " and sharer: "
              + writingKey);
      return false;
    }
  }
Example no. 14
 /**
  * Return Array of Regions of Country
  *
  * @param ctx context
  * @param C_Country_ID country
  * @return MRegion Array
  */
 public static MRegion[] getRegions(Ctx ctx, int C_Country_ID) {
   if (s_regions.isEmpty()) loadAllRegions(ctx);
   ArrayList<MRegion> list = new ArrayList<MRegion>();
   Iterator<MRegion> it = s_regions.values().iterator();
   while (it.hasNext()) {
     MRegion r = it.next();
     if (r.getC_Country_ID() == C_Country_ID) list.add(r);
   }
   //  Sort it
   MRegion[] retValue = new MRegion[list.size()];
   list.toArray(retValue);
   Arrays.sort(retValue, new MRegion(ctx, 0, null));
   return retValue;
 } //	getRegions
 private static Collection<?> foreach(
     KeyRange[][] ranges, int[] widths, KeyRange[] expectedSplits) {
   RowKeySchema schema = buildSchema(widths);
   List<List<KeyRange>> slots = Lists.transform(Lists.newArrayList(ranges), ARRAY_TO_LIST);
   SkipScanFilter filter = new SkipScanFilter(slots, schema);
   // Always set start and stop key to max to verify we are using the information in skipscan
   // filter over the scan's KMIN and KMAX.
   Scan scan =
       new Scan().setFilter(filter).setStartRow(KeyRange.UNBOUND).setStopRow(KeyRange.UNBOUND);
   ScanRanges scanRanges = ScanRanges.create(slots, schema);
   List<Object> ret = Lists.newArrayList();
   ret.add(new Object[] {scan, scanRanges, Arrays.<KeyRange>asList(expectedSplits)});
   return ret;
 }
Example no. 16
 @Override
 public void batchDel(List<Object> ids, Class clazz) throws Exception {
   EntityInfo entityInfo = ClassUtils.getEntityInfoByClazz(clazz);
   if (entityInfo == null) {
      throw new DaoException("Invalid entity class");
   }
   Object[] objects = new Object[ids.size()];
   Arrays.fill(objects, "?");
   SqlHelper sqlHelper = SqlHelper.getDeleteHelper(clazz);
   sqlHelper
       .append(" WHERE ")
       .append(entityInfo.getPkClumnName())
       .append(" IN (")
       .append(StringUtils.join(objects, ","))
       .append(")");
   update(sqlHelper.getSql(), ids);
 }
Example no. 17
  @Test
  public void testSearch_Has() throws SQLException {
    Connection connection = null;
    ResultSet resultSet = null;
    try {
      ConnectionManager connectionManager = temporaryFileDatabase.getConnectionManager(true);
      initWithTestData(connectionManager);

      connection = connectionManager.getConnection(null);
      resultSet = DBQueries.search(has(self(Car.class)), NAME, connection);
      assertResultSetObjectKeysOrderAgnostic(resultSet, Arrays.asList(1, 2, 3));

    } finally {
      DBUtils.closeQuietly(connection);
      DBUtils.closeQuietly(resultSet);
    }
  }
  //
  // Return the methods of an interface in a deterministic
  // order. Class.getMethods() does not do us this favor.
  //
  private Method[] sortMethods(Class iface) throws Exception {
    Method[] raw = iface.getMethods();
    int count = raw.length;
    Method[] cooked = new Method[count];
    MethodSortable[] sortables = new MethodSortable[count];

    for (int i = 0; i < count; i++) {
      sortables[i] = new MethodSortable(raw[i]);
    }

    Arrays.sort(sortables);

    for (int i = 0; i < count; i++) {
      cooked[i] = sortables[i].getMethod();
    }

    return cooked;
  }
  // debug print the list of methods which throw SQLFeatureNotSupportedException
  private void printUnsupportedList(HashSet<String> unsupportedList) {
    int count = unsupportedList.size();

    if (count == 0) {
      return;
    }

    println("--------------- UNSUPPORTED METHODS ------------------");
    println("--");

    String[] result = new String[count];

    unsupportedList.toArray(result);
    Arrays.sort(result);

    for (int i = 0; i < count; i++) {
      println(result[i]);
    }
  }
  // debug print the list of methods which have disappeared from the SQL interface
  private void printVanishedMethodList(HashSet<String> vanishedMethodList) {
    int count = vanishedMethodList.size();

    if (count == 0) {
      return;
    }

    println("--------------- VANISHED METHODS ------------------");
    println("--");

    String[] result = new String[count];

    vanishedMethodList.toArray(result);
    Arrays.sort(result);

    for (int i = 0; i < count; i++) {
      println(result[i]);
    }
  }
 public void loadData(boolean forceReload) {
   ICFBamSchemaObj schemaObj = swingSchema.getSchema();
   if ((listOfAccessFrequency == null) || forceReload) {
     arrayOfAccessFrequency = null;
     listOfAccessFrequency =
         schemaObj.getAccessFrequencyTableObj().readAllAccessFrequency(swingIsInitializing);
     if (listOfAccessFrequency != null) {
       Object objArray[] = listOfAccessFrequency.toArray();
       if (objArray != null) {
         int len = objArray.length;
         arrayOfAccessFrequency = new ICFBamAccessFrequencyObj[len];
         for (int i = 0; i < len; i++) {
           arrayOfAccessFrequency[i] = (ICFBamAccessFrequencyObj) objArray[i];
         }
         Arrays.sort(arrayOfAccessFrequency, compareAccessFrequencyByQualName);
       }
     }
   }
 }
Example no. 22
  @Test
  public void testSearch_StringStartsWith() throws SQLException {
    Connection connection = null;
    ResultSet resultSet = null;
    try {
      ConnectionManager connectionManager = temporaryFileDatabase.getConnectionManager(true);
      initWithTestData(connectionManager);

      StringStartsWith<Car, String> startsWith = startsWith(Car.FEATURES, "ab");

      connection = connectionManager.getConnection(null);
      resultSet = DBQueries.search(startsWith, NAME, connection);
      assertResultSetObjectKeysOrderAgnostic(resultSet, Arrays.asList(1, 3));

    } finally {
      DBUtils.closeQuietly(connection);
      DBUtils.closeQuietly(resultSet);
    }
  }
 public void loadData(boolean forceReload) {
   ICFSecuritySchemaObj schemaObj = swingSchema.getSchema();
   if ((listOfISOTimezone == null) || forceReload) {
     arrayOfISOTimezone = null;
     listOfISOTimezone =
         schemaObj.getISOTimezoneTableObj().readAllISOTimezone(swingIsInitializing);
     if (listOfISOTimezone != null) {
       Object objArray[] = listOfISOTimezone.toArray();
       if (objArray != null) {
         int len = objArray.length;
         arrayOfISOTimezone = new ICFSecurityISOTimezoneObj[len];
         for (int i = 0; i < len; i++) {
           arrayOfISOTimezone[i] = (ICFSecurityISOTimezoneObj) objArray[i];
         }
         Arrays.sort(arrayOfISOTimezone, compareISOTimezoneByQualName);
       }
     }
   }
 }
  // Debug print the list of method failures which we don't understand
  private void printNotUnderstoodList(HashSet<String> notUnderstoodList) {
    int count = notUnderstoodList.size();

    if (count == 0) {
      return;
    }

    println("\n\n");
    println("--------------- NOT UNDERSTOOD METHODS ------------------");
    println("--");

    String[] result = new String[count];

    notUnderstoodList.toArray(result);
    Arrays.sort(result);

    for (int i = 0; i < count; i++) {
      println(result[i]);
    }
  }
Example no. 25
  public void writePredictionToHDFS(Tuple input, double[] params, double prediction)
      throws Exception {

    try {

      Configuration conf = new Configuration();
      conf.set("fs.defaultFS", topologyConfig.getProperty("hdfs.url"));
      FileSystem fs = FileSystem.get(conf);

      Path pt =
          new Path(
              topologyConfig.getProperty("hdfs.url")
                  + "/tmp/predictions/"
                  + System.currentTimeMillis());

      BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fs.create(pt, true)));

      br.write("Original Event: " + input + "\n");
      br.write("\n");
      br.write("Certification status (from HBase): " + (params[0] == 1 ? "Y" : "N") + "\n");
      br.write("Wage plan (from HBase): " + (params[1] == 1 ? "Miles" : "Hours" + "\n"));
      br.write("Hours logged (from HBase): " + params[2] * 100 + "\n");
      br.write("Miles logged (from HBase): " + params[3] * 1000 + "\n");
      br.write("\n");
      br.write("Is Foggy? (from weather API): " + (params[4] == 1 ? "Y" : "N" + "\n"));
      br.write("Is Rainy? (from weather API): " + (params[5] == 1 ? "Y" : "N" + "\n"));
      br.write("Is Windy? (from weather API): " + (params[6] == 1 ? "Y" : "N" + "\n"));
      br.write("\n");
      br.write("\n");
      br.write("Input to Spark ML model: " + Arrays.toString(params) + "\n");
      br.write("\n");
      br.write("Prediction from Spark ML model: " + prediction + "\n");
      br.flush();
      br.close();

    } catch (Exception e) {
      e.printStackTrace();
    }
  }
Example no. 26
  public static void showCSVFileOpenDialog(
      String tablename, String parentTable, int parentKeyColumn, int passwordField)
      throws FileNotFoundException, IOException, SQLException, NoSuchAlgorithmException {
    JFileChooser fopen = new JFileChooser();

    javax.swing.filechooser.FileFilter filter =
        new FileNameExtensionFilter("Comma Separated Values(CSV) Files", "csv");
    fopen.addChoosableFileFilter(filter);

    int ret = fopen.showDialog(null, "Open File");
    if (ret == JFileChooser.APPROVE_OPTION) {
      CSVRead loginCSV = new CSVRead();
      String[] deptCheckTables = {"equipment", "user", "class", "subject"};
      if (Arrays.asList(deptCheckTables).contains(tablename) && !LoginForm.userRole.equals("sa")) {
        loginCSV.setDeptCheck(true);
      }
      loginCSV.setFilename(fopen.getSelectedFile().toString());
      loginCSV.setTablename(tablename);
      if (parentTable != null) {
        loginCSV.setParentTable(parentTable);
      }
      if (parentKeyColumn != 0) {
        loginCSV.setParentKeyColumn(parentKeyColumn);
      }
      if (passwordField != 0) {
        loginCSV.setPasswordField(passwordField);
      }
      try {
        loginCSV.insertRows();
      } catch (FileNotFoundException ex) {
        JOptionPane.showMessageDialog(null, "Invalid File");
      } catch (IOException ex) {
        JOptionPane.showMessageDialog(null, "Invalid Data");
      }
    } else {
      fopen.setVisible(false);
    }
  }
Example no. 27
  static {
    try {
      CON_PROXY_CTOR = createProxyConstructor(ProxyConnection.class);

      Class[] argClasses = new Class[0];
      RS_CLOSE_METHOD = ResultSet.class.getMethod("close", argClasses);
      STMT_CLOSE_METHOD = Statement.class.getMethod("close", argClasses);

      CLOSE_ARGS = new Object[0];

      OBJECT_METHODS =
          Collections.unmodifiableSet(new HashSet(Arrays.asList(Object.class.getMethods())));
    } catch (Exception e) {
      // e.printStackTrace();
      logger.log(
          MLevel.SEVERE,
          "An Exception occurred in static initializer of" + C3P0PooledConnection.class.getName(),
          e);
      throw new InternalError(
          "Something is very wrong, or this is a pre 1.3 JVM."
              + "We cannot set up dynamic proxies and/or methods!");
    }
  }
Example no. 28
 public static int[] getMSPIDs(MSPLocator loc, Connection c) throws SQLException {
   Statement s = c.createStatement();
   String name = loc.getNameVersion().name;
   String version = loc.getNameVersion().version;
   ResultSet rs =
       s.executeQuery(
           "select id from rosettaanalysis where name='"
               + name
               + "' and "
               + "version = '"
               + version
               + "'");
   Vector<Integer> values = new Vector<Integer>();
   while (rs.next()) {
     values.add(rs.getInt(1));
   }
   s.close();
   int[] array = new int[values.size()];
   for (int i = 0; i < values.size(); i++) {
     array[i] = values.get(i);
   }
   Arrays.sort(array);
   return array;
 }
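The query above splices name and version directly into the SQL string. A parameterized sketch of the same lookup (assuming the same rosettaanalysis table; this is an illustrative variant, not the original method) binds the values instead and keeps the final Arrays.sort:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class MspIdLookupSketch {
  static int[] getIds(Connection c, String name, String version) throws SQLException {
    String sql = "select id from rosettaanalysis where name = ? and version = ?";
    try (PreparedStatement ps = c.prepareStatement(sql)) {
      ps.setString(1, name);
      ps.setString(2, version);
      List<Integer> values = new ArrayList<>();
      try (ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
          values.add(rs.getInt(1)); // collect matching ids
        }
      }
      int[] array = new int[values.size()];
      for (int i = 0; i < array.length; i++) {
        array[i] = values.get(i);
      }
      Arrays.sort(array); // return ids in ascending order, as the original does
      return array;
    }
  }
}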
Example no. 29
  public static void main(String[] args) {
    //////////////////////////
    // The values in following 4 lines should be user input
    int startPos = 200;
    int endPos = 1000;
    int totalNumPixel = 1044;
    double threshhold = 0.0001;
    /////////////////////////
    int numPixel = endPos - startPos;
    double wavelength[] = new double[totalNumPixel];
    double photonCount[] = new double[totalNumPixel];
    double SpecNoBk[] = new double[numPixel];
    double ThisSpectrum[] = new double[numPixel];
    double ThisXaxis[] = new double[numPixel];
    double Po[] = new double[numPixel];
    double Re[] = new double[numPixel];
    double P[] = new double[6];
    double Re2[] = new double[numPixel - 1];
    double mySUM[] = new double[numPixel];
    int ind[];
    double DEV;
    double prevDEV;
    Connection connection = null;
    Statement stmt = null;
    String pattern = "##.##";

    try {
      Scanner in = new Scanner(new FileReader(args[0]));
      int i = 0;
      while (in.hasNextDouble()) {
        wavelength[i] = in.nextDouble();
        photonCount[i] = in.nextDouble();
        ++i;
      }
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    }

    ThisSpectrum = Arrays.copyOfRange(photonCount, startPos, endPos);
    ThisXaxis = Arrays.copyOfRange(wavelength, startPos, endPos);
    final WeightedObservedPoints obs = new WeightedObservedPoints();

    for (int i = 0; i < numPixel; i++) {
      obs.add(ThisXaxis[i], ThisSpectrum[i]);
    }

    final PolynomialCurveFitter fitter = PolynomialCurveFitter.create(5);
    P = fitter.fit(obs.toList());
    Polyval pVal = new Polyval(P, ThisXaxis, numPixel);
    Po = pVal.evl();

    for (int i = 0; i < numPixel; i++) {
      Re[i] = ThisSpectrum[i] - Po[i];
    }

    for (int i = 0; i < numPixel - 1; i++) {
      Re2[i] = Re[i + 1] - Re[i];
    }

    DEV = Math.sqrt(StatUtils.populationVariance(Re2, 0, Re2.length));
    for (int i = 0; i < numPixel; i++) {
      mySUM[i] = Po[i] + DEV;
    }

    int jj = 0; // jj is the length of points to be removed
    for (int i = 0; i < numPixel; i++) {
      if (ThisSpectrum[i] > mySUM[i]) {
        jj++;
      }
    }
    ind = new int[jj];

    int jjj = 0;
    for (int i = 0; i < numPixel; i++) {
      if (ThisSpectrum[i] > mySUM[i]) {
        ind[jjj] = i;
        jjj++;
      }
    }

    int indKeepLength = numPixel - ind.length;
    int indKeep[] = new int[indKeepLength];
    int k = 0;
    for (int i = 0; i < numPixel; i++) {
      if (!ArrayUtils.contains(ind, i)) {
        indKeep[k] = i;
        k++;
      }
    }
    double ThisSpectrumKeep[] = new double[indKeepLength];
    double ThisXaxisKeep[] = new double[indKeepLength];
    double PoKeep[] = new double[indKeepLength];
    double ReKeep[] = new double[indKeepLength];
    double Re2Keep[] = new double[indKeepLength - 1];
    double mySUMKeep[] = new double[indKeepLength];

    for (int i = 0; i < indKeepLength; i++) {
      ThisSpectrumKeep[i] = ThisSpectrum[indKeep[i]];
      ThisXaxisKeep[i] = ThisXaxis[indKeep[i]];
    }

    prevDEV = DEV;

    // at the point, ThisSpectrum and ThisXaxis should have reduced size
    final WeightedObservedPoints obs1 = new WeightedObservedPoints();

    for (int i = 0; i < indKeepLength; i++) {
      obs1.add(ThisXaxisKeep[i], ThisSpectrumKeep[i]);
    }

    while (true) {
      final PolynomialCurveFitter fitter1 = PolynomialCurveFitter.create(5);
      P = fitter1.fit(obs1.toList());
      Polyval pVal1 = new Polyval(P, ThisXaxisKeep, indKeepLength);
      PoKeep = pVal1.evl();

      for (int i = 0; i < indKeepLength; i++) {
        ReKeep[i] = ThisSpectrumKeep[i] - PoKeep[i];
      }

      for (int i = 0; i < indKeepLength - 1; i++) {
        Re2Keep[i] = ReKeep[i + 1] - ReKeep[i];
      }

      DEV = Math.sqrt(StatUtils.populationVariance(Re2Keep, 0, Re2Keep.length));

      for (int i = 0; i < indKeepLength; i++) {
        mySUMKeep[i] = PoKeep[i] + DEV;
      }

      for (int i = 0; i < indKeepLength; i++) {
        if (ThisSpectrumKeep[i] > mySUMKeep[i]) ThisSpectrumKeep[i] = mySUMKeep[i];
      }
      if ((Math.abs(DEV - prevDEV) / DEV) < threshhold) break;
      prevDEV = DEV;

      obs1.clear();
      for (int i = 0; i < indKeepLength; i++) {
        obs1.add(ThisXaxisKeep[i], ThisSpectrumKeep[i]);
      }
    }
    Polyval pVal2 = new Polyval(P, ThisXaxis, numPixel);
    double FLbk[] = pVal2.evl();
    for (int i = 0; i < ThisXaxis.length; i++) {
      SpecNoBk[i] = ThisSpectrum[i] - FLbk[i];
    }

    // the write-to-file part is only for testing purpose, ThisXaxis and SpecNoBk are two outputs
    try {
      FileWriter fr = new FileWriter(args[1]);
      BufferedWriter br = new BufferedWriter(fr);
      PrintWriter out = new PrintWriter(br);
      DecimalFormat df = new DecimalFormat(pattern);
      for (int j = 0; j < ThisXaxis.length; j++) {
        if (Double.toString(wavelength[j]) != null) out.write(ThisXaxis[j] + "\t" + SpecNoBk[j]);
        out.write("\r\n");
      }
      out.close();
    } catch (IOException e) {
      System.out.println(e);
    }
  }
 public List<ICFAccTaxObj> readTaxByTenantIdx(long TenantId, boolean forceRead) {
   final String S_ProcName = "readTaxByTenantIdx";
   CFAccTaxByTenantIdxKey key =
       ((ICFAccSchema) schema.getBackingStore()).getFactoryTax().newTenantIdxKey();
   key.setRequiredTenantId(TenantId);
   Map<CFAccTaxPKey, ICFAccTaxObj> dict;
   if (indexByTenantIdx == null) {
     indexByTenantIdx = new HashMap<CFAccTaxByTenantIdxKey, Map<CFAccTaxPKey, ICFAccTaxObj>>();
   }
   if ((!forceRead) && indexByTenantIdx.containsKey(key)) {
     dict = indexByTenantIdx.get(key);
   } else {
     dict = new HashMap<CFAccTaxPKey, ICFAccTaxObj>();
     // Allow other threads to dirty-read while we're loading
     indexByTenantIdx.put(key, dict);
     ICFAccTaxObj obj;
     CFAccTaxBuff[] buffList =
         ((ICFAccSchema) schema.getBackingStore())
             .getTableTax()
             .readDerivedByTenantIdx(schema.getAuthorization(), TenantId);
     CFAccTaxBuff buff;
     for (int idx = 0; idx < buffList.length; idx++) {
       buff = buffList[idx];
       obj = schema.getTaxTableObj().newInstance();
       obj.setPKey(((ICFAccSchema) schema.getBackingStore()).getFactoryTax().newPKey());
       obj.setBuff(buff);
       ICFAccTaxObj realized = (ICFAccTaxObj) obj.realize();
     }
   }
   Comparator<ICFAccTaxObj> cmp =
       new Comparator<ICFAccTaxObj>() {
         public int compare(ICFAccTaxObj lhs, ICFAccTaxObj rhs) {
           if (lhs == null) {
             if (rhs == null) {
               return (0);
             } else {
               return (-1);
             }
           } else if (rhs == null) {
             return (1);
           } else {
             CFAccTaxPKey lhsPKey = lhs.getPKey();
             CFAccTaxPKey rhsPKey = rhs.getPKey();
             int ret = lhsPKey.compareTo(rhsPKey);
             return (ret);
           }
         }
       };
   int len = dict.size();
   ICFAccTaxObj arr[] = new ICFAccTaxObj[len];
   Iterator<ICFAccTaxObj> valIter = dict.values().iterator();
   int idx = 0;
   while ((idx < len) && valIter.hasNext()) {
     arr[idx++] = valIter.next();
   }
   if (idx < len) {
     throw CFLib.getDefaultExceptionFactory()
         .newArgumentUnderflowException(getClass(), S_ProcName, 0, "idx", idx, len);
   } else if (valIter.hasNext()) {
     throw CFLib.getDefaultExceptionFactory()
         .newArgumentOverflowException(getClass(), S_ProcName, 0, "idx", idx, len);
   }
   Arrays.sort(arr, cmp);
   ArrayList<ICFAccTaxObj> arrayList = new ArrayList<ICFAccTaxObj>(len);
   for (idx = 0; idx < len; idx++) {
     arrayList.add(arr[idx]);
   }
   List<ICFAccTaxObj> sortedList = arrayList;
   return (sortedList);
 }