void optimizeForGPIsoScaleLOOMSE() {
  // Minimizes the leave-one-out MSE objective for the isotropic-scale GP.
  final MultivariateFunction objective = generateGPIsoScaleLOOMSE(this);
  // Initial noise-to-signal ratio: lambda0 = (sigmaN / stdev)^2.
  final double initialLambda = Math.pow(sigmaN / stdev, 2);
  // Optimize in log10 space over (length scale, lambda) with a small step.
  final double[] startPoint = {Math.log10(lengthes[0]), Math.log10(initialLambda)};
  final double[] startStep = {0.01, 0.01};
  minimize(objective, startPoint, startStep);
}
/**
 * Recomputes the aggregate score for every row in {@code repotest} and upserts the
 * result into {@code reposcore} within a single transaction.
 *
 * @throws Exception if the database read, write, or commit fails; partial writes
 *     are rolled back before the exception propagates
 */
public void updateScore() throws Exception {
  String selectSql = "select * from repotest";
  String insertSql = "replace into reposcore values(?,?,?,?,?,?,?,?,?,?)";
  // BUG FIX: the original reassigned `stmt` to each insert statement (leaking the
  // select statement and every previous insert statement) and never closed the
  // connection on failure. try-with-resources closes everything deterministically,
  // and a single reused insert statement replaces the per-row prepareStatement.
  try (Connection connection = MysqlInfo.getMysqlConnection()) {
    // Start the transaction before any writes so all rows commit atomically.
    connection.setAutoCommit(false);
    try (PreparedStatement select = connection.prepareStatement(selectSql);
        ResultSet resultSet = select.executeQuery();
        PreparedStatement insert = connection.prepareStatement(insertSql)) {
      while (resultSet.next()) {
        int repo_id = resultSet.getInt("id");
        int hot = resultSet.getInt("hot");
        int mature = resultSet.getInt("mature");
        int popular = resultSet.getInt("popular");
        int nb = resultSet.getInt("nb");
        // Sub-scores on a log10 scale, capped at 10.
        int contributor = (int) Math.min(10, Math.log10(resultSet.getInt("contributor")) * 2.5);
        int size = (int) Math.min(10, Math.log10(resultSet.getInt("size")) * 2.5);
        int update = 10; // placeholder: update recency not scored yet
        int release = 10; // placeholder: release cadence not scored yet
        int total = (hot + mature + popular + nb + contributor + size + update + release) / 8;
        insert.setInt(1, repo_id);
        insert.setInt(2, hot);
        insert.setInt(3, mature);
        insert.setInt(4, popular);
        insert.setInt(5, nb);
        insert.setInt(6, contributor);
        insert.setInt(7, size);
        insert.setInt(8, update);
        insert.setInt(9, release);
        insert.setInt(10, total);
        insert.execute();
      }
      connection.commit();
    } catch (Exception e) {
      connection.rollback(); // undo partial writes before propagating
      throw e;
    }
  }
}
/**
 * Projects a geographic location onto pixel coordinates of a Mercator map image
 * whose corners are given as (latitude, longitude) points.
 *
 * @param MAP_DIMENSION pixel dimensions of the map image
 * @param UPPER_LEFT map corner; x = latitude, y = longitude
 * @param LOWER_RIGHT map corner; x = latitude, y = longitude
 * @param LOCATION point to project; x = latitude, y = longitude
 * @return pixel coordinates of LOCATION within the map image
 */
public static Point2D latLongToPixel(
    final Dimension2D MAP_DIMENSION,
    final Point2D UPPER_LEFT,
    final Point2D LOWER_RIGHT,
    final Point2D LOCATION) {
  final double LATITUDE = LOCATION.getX();
  final double LONGITUDE = LOCATION.getY();
  final double MAP_WIDTH = MAP_DIMENSION.getWidth();
  final double MAP_HEIGHT = MAP_DIMENSION.getHeight();
  // Width of the full 360-degree world at this map's scale, in Mercator units.
  final double WORLD_MAP_WIDTH =
      ((MAP_WIDTH / (LOWER_RIGHT.getY() - UPPER_LEFT.getY())) * 360) / (2 * Math.PI);
  // BUG FIX: the Mercator projection uses the NATURAL logarithm,
  // y = R/2 * ln((1 + sin(lat)) / (1 - sin(lat))). The original used Math.log10,
  // which scales every vertical position down by a factor of ln(10) ~ 2.303.
  final double MAP_OFFSET_Y =
      (WORLD_MAP_WIDTH
          / 2
          * Math.log(
              (1 + Math.sin(Math.toRadians(LOWER_RIGHT.getX())))
                  / (1 - Math.sin(Math.toRadians(LOWER_RIGHT.getX())))));
  // Longitude maps linearly onto x.
  final double X =
      (LONGITUDE - UPPER_LEFT.getY()) * (MAP_WIDTH / (LOWER_RIGHT.getY() - UPPER_LEFT.getY()));
  // Latitude maps through the Mercator transform onto y (screen y grows downward).
  final double Y =
      MAP_HEIGHT
          - ((WORLD_MAP_WIDTH
                      / 2
                      * Math.log(
                          (1 + Math.sin(Math.toRadians(LATITUDE)))
                              / (1 - Math.sin(Math.toRadians(LATITUDE)))))
              - MAP_OFFSET_Y);
  return new Point2D(X, Y);
}
/**
 * Computes a power-of-ten scale factor that aligns the order of magnitude of the
 * candidate feature's mean normalized TTF with that of the reference feature.
 */
static double matchOrdersOfMagnitude(
    FrequencyTermBased fFeatureWords, FrequencyTermBased fFeatureRef) {
  // Mean normalized TTF over the reference vocabulary.
  double refSum = 0;
  double refCount = 0;
  for (String term : fFeatureRef.getMapTerm2TTF().keySet()) {
    refCount++;
    refSum += fFeatureRef.getTTFNorm(term);
  }
  final double meanRef = refSum / refCount;

  // Mean normalized TTF over the candidate vocabulary.
  double wordSum = 0;
  double wordCount = 0;
  for (String term : fFeatureWords.getMapTerm2TTF().keySet()) {
    wordCount++;
    wordSum += fFeatureWords.getTTFNorm(term);
  }
  final double mean = wordSum / wordCount;

  // Fall back to a neutral factor when the reference mean is NaN/Infinite
  // (e.g. an empty reference vocabulary makes the division 0/0).
  if (!Double.isFinite(meanRef)) {
    return 1.0;
  }
  final int oomRef = (int) Math.log10(meanRef);
  final int oom = (int) Math.log10(mean);
  return Math.pow(10, (oom - oomRef));
}
/**
 * Sets boost of termClaimsDescriptionAbstractTitles. boost = weight = factor(tf*idf)
 *
 * <p>Builds one {@code TermQuery} per distinct term across all given term vectors, with a
 * boost derived from the adjustment factor and (for single-vector input) the term frequency.
 * Boosts of terms appearing in several vectors are accumulated.
 *
 * @param vecsTerms term-frequency vector of each source document, mapped to its field name
 * @param currentField field the generated term queries target
 * @param factor - adjustment factor ( ex. alpha or beta )
 * @param decayFactor per-document decay applied in iteration order (first doc decays least)
 * @return map from term text to its boosted {@code TermQuery}
 * @throws java.io.IOException if index statistics cannot be read
 */
public Map<String, TermQuery> setBoost(
    Map<TermFreqVector, String> vecsTerms, String currentField, float factor, float decayFactor)
    throws IOException {
  Map<String, TermQuery> terms = new HashMap<>();
  // setBoost for each of the terms of each of the docs
  int i = 0;
  // Normalization spreads the total boost evenly across the input vectors.
  float norm = (float) 1 / vecsTerms.size();
  // System.out.println("--------------------------");
  for (Map.Entry<TermFreqVector, String> e : vecsTerms.entrySet()) {
    // Increase decay: documents later in iteration order are discounted more.
    String field = e.getValue();
    TermFreqVector docTerms = e.getKey();
    float decay = decayFactor * i;
    // Populate terms: with TermQuries and set boost
    for (String termTxt : docTerms.getTerms()) {
      // Create Term
      Term term = new Term(currentField, termTxt);
      // Calculate weight
      float tf = docTerms.getFreq(termTxt);
      // float idf = ir.docFreq(termTitle);
      int docs;
      float idf;
      // Smoothed idf: +1 in the denominator avoids division by zero for unseen terms.
      if (sourceField.equals(PatentQuery.all)) {
        docs = ir.getDocCount(field);
        idf = (float) Math.log10((double) docs / (ir.docFreq(new Term(field, termTxt)) + 1));
      } else {
        docs = ir.getDocCount(sourceField);
        idf = (float) Math.log10((double) docs / (ir.docFreq(new Term(sourceField, termTxt)) + 1));
      }
      float weight = tf * idf;
      // System.out.println(term.text() + " -> tf= " + tf + " idf= " + idf + " tfidf= " + weight);
      // Adjust weight by decay factor
      // NOTE(review): `weight` is computed and decayed but never used below — the boost
      // only depends on `factor` and `tf`. Confirm whether boost was meant to use it.
      weight = weight - (weight * decay);
      // Create TermQuery and add it to the collection
      TermQuery termQuery = new TermQuery(term);
      // Calculate and set boost
      float boost;
      if (vecsTerms.size() == 1) {
        boost = factor * tf;
      } else {
        boost = factor;
      }
      if (boost != 0) {
        termQuery.setBoost(boost * norm);
        // Accumulate boost when the same term came from multiple vectors.
        if (terms.containsKey(termTxt)) {
          TermQuery tq = terms.get(termTxt);
          tq.setBoost(tq.getBoost() + termQuery.getBoost());
        } else {
          terms.put(termTxt, termQuery);
        }
      }
    }
    i++;
  }
  return terms;
}
@Override public Shaking getIntensityFromVelocity(Shaking PGV) { // returns the macroseimic intensity I estimate as double // PGV is the peak ground velocity (median, 84th, 16th percentile) // Minimum intensity possible double Imin = 1; // Conversion equation assumes PGV in cm/s Shaking IfromPGV = new Shaking(); double IfromPGVmedian = 5.11 + 2.35 * Math.log10(100 * PGV.expectedSI); double IfromPGV84 = 5.11 + 2.35 * Math.log10(100 * PGV.percentile84); double IfromPGV16 = 5.11 + 2.35 * Math.log10(100 * PGV.percentile16); // Impose minimum intensity if necessary if (IfromPGVmedian < 1) { IfromPGVmedian = 1; } if (IfromPGV84 < 1) { IfromPGV84 = 1; } if (IfromPGV16 < 1) { IfromPGV16 = 1; } IfromPGV.expectedSI = IfromPGVmedian; IfromPGV.percentile84 = IfromPGV84; IfromPGV.percentile16 = IfromPGV16; return IfromPGV; }
private synchronized void createModel() { if (initialized) return; long st = System.currentTimeMillis(); // create a unique list of keywords Set<String> unique = new HashSet<String>(); for (Document d : docs) unique.addAll(d.keys()); words = new ArrayList<String>(unique); Collections.sort(words); // raw values termdoc = new float[words.size()][docs.size()]; for (int i = 0, I = words.size(); i < I; i++) { for (int j = 0, J = docs.size(); j < J; j++) { termdoc[i][j] = docs.get(j).count(words.get(i)); } } // calc average number of terms per doc int c = 0; for (int j = 0, J = docs.size(); j < J; j++) { for (int i = 0, I = words.size(); i < I; i++) { c += termdoc[i][j]; } } float avt = c / docs.size(); System.out.println("avt: " + avt); // adjust by log(n)/log(fd) for (int i = 0, I = words.size(); i < I; i++) { for (int j = 0, J = docs.size(); j < J; j++) { if (termdoc[i][j] != 0) termdoc[i][j] = (float) (Math.log10(termdoc[i][j]) / Math.log10(avt)); } } // calculate the euclidean distance (frobenius norms): (a^2+b^2)^1/2 edst = new float[docs.size()]; for (int j = 0, J = termdoc[0].length; j < J; j++) { for (int i = 0, I = termdoc.length; i < I; i++) { edst[j] += Math.pow(termdoc[i][j], 2); } } for (int i = 0, I = edst.length; i < I; i++) edst[i] = (float) Math.sqrt(edst[i]); // we are ready for searches initialized = true; System.out.println( "created Term Model of (" + words.size() + " x " + docs.size() + ") in " + (System.currentTimeMillis() - st) + " msec"); }
/**
 * Returns the first nine digits of the n-th Fibonacci number using Binet's
 * formula in log space: log10(F(n)) ~ n*log10(phi) - log10(sqrt(5)). The
 * fractional part of that logarithm carries the leading digits, so
 * 10^(8 + frac) yields a nine-digit integer (intended for large n).
 */
public static int firstNineDigitsOfNthFibonacci(int n) {
  final double phi = (1.0 + Math.sqrt(5.0)) / 2.0;
  final double logFib = (double) n * Math.log10(phi) - Math.log10(Math.sqrt(5.0));
  final double fractionalPart = logFib % 1.0;
  return (int) Math.pow(10.0, 8.0 + fractionalPart);
}
/**
 * Maps the complex point (u, v) to a palette colour using smooth escape-time
 * iteration of the configured formula.
 *
 * @param u real part of the sample point
 * @param v imaginary part of the sample point
 * @return RGB colour for the point, or RGB_BLACK if the orbit stays within the
 *     escape radius for all available palette steps
 */
@Override
public final int rgbColor(final double u, final double v) {
  Complex z = Complex.valueOf(u, v);
  for (int i = 1; i < palette.getSize(); i++) {
    z = formula.calculate(z);
    double magn = z.magnitude();
    if (magn > radius) {
      // Smooth (continuous) colouring: offset the integer escape iteration by
      // log(log|z|)/log(order) to avoid colour banding, then normalize the
      // position into [0, 1] for the palette lookup.
      // NOTE(review): mixes Math.log10 (numerator) with Math.log (denominator);
      // this only rescales the smoothing offset, but confirm it is intentional.
      float position = i - (float) (Math.log10(Math.log10(magn)) / Math.log(((IPolynomialFormula) formula).getOrder()));
      position /= (palette.getSize() - 1);
      return palette.getRGB(position);
    }
  }
  return RGB_BLACK;
}
public void TestNBModel( Map<Integer, FilewithClassInfo> filesToTest, double[] priors, Map<Integer, Map<String, Double>> condProb) { // TODO Auto-generated method stub int percentageAccuracyCounter = 0; int percentageErrorCounter = 0; Helper h = new Helper(); for (int i = 0; i < filesToTest.size(); i++) { List<Double> vals = new ArrayList<Double>(); for (int j = 0; j < 2; j++) { Map<String, Double> probMap = condProb.get(j); double prob = Math.log10(priors[j]); for (String token : filesToTest.get(i).WordMap.keySet()) { if (probMap.containsKey(token)) prob += Math.log10(probMap.get(token)); } vals.add(prob); } if (filesToTest.get(i).ClassInfo == h.getMaxValIndex(vals)) percentageAccuracyCounter++; else percentageErrorCounter++; } System.out.println( "percentageAccuracyCounter" + percentageAccuracyCounter + "percentageErrorCounter" + percentageErrorCounter); System.out.println( "Percentage : " + (double) (percentageAccuracyCounter * 100) / filesToTest.size()); }
/**
 * Empirically estimates the time complexity of TimingLab.timeTrial for every key
 * 71..0 via the doubling method: runs problem sizes N, 2N, 4N, ... and reports
 * K = log2(T(2N)/T(N)), the apparent polynomial order between successive runs.
 */
public static void main(String[] args) {
  double BILLION = 1000000000d; // nanoseconds to seconds
  double start = 0; // start time of the current run
  double elapsedTime = 0; // elapsed time of current run
  double prevTime = 0; // elapsed time of previous run
  double ratio = 1; // currentTime / prevTime
  double lgratio = 0; // log base 2 of ratio (NOTE(review): never assigned below)
  int N = 1; // problem size parameter
  int key = 23; // selects internal method of RunningTime (NOTE(review): unused below)
  // TimingLab four = new TimingLab(24);
  for (int x = 71; x >= 0; x--) {
    TimingLab tl = new TimingLab(x);
    System.out.println("Time complexity for Key: " + x);
    System.out.println("");
    // Double the problem size up to 1024 and time each call.
    for (int i = N; i <= 1024; i = i * 2) {
      start = System.nanoTime();
      tl.timeTrial(i);
      elapsedTime = (System.nanoTime() - start) / BILLION;
      System.out.print("This call to method TimingLab.timeTrial(" + i + ") took ");
      System.out.printf("%4.3f", elapsedTime);
      System.out.println(" seconds.");
      // Skip the ratio on the first run of each key (no previous time yet).
      if (prevTime != 0) {
        ratio = elapsedTime / prevTime;
        System.out.println("R = " + ratio);
        // log10(r)/log10(2) == log2(r): growth exponent between runs.
        System.out.println("K = " + (Math.log10(ratio) / Math.log10(2)));
      }
      prevTime = elapsedTime;
    }
    prevTime = 0; // reset so the next key's first run has no stale baseline
    System.out.println("");
  }
}
/**
 * Builds an item holding the Normalized Google Distance (NGD) between two terms:
 * NGD = (max(log f1, log f2) - log f12) / (log M - min(log f1, log f2)),
 * where f1/f2 are the per-term document counts, f12 the co-occurrence count,
 * and M the total document count.
 */
public NGDItem(
    long term1count,
    long term2count,
    String term1,
    String term2,
    List<String[]> term1Array,
    List<String[]> term2Array,
    long combocount,
    boolean useAlias,
    long totalDocCount) {
  this.term1count = term1count;
  this.term2count = term2count;
  this.term1 = term1;
  this.term2 = term2;
  this.combocount = combocount;
  this.term1Array = term1Array;
  this.term2Array = term2Array;
  this.useAlias = useAlias;
  this.totalDocCount = totalDocCount;
  if (this.combocount == 0) {
    // Terms never co-occur: -1 marks the distance as undefined.
    this.ngd = -1;
  } else {
    double term1_log = Math.log10(this.term1count);
    double term2_log = Math.log10(this.term2count);
    double combo_log = Math.log10(this.combocount);
    this.ngd =
        (Math.max(term1_log, term2_log) - combo_log)
            / (Math.log10(this.totalDocCount) - Math.min(term1_log, term2_log));
  }
}
private void buildSegmentTree() {
  // An empty input array needs no tree at all.
  if (nums.length == 0) return;
  // Tree height = ceil(log2(n)), via the change-of-base identity.
  final int height = (int) Math.ceil(Math.log10(nums.length) / Math.log10(2));
  // A full binary tree of that height holds 2 * 2^height - 1 nodes.
  final int nodeCount = (int) (2 * Math.pow(2, height) - 1);
  segTree = new int[nodeCount];
  // Recursively populate the tree over the whole index range [0, n-1].
  buildSegmentTree(0, nums.length - 1, 0);
}
/**
 * Formats a byte count as a human-readable string, e.g. 1536 -> "1.5 KB".
 *
 * @param size byte count; non-positive values yield "0"
 * @return formatted size with the largest fitting unit (capped at TB)
 */
public static String readableFileSize(long size) {
  if (size <= 0) return "0";
  final String[] units = new String[] {"B", "KB", "MB", "GB", "TB"};
  // 1024-based order of magnitude of `size`.
  // BUG FIX: clamp to the last unit — sizes >= 1024^5 (PB range and above)
  // previously indexed past the end of `units` and threw
  // ArrayIndexOutOfBoundsException.
  int digitGroups = Math.min(units.length - 1, (int) (Math.log10(size) / Math.log10(1024)));
  return new DecimalFormat("#,##0.#").format(size / Math.pow(1024, digitGroups))
      + " "
      + units[digitGroups];
}
/**
 * Calculates the enrichment score of this gene given the total number of genes
 * considered during scoring, indicating whether the score is better than
 * expected by chance.
 *
 * @param numberOfGenes total number of genes considered during the scoring process
 * @return log10(score * numberOfGenes), rounded to three decimal places; an
 *     infinite value (e.g. when score is 0) is returned unrounded, since
 *     Math.round would mangle it
 */
public double getEnrichmentScore(int numberOfGenes) {
  final double enrichment = Math.log10(this.score) + Math.log10((double) numberOfGenes);
  if (Double.isInfinite(enrichment)) {
    return enrichment;
  }
  return (double) Math.round(enrichment * 1E3) / 1E3;
}
/**
 * Formats a byte count as a human-readable size string (B/KB/MB/GB format).
 *
 * @param filesize byte count; non-positive values yield "0"
 * @return formatted size with the largest fitting unit from {@code sizeUnits}
 */
public static String formattedFileSize(long filesize) {
  if (filesize <= 0) return "0";
  // BUG FIX: clamp the unit index — sizes beyond the largest entry in
  // sizeUnits previously indexed past the end of the array and threw
  // ArrayIndexOutOfBoundsException.
  int digGroup = Math.min(sizeUnits.length - 1, (int) (Math.log10(filesize) / Math.log10(1024)));
  return new DecimalFormat("#,##0.00").format(filesize / Math.pow(1024, digGroup))
      + " "
      + sizeUnits[digGroup];
}
private String buildZarmonierName(int number) {
  // Number of "doppel" prefixes grows with log2(number): round(log2(n)) - 2,
  // so 4 -> "Vier", 8 -> "Doppelvier", 16 -> "Doppeldoppelvier", ...
  final StringBuilder name = new StringBuilder();
  final int prefixCount = Math.round((float) (Math.log10(number) / Math.log10(2))) - 2;
  for (int i = 0; i < prefixCount; i++) {
    name.append("doppel");
  }
  name.append("vier");
  // Capitalize the first letter of the assembled name.
  name.setCharAt(0, Character.toUpperCase(name.charAt(0)));
  return name.toString();
}
/**
 * Returns whether {@code n} is a positive power of two.
 *
 * <p>BUG FIX: the original compared {@code log10(n)/log10(2)} against its int
 * cast, which is fragile — floating-point rounding can misclassify large powers
 * of two — and handled {@code n <= 0} only by accident of NaN/Infinity casts.
 * A positive power of two has exactly one set bit, so the bit test
 * {@code n & (n - 1) == 0} is exact for every int.
 */
public boolean isPowerOfTwo(int n) {
  return n > 0 && (n & (n - 1)) == 0;
}
/**
 * Converts a number of bytes to a human readable file size (eg 3.5 GiB).
 *
 * <p>Based on http://stackoverflow.com/a/5599842
 *
 * @param context used to resolve the localized unit names
 * @param bytes byte count; non-positive values yield "0" with the smallest unit
 * @return formatted size with the largest fitting localized unit
 */
public static String readableFileSize(Context context, long bytes) {
  final String[] units = context.getResources().getStringArray(R.array.file_size_units);
  if (bytes <= 0) return "0 " + units[0];
  // BUG FIX: clamp the unit index — byte counts >= 1024^units.length previously
  // indexed past the end of the units array and threw
  // ArrayIndexOutOfBoundsException.
  int digitGroups = Math.min(units.length - 1, (int) (Math.log10(bytes) / Math.log10(1024)));
  return new DecimalFormat("#,##0.#").format(bytes / Math.pow(1024, digitGroups))
      + " "
      + units[digitGroups];
}
/**
 * Applies a volume level to the underlying player, if playback is active,
 * and wakes any threads waiting on the player lock.
 */
public void setVolume(int vol) {
  // NOTE(review): MediaPlayer-style setVolume expects scalars in [0.0, 1.0],
  // but Math.log10(vol) exceeds 1.0 for vol > 10 and is -Infinity for vol == 0.
  // This looks wrong for a 0-100 volume scale — the usual mapping is
  // 1 - log(max - vol) / log(max). Confirm the intended range of `vol`.
  synchronized (this.lock) {
    if (this.player != null && this.started) {
      player.setVolume((float) Math.log10(vol), (float) Math.log10(vol));
      lock.notifyAll();
    }
  }
}
/** gate score = smallest noise margin (distance in log(REU) of input REU to margin REU) */
public static void evaluateGateNoiseMargin(Gate g, Args options) {
  // Noise-margin scoring is optional; when disabled every gate trivially
  // satisfies the contract.
  if (options.is_noise_margin() == false) {
    g.get_scores().set_noise_margin_contract(true);
    return;
  }
  // I/O gates have no transfer function to score.
  if (g.Type == GateType.INPUT || g.Type == GateType.OUTPUT || g.Type == GateType.OUTPUT_OR) {
    return;
  }
  // "x" to value: worst-case incoming signal levels per input variable.
  HashMap<String, Double> lowest_on_reu = GateUtil.getIncomingONlow(g);
  HashMap<String, Double> highest_off_reu = GateUtil.getIncomingOFFhigh(g);
  ArrayList<Double> all_margins = new ArrayList<Double>();
  // Low-side margins: how far the highest OFF input sits below the IL threshold.
  for (String var : highest_off_reu.keySet()) {
    if (g.get_variable_thresholds().get(var) != null) {
      // IL is the input-low threshold
      Double IL = g.get_variable_thresholds().get(var)[0];
      // actual REU
      Double log_input_reu = Math.log10(highest_off_reu.get(var));
      // NML is the margin/width between the actual REU and the threshold REU
      Double NML = Math.log10(IL) - log_input_reu;
      all_margins.add(NML);
    }
  }
  // High-side margins: how far the lowest ON input sits above the IH threshold.
  for (String var : lowest_on_reu.keySet()) {
    if (g.get_variable_thresholds().get(var) != null) {
      Double IH = g.get_variable_thresholds().get(var)[1];
      Double NMH = Math.log10(lowest_on_reu.get(var)) - Math.log10(IH);
      all_margins.add(NMH);
    }
  }
  if (all_margins.isEmpty()) {
    // No thresholds defined: score 0 but treat the contract as satisfied.
    g.get_scores().set_noise_margin(0.0);
    g.get_scores().set_noise_margin_contract(true);
  } else {
    // The gate's score is its smallest (worst) margin; a negative margin
    // means some input violates its threshold, breaking the contract.
    Collections.sort(all_margins);
    g.get_scores().set_noise_margin(all_margins.get(0));
    if (all_margins.get(0) < 0) {
      g.get_scores().set_noise_margin_contract(false);
    } else {
      g.get_scores().set_noise_margin_contract(true);
    }
  }
}
/**
 * Reads n and prints the total number of digits needed to write every integer
 * from 1 to n.
 */
private void solve() {
  int n = sc.nextInt();
  long result = 0;
  // Every complete block of d-digit numbers (1-9, 10-99, ...) contributes
  // 9 * 10^(d-1) numbers of d digits each.
  for (int i = 0; i <= Math.log10(n) - 1; i++) {
    result += 9 * Math.pow(10, i) * (i + 1);
  }
  // The final partial block: numbers from 10^floor(log10 n) up to n, each
  // with floor(log10 n) + 1 digits.
  result += ((int) Math.log10(n) + 1) * (n - Math.pow(10, (int) Math.log10(n)) + 1);
  System.out.println(result);
}
public static String format(long bytes) { if (bytes <= 0) { return "0"; //$NON-NLS-1$ } int digitGroups = (int) (Math.log10(bytes) / Math.log10(1024)); return new DecimalFormat("#,##0.#").format(bytes / Math.pow(1024, digitGroups)) + " " + UNITS[digitGroups]; // $NON-NLS-1$ //$NON-NLS-2$ }
public double get_logP_additive() {
  // Two-sided -log10 p-value for the additive effect: double the appropriate
  // tail of the cumulative p-value, chosen by the sign of the statistic A_t.
  final double tail = (A_t < 0) ? A_t_p_value_cu : (1 - A_t_p_value_cu);
  return -Math.log10(tail * 2);
}
public double get_logP_dominance() {
  // Two-sided -log10 p-value for the dominance effect: double the appropriate
  // tail of the cumulative p-value, chosen by the sign of the statistic D_t.
  final double tail = (D_t < 0) ? D_t_p_value_cu : (1 - D_t_p_value_cu);
  return -Math.log10(tail * 2);
}
/**
 * Initializes this tree by splitting it until hashdepth is reached, or until an additional level
 * of splits would violate maxsize.
 *
 * <p>NB: Replaces all nodes in the tree.
 */
public void init() {
  // determine the depth to which we can safely split the tree:
  // log2(maxsize) levels fit within maxsize nodes, further capped by hashdepth
  byte sizedepth = (byte) (Math.log10(maxsize) / Math.log10(2));
  byte depth = (byte) Math.min(sizedepth, hashdepth);
  // rebuild over the full token ring: the minimum token as both endpoints
  // denotes the complete range
  Token mintoken = partitioner.getMinimumToken();
  root = initHelper(mintoken, mintoken, (byte) 0, depth);
  // a complete split to `depth` yields 2^depth leaf ranges
  size = (long) Math.pow(2, depth);
}
private String getTweetPadding(int tweetIdx) { // copiado do Utils.java int MaxNTweets = 10000; StringBuilder strBuild = new StringBuilder(); int current = (int) Math.floor(Math.log10(tweetIdx)) + 1; int expected = (int) Math.floor(Math.log10(MaxNTweets)); if (tweetIdx == 0) current = 1; for (int i = 0; i < (expected - current); i++) strBuild.append(0); strBuild.append(tweetIdx); return strBuild.toString(); }
public boolean isPowerOfTwo(int n) { // math sol if (n <= 0) return false; if (n == 1) return true; double x = Math.log10(n) / Math.log10(2); int _x = (int) x; return x - _x == 0; }
/**
 * Converts the per-event score (a -log10 p-value) into a corrected -log10
 * probability that accounts for n observed events among N trials, writing the
 * result back into {@code score}.
 */
private void finalizeLpgn(Score score, Score count, int N) {
  if (count == null || count.getValue() == 0) {
    // No occurrence count available: use 1 - (1 - p)^N, the chance of seeing
    // the event at least once in N independent trials.
    double p = Math.pow(10.0, -score.getValue());
    score.setValue(getLp(1.0 - Math.pow(1.0 - p, N)));
    return;
  }
  int n = (int) count.getValue();
  // Start from the gamma-based tail probability for n events in log space.
  double lpgn = -Math.log10(gamma(score.getValue(), n));
  // Fold in a combinatorial factor in log space: adds log((n+1)..N) and
  // removes log((N-n)!) — presumably the binomial coefficient C(N, n) times
  // n! relative terms; confirm against the derivation this implements.
  for (int i = n + 1; i <= N; i++) lpgn += -Math.log10(i);
  for (int i = 2; i <= N - n; i++) lpgn -= -Math.log10(i);
  // Cap the -log10 value at 300 to avoid unbounded scores.
  score.setValue(lpgn > 300 ? 300 : lpgn);
}
/**
 * Calculate fire load index: rating of the maximum effort required to contain all probable fires.
 *
 * @param timberSpreadIndex timber spread index; must be positive for a rating
 * @param buildupIndex buildup index; must be positive for a rating
 * @return the fire load index, or -1 when either input is non-positive
 */
private double calculateFireLoadIndex(double timberSpreadIndex, double buildupIndex) {
  // The index is only defined when both inputs are positive.
  if (timberSpreadIndex <= 0. || buildupIndex <= 0.) {
    return -1;
  }
  // Computed in log10 space:
  // log10(FLI) = 1.75*log10(spread) + 0.32*log10(buildup) - 1.64
  final double logFireLoadIndex =
      1.75 * Math.log10(timberSpreadIndex) + .32 * Math.log10(buildupIndex) - 1.64;
  return Math.pow(10., logFireLoadIndex);
}