/* Computes the Laplace-smoothed log-probability of a query given a document:
   each query term contributes log((tf + 1) / (docLen + vocabSize)), and query
   terms with no frequency entry are treated as having tf = 0. */
  public static double laplacePerTerm(
      String termFreq, double docLen, double avgLen, long vocabSize, int queryWords) {

    // termFreq holds space-separated per-term frequencies for the query terms.
    String[] termFs = termFreq.split(" ");

    double log_p_lap = 0.0;

    // Query terms missing from termFreq get the zero-frequency (background) contribution.
    if (termFs.length < queryWords) {
      log_p_lap += Math.log(1 / (docLen + vocabSize)) * (queryWords - termFs.length);
    }

    for (int i = 0; i < termFs.length; i++) {
      log_p_lap += Math.log((Integer.parseInt(termFs[i]) + 1) / (docLen + vocabSize));
    }

    return log_p_lap;
  }
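A minimal usage sketch (hypothetical numbers; assumes it lives in the same class as laplacePerTerm above):

  public static void main(String[] args) {
    // "2 0 1" holds per-term document frequencies for a 3-word query; the document
    // is 250 tokens long, and 178081L is an assumed vocabulary size (the same
    // constant appears in Example No. 7 below). avgLen is accepted but unused.
    double logP = laplacePerTerm("2 0 1", 250.0, 180.0, 178081L, 3);
    System.out.println("Laplace-smoothed log P(q|d) = " + logP);
  }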
Example No. 2
 public static String humanReadableByteCount(long bytes, boolean si) {
   int unit = si ? 1000 : 1024;
   if (bytes < unit) return bytes + " B";
   int exp = (int) (Math.log(bytes) / Math.log(unit));
   String pre = (si ? "kMGTPE" : "KMGTPE").charAt(exp - 1) + (si ? "" : "i");
   return String.format("%.1f %sB", bytes / Math.pow(unit, exp), pre);
 }
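A few illustrative calls (expected output assumes a locale that uses '.' as the decimal separator):

 public static void main(String[] args) {
   System.out.println(humanReadableByteCount(999, true));        // 999 B
   System.out.println(humanReadableByteCount(1000, true));       // 1.0 kB
   System.out.println(humanReadableByteCount(1024, false));      // 1.0 KiB
   System.out.println(humanReadableByteCount(123456789, true));  // 123.5 MB
 }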
Example No. 3
    public Double eval(Double lmbda) {
      double c1 = 0.0;
      double c3 = -1.0 / (double) Math.sqrt(2.0 * variance);
      double sum = 0.0;

      double cpart = -0.5 * Math.log(Math.sqrt(2.0 * Math.PI * variance));

      for (int i = 0; i < data.size(); i++) {
        for (int k = 0; k < channels.length; k++) {
          c1 += cpart;
          double pi = 0.0;

          for (Integer t : transcripts.keySet()) {
            if (transcripts.get(t).contains(i)) {
              double dit = delta(i, t);
              double gammai = gammas[t][k];
              double pit = gammai * Math.exp(-lmbda * dit);
              pi += pit;
            }
          }

          double zi = Math.log(pi);
          double err = data.values(i)[channels[k]] - zi;

          sum += (err * err);
        }
      }

      return c1 + c3 * sum;
    }
Example No. 4
    public Double eval(Double gamma) {
      double c1 = 0.0;
      double c3 = -1.0 / (double) Math.sqrt(2.0 * variance);
      double sum = 0.0;

      double cpart = -0.5 * Math.log(Math.sqrt(2.0 * Math.PI * variance));

      for (Integer i : transcripts.get(gammat)) {
        c1 += cpart;
        double pi = 0.0;

        for (int t = 0; t < fiveprime.length; t++) {
          if (transcripts.get(t).contains(i)) {
            double gammai = gammat == t ? gamma : gammas[t][gammak];
            double dit = delta(i, t);

            double pit = gammai * Math.exp(-lambda * dit);
            pi += pit;
          }
        }

        double zi = Math.log(pi);
        double err = data.values(i)[channels[gammak]] - zi;

        sum += (err * err);
      }

      return c1 + c3 * sum;
    }
Example No. 5
  private static JointClassification jointJointClassify(ssd.Document doc) {

    double[][] classprobs = new double[PartsOfSpeech.length][CATEGORIES.length];
    double[] jointprobs = new double[CATEGORIES.length];
    double[] ranks = new double[CATEGORIES.length];
    double[] classif_variances = new double[PartsOfSpeech.length];

    java.util.Arrays.fill(jointprobs, 0.0); // the original enhanced-for only reassigned its loop variable

    for (int i = 0; i < CATEGORIES.length; i++) ranks[i] = 0;

    JointClassification jc = new JointClassification(CATEGORIES, jointprobs);

    for (int i = 0; i < PartsOfSpeech.length; i++) {
      String pos = PartsOfSpeech[i];
      if (doc.mPosTexts.get(pos) != null) {
        JointClassification jc1 = compiledClassifiers.get(pos).classify(doc.mPosTexts.get(pos));
        for (int j = 0; j < CATEGORIES.length; j++)
          classprobs[i][j] = getCategoryConditionalProbability(CATEGORIES[j], jc1);
      }
    }

    for (int i = 0; i < classif_variances.length; i++) {
      classif_variances[i] =
          variance(classprobs[i]); // get the variance of each classifier into classif_variances
    }

    double average_variance = 0, max_classif_variance = -1, sum_variance = 0;
    for (double d : classif_variances) {
      sum_variance += d;
      if (d > max_classif_variance) max_classif_variance = d;
    }

    average_variance = sum_variance / classif_variances.length;
    // say we cheat and let the average be the same as the 'all' partofspeech
    for (int i = 0; i < PartsOfSpeech.length; i++)
      if (PartsOfSpeech[i].equals("all")) average_variance = classif_variances[i];
    // average_variance=max_classif_variance;
    /*switch this all back, write the paper, finish the presentation. WRAP THE F**K UP */
    /* No! this method achieves 67% accuracy on 20news! */

    for (int i = 0; i < CATEGORIES.length; i++)
      for (int j = 0; j < classif_variances.length; j++)
        ranks[i] += classif_variances[j] * classprobs[j][i] / sum_variance;

    double ranksum = 0;
    for (double r : ranks) ranksum += r;

    for (int i = 0; i < CATEGORIES.length; i++) ranks[i] = ranks[i] / ranksum;

    for (int i = 0; i < CATEGORIES.length; i++) ranks[i] = Math.log(ranks[i]) / Math.log(2);

    ranks = sort_with_categories(ranks);
    // the new transformed matrix is a matrix of log2 of probabilities of belonging calculated from
    // ranks
    jc = new JointClassification(CATEGORIES, ranks);

    return jc;
  }
Example No. 6
  public static double getEntropy(double[] P) {
    double entropy = 0;
    // Skip zero-probability outcomes; the original 1/(p*Math.log(1/p)) guard let NaN through when p == 0.
    for (double p : P) if (p > 0) entropy += -p * Math.log(p);
    double psum = 0;
    for (double p : P) psum += p;
    System.err.print("Sum of probs: " + psum + " [ ");
    for (double p : P) System.err.printf("%1.2f,", p);
    System.err.println(" ]");
    System.err.println("Entropy: " + entropy);

    return entropy;
  }
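A quick check of the method above: it uses the natural logarithm, so a uniform distribution over four outcomes gives ln(4), roughly 1.386 nats.

  public static void main(String[] args) {
    double e = getEntropy(new double[] {0.25, 0.25, 0.25, 0.25});
    System.out.println("expected ~1.386, got " + e); // diagnostics also go to stderr
  }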
Example No. 7
  // Query based on term
  public static HashMap<String, Double> queryTF(
      Client client,
      String qb,
      String index,
      String type,
      HashMap<String, Double> result,
      HashMap<String, Double> lengthmap,
      HashMap<String, Double> staticmap)
      throws IOException, JSONException {
    HashMap<String, Object> parammap = new HashMap<String, Object>();
    parammap.put("field", "text");
    parammap.put("term", qb);
    SearchResponse responsesearch =
        client
            .prepareSearch(index)
            .setQuery(
                new FunctionScoreQueryBuilder(QueryBuilders.matchQuery("text", qb))
                    .boostMode("replace")
                    .add(ScoreFunctionBuilders.scriptFunction("getTF").params(parammap)))
            .setSize(100000)
            .setNoFields()
            .execute()
            .actionGet();
    JSONObject obj = new JSONObject(responsesearch);
    JSONObject obj2 = obj.getJSONObject("hits");
    JSONArray hits = obj2.getJSONArray("hits");
    Iterator<String> itrnext = staticmap.keySet().iterator();
    while (itrnext.hasNext()) {
      String id = itrnext.next();
      double oldtf = staticmap.get(id);
      double v = 178081;
      double len = lengthmap.get(id);
      double newtf = 1 / (len + v);
      double lap = Math.log(newtf);
      staticmap.put(id, oldtf + lap);
    }

    for (int i = 0; i < hits.length(); i++) {
      JSONObject newobj = hits.getJSONObject(i);
      String id = newobj.getString("id");
      Double tf = newobj.getDouble("score");
      double len = lengthmap.get(id);
      double v = 178081;
      double lap = (tf + 1) / (len + v);
      double inc = 1 / (len + v);
      lap = Math.log(lap);
      inc = Math.log(inc);
      if (staticmap.containsKey(id)) {
        double oldtf = staticmap.get(id);
        staticmap.put(id, oldtf + lap - inc);
      } else staticmap.put(id, lap);
    }
    return staticmap;
  }
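The per-hit update above adds lap and subtracts the background term inc; the net effect is adding log(tf + 1), as this standalone check illustrates (values are hypothetical):

  public static void main(String[] args) {
    double tf = 3.0, len = 250.0, v = 178081.0;
    double lap = Math.log((tf + 1) / (len + v)); // smoothed term for a matching document
    double inc = Math.log(1 / (len + v));        // background term added beforehand
    // log((tf+1)/(len+v)) - log(1/(len+v)) == log(tf+1), so both print ~1.3863
    System.out.println((lap - inc) + " vs " + Math.log(tf + 1));
  }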
Example No. 8
  public TopicScores getTokenDocumentDiscrepancies() {
    TopicScores scores = new TopicScores("token-doc-diff", numTopics, numTopWords);
    scores.wordScoresDefined = true;

    for (int topic = 0; topic < numTopics; topic++) {
      int[][] matrix = topicCodocumentMatrices[topic];
      TreeSet<IDSorter> sortedWords = topicSortedWords.get(topic);

      double topicScore = 0.0;

      double[] wordDistribution = new double[numTopWords];
      double[] docDistribution = new double[numTopWords];

      double wordSum = 0.0;
      double docSum = 0.0;

      int position = 0;
      Iterator<IDSorter> iterator = sortedWords.iterator();
      while (iterator.hasNext() && position < numTopWords) {
        IDSorter info = iterator.next();

        wordDistribution[position] = info.getWeight();
        docDistribution[position] = matrix[position][position];

        wordSum += wordDistribution[position];
        docSum += docDistribution[position];

        position++;
      }

      for (position = 0; position < numTopWords; position++) {
        double p = wordDistribution[position] / wordSum;
        double q = docDistribution[position] / docSum;
        double meanProb = 0.5 * (p + q);

        double score = 0.0;
        if (p > 0) {
          score += 0.5 * p * Math.log(p / meanProb);
        }
        if (q > 0) {
          score += 0.5 * q * Math.log(q / meanProb);
        }

        scores.setTopicWordScore(topic, position, score);
        topicScore += score;
      }

      scores.setTopicScore(topic, topicScore);
    }

    return scores;
  }
Example No. 9
  // Entropy(S) = - sum_{i=1..n} p(i) * log2(p(i))
  private double computeEntropy() {
    double sum = 0.0;
    int numTags = myTags.size();

    for (int i = 0; i < numTags; i++) {
      String tag = (String) myTags.elementAt(i);
      if (!tag.equals(boundaryTag)) {
        double probTag = getUnigramProb(tag);
        double logProbTag = Math.log(probTag) / Math.log(2.0); // Math.log is natural log, base e
        sum += probTag * logProbTag;
      }
    }
    return -sum;
  }
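A standalone illustration of the base conversion used above (Math.log is base e, so dividing by Math.log(2.0) yields bits): a fair coin has exactly 1 bit of entropy, a 0.9/0.1 split roughly 0.469 bits.

  public static void main(String[] args) {
    double[][] dists = {{0.5, 0.5}, {0.9, 0.1}};
    for (double[] dist : dists) {
      double sum = 0.0;
      for (double p : dist) sum += p * (Math.log(p) / Math.log(2.0));
      System.out.println(-sum); // 1.0, then ~0.469
    }
  }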
Example No. 10
 public static double betainv(double x, double p, double q) {
   // ALGORITHM AS 63 APPL. STATIST. VOL.32, NO.1
   // Computes P(Beta>x)
   double beta = Maths.logBeta(p, q), acu = 1E-14;
   double cx, psq, pp, qq, x2, term, ai, betain, ns, rx, temp;
   boolean indx;
   if (p <= 0 || q <= 0) return (-1.0);
   if (x <= 0 || x >= 1) return (-1.0);
   psq = p + q;
   cx = 1 - x;
   if (p < psq * x) {
     x2 = cx;
     cx = x;
     pp = q;
     qq = p;
     indx = true;
   } else {
     x2 = x;
     pp = p;
     qq = q;
     indx = false;
   }
   term = 1;
   ai = 1;
   betain = 1;
   ns = qq + cx * psq;
   rx = x2 / cx;
   temp = qq - ai;
   if (ns == 0) rx = x2;
   while (temp > acu && temp > acu * betain) {
     term = term * temp * rx / (pp + ai);
     betain = betain + term;
     temp = Math.abs(term);
     if (temp > acu && temp > acu * betain) {
       ai++;
       ns--;
       if (ns >= 0) {
         temp = qq - ai;
         if (ns == 0) rx = x2;
       } else {
         temp = psq;
         psq += 1;
       }
     }
   }
   betain *= Math.exp(pp * Math.log(x2) + (qq - 1) * Math.log(cx) - beta) / pp;
   if (indx) betain = 1 - betain;
   return (betain);
 }
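A small sanity check, assuming this method and the Maths.logBeta helper it depends on are on the classpath: at the symmetric point x = 0.5 with p = q = 2, the incomplete beta ratio is 0.5 under either tail convention, so the tail direction mentioned in the header comment does not affect this value.

 public static void main(String[] args) {
   // Symmetric case: both P(Beta <= 0.5) and P(Beta > 0.5) equal 0.5 for p = q = 2.
   System.out.println(betainv(0.5, 2.0, 2.0)); // ~0.5
 }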
Example No. 11
  public double findEntropy(Collection<String[]> data) {
    double p_good = ((double) count(true, data)) / data.size();
    double p_bad = ((double) count(false, data)) / data.size();

    double entropy;

    if (p_good > 0 && p_bad > 0)
      entropy = -1.0 / Math.log(2) * (p_good * Math.log(p_good) + p_bad * Math.log(p_bad));
    else entropy = 0;

    // System.out.println("p_good (" + p_good + ")  p_bad (" + p_bad +")" + "  entropy (" + entropy
    // + ")");

    return entropy;
  }
Example No. 12
    public Double eval(Double lmbda) {
      double c = 1.0 / (double) Math.sqrt(variance);
      double sum = 0.0;

      for (int i = 0; i < data.size(); i++) {
        for (int k = 0; k < channels.length; k++) {
          double pi = 0.0;
          double pdi = 0.0;

          for (Integer t : transcripts.keySet()) {
            if (transcripts.get(t).contains(i)) {
              double gammai = gammas[t][k];
              double dit = delta(i, t);

              double falloff = Math.exp(-lmbda * dit);
              double pit = gammai * falloff;
              double pdit = pit * dit;

              pi += pit;
              pdi += pdit;
            }
          }

          double zi = Math.log(pi);
          double err = data.values(i)[channels[k]] - zi;
          double ratio = pdi / pi;
          double termi = (err * ratio);

          sum += termi;
        }
      }

      return c * sum;
    }
Example No. 13
    public Double eval(Double gamma) {
      double c = 1.0 / (double) Math.sqrt(variance);
      double sum = 0.0;
      for (Integer i : transcripts.get(gammat)) {
        double pi = 0.0;

        for (int t = 0; t < fiveprime.length; t++) {
          if (transcripts.get(t).contains(i)) {
            double gammai = gammat == t ? gamma : gammas[t][gammak];
            double dit = delta(i, t);

            double pit = gammai * Math.exp(-lambda * dit);
            pi += pit;
          }
        }

        double zi = Math.log(pi);
        double err = data.values(i)[channels[gammak]] - zi;
        double ratio = (Math.exp(-lambda * delta(i, gammat))) / pi;
        double termi = (err * ratio);

        sum += termi;
      }

      return c * sum;
    }
Example No. 14
  public double error() {
    double sum = 0.0;

    for (int i = 0; i < data.size(); i++) {
      for (int k = 0; k < channels.length; k++) {
        double pi = 0.0;

        for (Integer t : transcripts.keySet()) {
          if (transcripts.get(t).contains(i)) {
            double gammai = gammas[t][k];
            double dit = delta(i, t);

            double pit = gammai * Math.exp(-lambda * dit);
            pi += pit;
          }
        }

        double zi = Math.log(pi);
        double err = Math.abs(data.values(i)[channels[k]] - zi);

        sum += err;
      }
    }

    return sum;
  }
Example No. 15
 /**
  * Apply this operator (function) to the supplied argument
  *
  * @param value the argument
  * @return the result
  */
 protected double applyFunction(double value) {
   switch (m_operator) {
     case 'l':
       return Math.log(value);
     case 'b':
       return Math.abs(value);
     case 'c':
       return Math.cos(value);
     case 'e':
       return Math.exp(value);
     case 's':
       return Math.sqrt(value);
     case 'f':
       return Math.floor(value);
     case 'h':
       return Math.ceil(value);
     case 'r':
       return Math.rint(value);
     case 't':
       return Math.tan(value);
     case 'n':
       return Math.sin(value);
   }
   return Double.NaN;
 }
Example No. 16
  /**
   * @param placeNameServiceSet the set of PlaceNameService objects that PlaceNameLayer will render.
   * @throws IllegalArgumentException if {@link
   *     gov.nasa.worldwind.layers.placename.PlaceNameServiceSet} is null
   */
  public PlaceNameLayer(PlaceNameServiceSet placeNameServiceSet) {
    if (placeNameServiceSet == null) {
      String message = Logging.getMessage("nullValue.PlaceNameServiceSetIsNull");
      Logging.logger().fine(message);
      throw new IllegalArgumentException(message);
    }

    //
    this.placeNameServiceSet = placeNameServiceSet.deepCopy();
    for (int i = 0; i < this.placeNameServiceSet.getServiceCount(); i++) {
      // todo do this for long as well and pick min
      int calc1 =
          (int)
              (PlaceNameService.TILING_SECTOR.getDeltaLatDegrees()
                  / this.placeNameServiceSet
                      .getService(i)
                      .getTileDelta()
                      .getLatitude()
                      .getDegrees());
      int numLevels = (int) Math.log(calc1);
      navTiles.add(
          new NavigationTile(
              this.placeNameServiceSet.getService(i),
              PlaceNameService.TILING_SECTOR,
              numLevels,
              "top"));
    }

    if (!WorldWind.getMemoryCacheSet().containsCache(Tile.class.getName())) {
      long size = Configuration.getLongValue(AVKey.PLACENAME_LAYER_CACHE_SIZE, 2000000L);
      MemoryCache cache = new BasicMemoryCache((long) (0.85 * size), size);
      cache.setName("Placename Tiles");
      WorldWind.getMemoryCacheSet().addCache(Tile.class.getName(), cache);
    }
  }
Example No. 17
  public TopicScores getCoherence() {
    TopicScores scores = new TopicScores("coherence", numTopics, numTopWords);
    scores.wordScoresDefined = true;

    for (int topic = 0; topic < numTopics; topic++) {
      int[][] matrix = topicCodocumentMatrices[topic];

      double topicScore = 0.0;

      for (int row = 0; row < numTopWords; row++) {
        double rowScore = 0.0;
        double minScore = 0.0;
        for (int col = 0; col < row; col++) {
          double score =
              Math.log((matrix[row][col] + model.beta) / (matrix[col][col] + model.beta));
          rowScore += score;
          if (score < minScore) {
            minScore = score;
          }
        }
        topicScore += rowScore;
        scores.setTopicWordScore(topic, row, minScore);
      }

      scores.setTopicScore(topic, topicScore);
    }

    return scores;
  }
Example No. 18
  @Override
  public void trainMostSimilar(List<EnsembleSim> simList) {
    if (simList.isEmpty()) {
      throw new IllegalStateException("no examples to train on!");
    }
    mostSimilarInterpolator.trainMostSimilar(simList);

    // Remove things that have no observed metrics
    List<EnsembleSim> pruned = new ArrayList<EnsembleSim>();
    for (EnsembleSim es : simList) {
      if (es != null && es.getNumMetricsWithScore() > 0) {
        pruned.add(es);
      }
    }

    double[][] X = new double[pruned.size()][numMetrics * 2];
    double[] Y = new double[pruned.size()];
    for (int i = 0; i < pruned.size(); i++) {
      Y[i] = pruned.get(i).knownSim.similarity;
      EnsembleSim es = mostSimilarInterpolator.interpolate(pruned.get(i));
      for (int j = 0; j < numMetrics; j++) {
        X[i][2 * j] = es.getScores().get(j);
        X[i][2 * j + 1] = Math.log(es.getRanks().get(j) + 1);
      }
    }

    OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
    regression.newSampleData(Y, X);

    mostSimilarCoefficients = new TDoubleArrayList(regression.estimateRegressionParameters());
    double pearson = Math.sqrt(regression.calculateRSquared());
    LOG.info("coefficients are " + mostSimilarCoefficients.toString());
    LOG.info("pearson for multiple regression is " + pearson);
  }
Example No. 19
  public TopicScores getDistanceFromUniform() {
    int[] tokensPerTopic = model.tokensPerTopic;

    TopicScores scores = new TopicScores("uniform_dist", numTopics, numTopWords);
    scores.wordScoresDefined = true;

    int numTypes = alphabet.size();

    for (int topic = 0; topic < numTopics; topic++) {

      double topicScore = 0.0;
      int position = 0;
      TreeSet<IDSorter> sortedWords = topicSortedWords.get(topic);

      for (IDSorter info : sortedWords) {
        int type = info.getID();
        double count = info.getWeight();

        double score =
            (count / tokensPerTopic[topic]) * Math.log((count * numTypes) / tokensPerTopic[topic]);

        if (position < numTopWords) {
          scores.setTopicWordScore(topic, position, score);
        }

        topicScore += score;
        position++;
      }

      scores.setTopicScore(topic, topicScore);
    }

    return scores;
  }
Example No. 20
  public double getChi2(Graph g) {
    double chi2 = Double.MAX_VALUE;

    for (int i = 0; i < components.length; i++) {
      chi2 = Math.min(chi2, -2 * Math.log(weights[i]) + components[i].getChi2(g));
    }

    return chi2;
  }
Example No. 21
 public static Complex log(double x_re, double x_im) {
   double h;
   /* #ifdef JAVA5 */
   // h = Math.hypot(x_re, x_im);
   /* #else */
   h = DComplex.hypot(x_re, x_im);
   /* #endif */
   return make(Math.log(h), Math.atan2(x_im, x_re));
 }
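A worked check of the polar decomposition above using only java.lang.Math: log(i) should come out as 0 + i*pi/2.

 public static void main(String[] args) {
   double x_re = 0.0, x_im = 1.0;
   double re = Math.log(Math.hypot(x_re, x_im)); // log|z| = log(1) = 0
   double im = Math.atan2(x_im, x_re);           // arg(z) = pi/2
   System.out.println(re + " + " + im + "i");    // 0.0 + 1.5707963267948966i
 }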
Example No. 22
 /* Generates an exponentially distributed random time difference (relative to timeIn) based on the rate lambda. */
 private double nextRandomTime(double lambda) {
   double timeLag = 0;
   /*System.out.println("I am computing nextRandomTime");
   System.out.println("When I am getting timeIn of " +timeIn+" and lambda of "+lambda);*/
   double random = Math.random();
   timeLag = -Math.log(1 - random) / (lambda / (LENGTHOFADAY * 1000)); // 1 - random avoids log(0), since Math.random() can return 0.0
   /*System.out.println("I am returning a time difference of "+timeLag);*/
   return timeLag;
 }
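A standalone sketch of the same inverse-CDF idea: -ln(U)/lambda with U uniform on (0, 1] is exponentially distributed with rate lambda, so the empirical mean should approach 1/lambda.

 public static void main(String[] args) {
   double lambda = 2.0;
   java.util.Random rng = new java.util.Random(42);
   int n = 100000;
   double sum = 0.0;
   for (int i = 0; i < n; i++) {
     double u = rng.nextDouble();        // in [0, 1)
     sum += -Math.log(1.0 - u) / lambda; // 1 - u is in (0, 1], so no log(0)
   }
   System.out.println("mean " + (sum / n) + ", expected " + (1.0 / lambda));
 }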
Example No. 23
  public boolean play() {

    try {
      if (playState != STOPPED) playStop();

      if (audioBytes == null) return false;

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      clip = (Clip) AudioSystem.getLine(info);
      clip.addLineListener(new ClipListener());

      long clipStart = (long) (audioBytes.length * getStartTime() / (getDuration() * 1000.0));
      long clipEnd = (long) (audioBytes.length * getEndTime() / (getDuration() * 1000.0));
      if ((clipEnd - clipStart) > MAX_CLIP_LENGTH) clipEnd = clipStart + MAX_CLIP_LENGTH;
      byte[] clipBytes = new byte[(int) (clipEnd - clipStart)];
      System.arraycopy(audioBytes, (int) clipStart, clipBytes, 0, clipBytes.length);
      clip.open(format, clipBytes, 0, clipBytes.length);

      FloatControl panControl = (FloatControl) clip.getControl(FloatControl.Type.PAN);

      panControl.setValue((float) panSetting / 100.0f);

      double value = (double) gainSetting;

      FloatControl gainControl = (FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN);
      float dB = (float) (Math.log(value == 0.0 ? 0.0001 : value) / Math.log(10.0) * 20.0);
      gainControl.setValue(dB);
      double playStartTime = (player.getSeekTime() / 100) * (playGetLength());
      clip.setMicrosecondPosition((long) playStartTime);

      clip.start();

      playState = PLAYING;

      return true;

    } catch (Exception ex) {
      ex.printStackTrace();
      playState = STOPPED;
      clip = null;
      return false;
    }
  }
Example No. 24
  /**
   * Convert data to probability co-occurrences (aka calculating the kernel)
   *
   * @param d the data to convert
   * @param u the perplexity of the model
   * @return the probabilities of co-occurrence
   */
  public INDArray computeGaussianPerplexity(final INDArray d, double u) {
    int n = d.rows();
    final INDArray p = zeros(n, n);
    final INDArray beta = ones(n, 1);
    final double logU = Math.log(u);

    log.info("Calculating probabilities of data similarities..");
    for (int i = 0; i < n; i++) {
      if (i % 500 == 0 && i > 0) log.info("Handled " + i + " records");

      double betaMin = Double.NEGATIVE_INFINITY;
      double betaMax = Double.POSITIVE_INFINITY;
      int[] vals = Ints.concat(ArrayUtil.range(0, i), ArrayUtil.range(i + 1, d.columns()));
      INDArrayIndex[] range = new INDArrayIndex[] {new NDArrayIndex(vals)};

      INDArray row = d.slice(i).get(range);
      Pair<INDArray, INDArray> pair = hBeta(row, beta.getDouble(i));
      INDArray hDiff = pair.getFirst().sub(logU);
      int tries = 0;

      // while hdiff > tolerance
      while (BooleanIndexing.and(abs(hDiff), Conditions.greaterThan(tolerance)) && tries < 50) {
        // if hdiff > 0
        if (BooleanIndexing.and(hDiff, Conditions.greaterThan(0))) {
          if (Double.isInfinite(betaMax)) beta.putScalar(i, beta.getDouble(i) * 2.0);
          else beta.putScalar(i, (beta.getDouble(i) + betaMax) / 2.0);
          betaMin = beta.getDouble(i);
        } else {
          if (Double.isInfinite(betaMin)) beta.putScalar(i, beta.getDouble(i) / 2.0);
          else beta.putScalar(i, (beta.getDouble(i) + betaMin) / 2.0);
          betaMax = beta.getDouble(i);
        }

        pair = hBeta(row, beta.getDouble(i));
        hDiff = pair.getFirst().subi(logU);
        tries++;
      }

      p.slice(i).put(range, pair.getSecond());
    }

    // don't need the data in memory after this point
    log.info("Mean value of sigma " + sqrt(beta.rdiv(1)).mean(Integer.MAX_VALUE));
    BooleanIndexing.applyWhere(p, Conditions.isNan(), new Value(realMin));

    // set 0 along the diagonal
    INDArray permute = p.transpose();

    INDArray pOut = p.add(permute);

    pOut.divi(pOut.sum(Integer.MAX_VALUE));
    BooleanIndexing.applyWhere(
        pOut, Conditions.lessThan(Nd4j.EPS_THRESHOLD), new Value(Nd4j.EPS_THRESHOLD));
    // ensure no nans
    return pOut;
  }
Example No. 25
  /*
   * Compute the entropy of an array of bytes.
   *
   * The returned number is entropy in bits per byte (at most 8).
   * For a good long seed (8 bytes) it should fall roughly in the range 2.75-3 (higher is better).
   *
   * For a large set of bytes (>100) it should be almost 8, i.e. nearly 8 random bits per byte.
   */
  public static float entropy(byte[] f) {
    int[] counts = new int[256];
    float entropy = 0;
    float total = f.length;

    for (byte b : f) counts[b + 128]++;
    for (int c : counts) {
      if (c == 0) continue;
      float p = c / total;

      /* Entropy contribution in bits (log base 2).
       *
       * To normalize to [0, 1] instead (entropy measured in byte-sized units),
       * use log base 256: log(p) / log(256).
       */
      entropy -= p * Math.log(p) / Math.log(2);
    }

    return entropy;
  }
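Quick checks of the method above: a constant buffer has zero entropy, while a large random buffer should come out close to 8 bits per byte, as the header comment notes.

  public static void main(String[] args) {
    byte[] constant = new byte[65536];      // all zeros: a single symbol
    System.out.println(entropy(constant));  // 0.0

    byte[] random = new byte[65536];
    new java.util.Random(7).nextBytes(random);
    System.out.println(entropy(random));    // close to 8.0
  }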
Example No. 26
  public TopicScores getDocumentEntropy(int[] tokensPerTopic) {
    TopicScores scores = new TopicScores("document_entropy", numTopics, numTopWords);

    for (int topic = 0; topic < numTopics; topic++) {
      scores.setTopicScore(
          topic,
          -sumCountTimesLogCount[topic] / tokensPerTopic[topic] + Math.log(tokensPerTopic[topic]));
    }

    return scores;
  }
Example No. 27
 private static void playWav(String name, boolean loop, double volume)
     throws FileNotFoundException, IOException, UnsupportedAudioFileException,
         LineUnavailableException {
   AudioInputStream ais = AudioSystem.getAudioInputStream(new File(path + name));
   Clip clip = AudioSystem.getClip();
   clip.open(ais);
   if (loop) {
     clip.loop(-1);
   }
   ((FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN))
       .setValue((float) (Math.log(volume) / Math.log(10.) * 20.));
   clip.start();
   wavMap.put(name, clip);
   //        // open the sound file as a Java input stream
   //        InputStream in = new FileInputStream(path + name);
   //        // create an audiostream from the inputstream
   //        AudioStream audioStream = new AudioStream(in);
   //        // play the audio clip with the audioplayer class
   //        AudioPlayer.player.start(audioStream);
   //        wavMap.put(name, audioStream);
 }
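The MASTER_GAIN control expects decibels; the expression above is the standard amplitude-to-dB conversion, 20 * log10(volume). A few reference points:

 public static void main(String[] args) {
   for (double volume : new double[] {1.0, 0.5, 0.1}) {
     double dB = Math.log(volume) / Math.log(10.0) * 20.0;
     System.out.printf("volume %.1f -> %.2f dB%n", volume, dB); // 0.00, -6.02, -20.00
   }
 }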
Example No. 28
 public double pchisq(double q, double df) {
   // Posten, H. (1989) American Statistician 43 p. 261-265
   double df2 = df * .5;
   double q2 = q * .5;
   int n = 5, k;
   double tk, CFL, CFU, prob;
   if (q <= 0 || df <= 0)
     throw new IllegalArgumentException("Illegal argument " + q + " or " + df + " for qnorm(p).");
   if (q < df) {
     tk = q2 * (1 - n - df2) / (df2 + 2 * n - 1 + n * q2 / (df2 + 2 * n));
     for (k = n - 1; k > 1; k--)
       tk = q2 * (1 - k - df2) / (df2 + 2 * k - 1 + k * q2 / (df2 + 2 * k + tk));
     CFL = 1 - q2 / (df2 + 1 + q2 / (df2 + 2 + tk));
     prob = Math.exp(df2 * Math.log(q2) - q2 - Maths.logGamma(df2 + 1) - Math.log(CFL));
   } else {
     tk = (n - df2) / (q2 + n);
     for (k = n - 1; k > 1; k--) tk = (k - df2) / (q2 + k / (1 + tk));
     CFU = 1 + (1 - df2) / (q2 + 1 / (1 + tk));
     prob = 1 - Math.exp((df2 - 1) * Math.log(q2) - q2 - Maths.logGamma(df2) - Math.log(CFU));
   }
   return prob;
 }
Example No. 29
 public static DComplex power(double x_re, double x_im, double y_re, double y_im) {
   double h;
   /* #ifdef JAVA5 */
   // h = Math.hypot(x_re, x_im);
   /* #else */
   h = DComplex.hypot(x_re, x_im);
   /* #endif */
   double logr = Math.log(h);
   double t = Math.atan2(x_im, x_re);
   double r = Math.exp(logr * y_re - y_im * t);
   t = y_im * logr + y_re * t;
   return Complex.polar(r, t);
 }
  private Map<String, Double> getExpressionPvals(
      Set<String> targets, boolean[] set1, boolean[] set2) {
    Map<String, Double> map = new HashMap<String, Double>();

    Histogram2D h = new Histogram2D(0.2);

    System.out.println("targets = " + targets.size());
    Progress prg = new Progress(targets.size());
    for (String sym : targets) {
      prg.tick();

      if (expMan.getNonZeroRatio(sym) == 0) continue;
      double pval = calcDiffPval(sym, set1, set2, true);

      if (Double.isNaN(pval)) continue;
      if (pval == 0) pval = 1E-11;

      double pPerm = calcDiffPval(sym, set1, set2, false);
      if (pPerm == 0) pPerm = 1E-5;

      h.count(-Math.log(pval), -Math.log(pPerm));

      // pval = 0 is not real and it is not compatible with fisher's combined probability.
      // below is a better approximation.
      //			if (pval == 0)
      //			{
      //				pval = 1E-11;
      //			}

      map.put(sym, pval);
    }

    Histogram2DPlot p = new Histogram2DPlot(h);
    p.setLines(Arrays.asList(new double[] {1, 0}));
    p.setVisible(true);

    return map;
  }