/**
 * Copies source to dest.
 *
 * <p>Neither source nor dest can be null.
 *
 * @param source Mean to copy
 * @param dest Mean to copy to
 * @throws NullArgumentException if either source or dest is null
 */
public static void copy(Mean source, Mean dest) throws NullArgumentException {
  MathUtils.checkNotNull(source);
  MathUtils.checkNotNull(dest);
  dest.setData(source.getDataRef());
  dest.incMoment = source.incMoment;
  dest.moment = source.moment.copy();
}
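// A minimal usage sketch for the copy method above, assuming the Apache Commons Math 3
// class org.apache.commons.math3.stat.descriptive.moment.Mean (which exposes this static
// copy(Mean, Mean)); the class name and sample values here are illustrative only.
import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class MeanCopyExample {
  public static void main(String[] args) {
    Mean source = new Mean();
    source.increment(1.0);
    source.increment(2.0);
    source.increment(3.0);

    Mean dest = new Mean();
    Mean.copy(source, dest); // dest now carries the same internal moment state as source

    System.out.println(source.getResult()); // 2.0
    System.out.println(dest.getResult());   // 2.0
  }
}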
@Test
public void testResults() throws InterruptedException {
  TaskUtil.run(d_model.getActivityTask());
  assertTrue(d_model.isReady());

  final InconsistencyParameter w =
      new InconsistencyParameter(Arrays.asList(d_ta, d_tb, d_tc, d_ta));
  assertEquals(Collections.singletonList(w), d_model.getInconsistencyFactors());

  double[] dAB = ResultsUtil.getSamples(d_model.getResults(), d_model.getRelativeEffect(d_ta, d_tb), 3);
  double[] dBC = ResultsUtil.getSamples(d_model.getResults(), d_model.getRelativeEffect(d_tb, d_tc), 3);
  double[] wABC = ResultsUtil.getSamples(d_model.getResults(), w, 3);

  // Values below obtained via a run through regular JAGS with 30k/20k
  // iterations. Taking .15 sd as acceptable margin (same as JAGS does
  // for testing against WinBUGS results).
  double mAB = 0.4713884;
  double sAB = 0.4838365;
  assertEquals(mAB, d_mean.evaluate(dAB), FACTOR * sAB);
  assertEquals(sAB, d_stdDev.evaluate(dAB), FACTOR * sAB);

  double mBC = -0.4645146;
  double sBC = 0.6111192;
  assertEquals(mBC, d_mean.evaluate(dBC), FACTOR * sBC);
  assertEquals(sBC, d_stdDev.evaluate(dBC), FACTOR * sBC);

  double mABC = -0.1466253;
  double sABC = 0.4568596;
  assertEquals(mABC, d_mean.evaluate(wABC), FACTOR * sABC);
  assertEquals(sABC, d_stdDev.evaluate(wABC), FACTOR * sABC);

  double[] dBA = ResultsUtil.getSamples(d_model.getResults(), d_model.getRelativeEffect(d_tb, d_ta), 3);
  assertEquals(-mAB, d_mean.evaluate(dBA), FACTOR * sAB);
  assertEquals(sAB, d_stdDev.evaluate(dBA), FACTOR * sAB);

  double[] dAC = ResultsUtil.getSamples(d_model.getResults(), d_model.getRelativeEffect(d_ta, d_tc), 3);
  double mAC = 0.1534991;
  double sAC = 0.5514409;
  assertEquals(mAC, d_mean.evaluate(dAC), FACTOR * sAC);
  assertEquals(sAC, d_stdDev.evaluate(dAC), FACTOR * sAC);
}
private void setFactor() {
  double[] array = new double[mappingReads.size()];
  int i = 0;
  for (Long l : mappingReads.values()) {
    array[i++] = l;
  }
  double meanReads = mean.evaluate(array);
  for (Map.Entry<String, Long> entry : mappingReads.entrySet()) {
    factor.put(entry.getKey(), meanReads / entry.getValue());
  }
}
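// A self-contained sketch of the normalization idea in setFactor() above: each key's factor
// is the mean read count divided by that key's own count. It assumes Apache Commons Math's
// Mean; the map contents and names are made up for illustration.
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class ScalingFactorExample {
  public static void main(String[] args) {
    Map<String, Long> mappingReads = new LinkedHashMap<>();
    mappingReads.put("sampleA", 100L);
    mappingReads.put("sampleB", 200L);
    mappingReads.put("sampleC", 300L);

    double[] counts = mappingReads.values().stream().mapToDouble(Long::doubleValue).toArray();
    double meanReads = new Mean().evaluate(counts); // 200.0

    Map<String, Double> factor = new LinkedHashMap<>();
    for (Map.Entry<String, Long> entry : mappingReads.entrySet()) {
      // Samples with fewer reads than the mean get a factor > 1, deeper samples get < 1.
      factor.put(entry.getKey(), meanReads / entry.getValue());
    }
    System.out.println(factor); // {sampleA=2.0, sampleB=1.0, sampleC=0.666...}
  }
}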
/**
 * Computes the covariance between the two arrays.
 *
 * <p>Array lengths must match and the common length must be at least 2.
 *
 * @param xArray first data array
 * @param yArray second data array
 * @param biasCorrected if true, the returned value will be bias-corrected
 * @return the covariance of the two arrays
 * @throws MathIllegalArgumentException if the array lengths do not match or there is
 *     insufficient data
 */
public double covariance(final double[] xArray, final double[] yArray, boolean biasCorrected)
    throws MathIllegalArgumentException {
  Mean mean = new Mean();
  double result = 0d;
  int length = xArray.length;
  if (length != yArray.length) {
    throw new MathIllegalArgumentException(
        LocalizedFormats.DIMENSIONS_MISMATCH_SIMPLE, length, yArray.length);
  } else if (length < 2) {
    throw new MathIllegalArgumentException(
        LocalizedFormats.INSUFFICIENT_OBSERVED_POINTS_IN_SAMPLE, length, 2);
  } else {
    double xMean = mean.evaluate(xArray);
    double yMean = mean.evaluate(yArray);
    // Streaming update of the mean cross-product of deviations.
    for (int i = 0; i < length; i++) {
      double xDev = xArray[i] - xMean;
      double yDev = yArray[i] - yMean;
      result += (xDev * yDev - result) / (i + 1);
    }
  }
  return biasCorrected ? result * ((double) length / (double) (length - 1)) : result;
}
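// A short usage sketch, assuming the method above is the one with this signature on Apache
// Commons Math's org.apache.commons.math3.stat.correlation.Covariance; the data values are
// made up for illustration.
import org.apache.commons.math3.stat.correlation.Covariance;

public class CovarianceExample {
  public static void main(String[] args) {
    double[] x = {1.0, 2.0, 3.0, 4.0};
    double[] y = {2.0, 4.0, 6.0, 8.0};

    Covariance cov = new Covariance();
    double biased = cov.covariance(x, y, false);  // divides by n
    double unbiased = cov.covariance(x, y, true); // divides by n - 1 (bias-corrected)

    System.out.println(biased);   // 2.5
    System.out.println(unbiased); // 3.333...
  }
}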
private void addControlSamples(String result, Sample sample) {
  List<Double> list = new LinkedList<>();
  for (String s : controlSamples) {
    for (Map.Entry<String, Map<String, Sample>> entry : samples.entrySet()) {
      list.add(entry.getValue().get(s).getNorm1b());
    }
  }
  if (!list.isEmpty()) {
    double meanVal = mean.evaluate(toDoubleArray(list));
    StringBuilder builder = new StringBuilder(result);
    // Only scale by the control mean when it is non-zero, to avoid dividing by zero.
    builder
        .append("\t")
        .append(
            String.format(
                "%.3f%n", meanVal != 0 ? sample.getNorm1b() / meanVal / log.value(2) : 0));
  }
}
public static Double mean(double[] values) {
  Mean meanCalculator = new Mean();
  return meanCalculator.evaluate(values);
}
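// A trivial usage sketch for the helper above; the wrapper class and method placement are
// illustrative, only the Apache Commons Math Mean call is taken from the snippet itself.
import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class MeanHelperExample {
  public static Double mean(double[] values) {
    return new Mean().evaluate(values);
  }

  public static void main(String[] args) {
    System.out.println(mean(new double[] {2.0, 4.0, 9.0})); // 5.0
  }
}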
@Test
public void testLSH() {
  RandomGenerator random = RandomManager.getRandom();
  Mean avgPercentTopRecsConsidered = new Mean();
  Mean avgNDCG = new Mean();
  Mean avgPercentAllItemsConsidered = new Mean();
  for (int iteration = 0; iteration < ITERATIONS; iteration++) {
    LongObjectMap<float[]> Y = new LongObjectMap<float[]>();
    for (int i = 0; i < NUM_ITEMS; i++) {
      Y.put(i, RandomUtils.randomUnitVector(NUM_FEATURES, random));
    }
    float[] userVec = RandomUtils.randomUnitVector(NUM_FEATURES, random);
    double[] results = doTestRandomVecs(Y, userVec);
    double percentTopRecsConsidered = results[0];
    double ndcg = results[1];
    double percentAllItemsConsidered = results[2];
    log.info(
        "Considered {}% of all candidates, {} nDCG, got {}% recommendations correct",
        100 * percentAllItemsConsidered, ndcg, 100 * percentTopRecsConsidered);
    avgPercentTopRecsConsidered.increment(percentTopRecsConsidered);
    avgNDCG.increment(ndcg);
    avgPercentAllItemsConsidered.increment(percentAllItemsConsidered);
  }
  log.info("{}", avgPercentTopRecsConsidered.getResult());
  log.info("{}", avgNDCG.getResult());
  log.info("{}", avgPercentAllItemsConsidered.getResult());
  assertTrue(avgPercentTopRecsConsidered.getResult() > 0.8);
  assertTrue(avgNDCG.getResult() > 0.8);
  assertTrue(avgPercentAllItemsConsidered.getResult() < 0.09);
}
/**
 * Mean/mean linking coefficients are computed from the mean item difficulty and mean item
 * discrimination. The summary statistics are computed in a storeless manner. This method
 * updates the item discrimination and item difficulty summary statistics incrementally so
 * that they can be combined with statistics from other items.
 *
 * @param meanDiscrimination item discrimination mean.
 * @param meanDifficulty item difficulty mean.
 */
public void incrementMeanMean(Mean meanDiscrimination, Mean meanDifficulty) {
  meanDiscrimination.increment(discrimination);
  meanDifficulty.increment(difficulty);
}
/**
 * Mean/sigma linking coefficients are computed from the mean and standard deviation of item
 * difficulty. The summary statistics are computed in a storeless manner. This method updates
 * the item difficulty summary statistics incrementally so that they can be combined with
 * statistics from other items.
 *
 * @param mean item difficulty mean.
 * @param sd item difficulty standard deviation.
 */
public void incrementMeanSigma(Mean mean, StandardDeviation sd) {
  mean.increment(difficulty);
  sd.increment(difficulty);
}
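// A minimal sketch of the storeless aggregation the two increment* methods above rely on,
// using Apache Commons Math's Mean and StandardDeviation directly; the difficulty values
// are made up for illustration.
import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class MeanSigmaAggregationExample {
  public static void main(String[] args) {
    double[] itemDifficulties = {-1.2, -0.4, 0.3, 1.1};

    Mean mean = new Mean();
    StandardDeviation sd = new StandardDeviation(); // bias-corrected (n - 1) by default

    // One increment per item: no array of raw values is retained,
    // which is what "storeless" means here.
    for (double difficulty : itemDifficulties) {
      mean.increment(difficulty);
      sd.increment(difficulty);
    }

    System.out.println(mean.getResult()); // -0.05
    System.out.println(sd.getResult());   // sample standard deviation of the four values
  }
}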
@Test
public void testLSHEffect() {
  RandomGenerator random = RandomManager.getRandom();
  PoissonDistribution itemPerUserDist =
      new PoissonDistribution(
          random,
          20,
          PoissonDistribution.DEFAULT_EPSILON,
          PoissonDistribution.DEFAULT_MAX_ITERATIONS);
  int features = 20;
  ALSServingModel mainModel = new ALSServingModel(features, true, 1.0, null);
  ALSServingModel lshModel = new ALSServingModel(features, true, 0.5, null);

  int userItemCount = 20000;
  for (int user = 0; user < userItemCount; user++) {
    String userID = "U" + user;
    float[] vec = VectorMath.randomVectorF(features, random);
    mainModel.setUserVector(userID, vec);
    lshModel.setUserVector(userID, vec);
    int itemsPerUser = itemPerUserDist.sample();
    Collection<String> knownIDs = new ArrayList<>(itemsPerUser);
    for (int i = 0; i < itemsPerUser; i++) {
      knownIDs.add("I" + random.nextInt(userItemCount));
    }
    mainModel.addKnownItems(userID, knownIDs);
    lshModel.addKnownItems(userID, knownIDs);
  }
  for (int item = 0; item < userItemCount; item++) {
    String itemID = "I" + item;
    float[] vec = VectorMath.randomVectorF(features, random);
    mainModel.setItemVector(itemID, vec);
    lshModel.setItemVector(itemID, vec);
  }

  int numRecs = 10;
  Mean meanMatchLength = new Mean();
  for (int user = 0; user < userItemCount; user++) {
    String userID = "U" + user;
    List<Pair<String, Double>> mainRecs =
        mainModel.topN(new DotsFunction(mainModel.getUserVector(userID)), null, numRecs, null);
    List<Pair<String, Double>> lshRecs =
        lshModel.topN(new DotsFunction(lshModel.getUserVector(userID)), null, numRecs, null);
    int i = 0;
    while (i < lshRecs.size() && i < mainRecs.size() && lshRecs.get(i).equals(mainRecs.get(i))) {
      i++;
    }
    meanMatchLength.increment(i);
  }
  log.info("Mean matching prefix: {}", meanMatchLength.getResult());
  assertTrue(meanMatchLength.getResult() >= 4.0);

  meanMatchLength.clear();
  for (int item = 0; item < userItemCount; item++) {
    String itemID = "I" + item;
    List<Pair<String, Double>> mainRecs =
        mainModel.topN(
            new CosineAverageFunction(mainModel.getItemVector(itemID)), null, numRecs, null);
    List<Pair<String, Double>> lshRecs =
        lshModel.topN(
            new CosineAverageFunction(lshModel.getItemVector(itemID)), null, numRecs, null);
    int i = 0;
    while (i < lshRecs.size() && i < mainRecs.size() && lshRecs.get(i).equals(mainRecs.get(i))) {
      i++;
    }
    meanMatchLength.increment(i);
  }
  log.info("Mean matching prefix: {}", meanMatchLength.getResult());
  assertTrue(meanMatchLength.getResult() >= 5.0);
}