private List<Vector2D> makeBlobs(int centers, double clusterStd, double min, double max) {
    NormalDistribution dist = new NormalDistribution(random, 0.0, clusterStd, 1e-9);
    double range = max - min;

    // Pick cluster centers uniformly within [min, max] x [min, max].
    Vector2D[] centerPoints = new Vector2D[centers];
    for (int i = 0; i < centers; i++) {
        centerPoints[i] = new Vector2D(random.nextDouble() * range + min,
                                       random.nextDouble() * range + min);
    }

    // Distribute the total sample count as evenly as possible across centers.
    int[] nSamplesPerCenter = new int[centers];
    int count = samples / centers;
    Arrays.fill(nSamplesPerCenter, count);
    for (int i = 0; i < samples % centers; i++) {
        nSamplesPerCenter[i]++;
    }

    // Scatter Gaussian noise around each center.
    List<Vector2D> points = new ArrayList<>();
    for (int i = 0; i < centers; i++) {
        for (int j = 0; j < nSamplesPerCenter[i]; j++) {
            points.add(new Vector2D(dist.sample(), dist.sample()).add(centerPoints[i]));
        }
    }
    return points;
}
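// Usage sketch for makeBlobs, hedged: it assumes the enclosing class provides
// `random` (a Commons Math RandomGenerator) and `samples` (the total point
// budget), as the method body implies. Three blobs with sigma 0.5 and centers
// drawn uniformly from [-10, 10]^2; the numbers are illustrative only.
List<Vector2D> blobs = makeBlobs(3, 0.5, -10.0, 10.0);
for (Vector2D p : blobs) {
    System.out.println(p.getX() + "," + p.getY());
}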
@Override
protected OperationData process(IDataset input, IMonitor monitor) {
    double theta = 0;
    try {
        theta = ScanMetadata.getTheta(input);
    } catch (Exception e) {
        // No scan metadata available; fall back to theta = 0.
    }
    NormalDistribution beamfootprint = new NormalDistribution(
        0, (1e-3 * model.getBeamHeight() / 2 * Math.sqrt(2 * Math.log(2) - 0.5)));
    double areaCorrection = 2 * (beamfootprint.cumulativeProbability(
        (model.getFootprint()
            * Math.sin((theta + model.getAngularFudgeFactor()) * Math.PI / 180))));

    // Cast to double precision first, then scale the cast result (the original
    // multiplied the raw input and discarded the cast).
    Dataset output = DatasetUtils.cast(input, Dataset.FLOAT64);
    output = Maths.multiply(output, areaCorrection);
    Dataset outputSum = DatasetFactory.createFromObject(output.sum());
    return new OperationData(output, outputSum);
}
/** Test of the Simpson integrator on the Gaussian density of N(10, 2). */
@Test
public void testSinFunction() {
    // The integral of the N(10, 2) density over [8, 12] is the one-sigma
    // probability P(|X - mu| <= sigma), about 0.68269.
    UnivariateFunction f = new Gaussian(10, 2);
    UnivariateIntegrator integrator = new SimpsonIntegrator();
    double a = 8;
    double b = 12;
    double expected = 0.68269;
    double tolerance = Math.abs(expected * integrator.getRelativeAccuracy());
    double result = integrator.integrate(MAX_EVAL, f, a, b);
    assertEquals(expected, result, tolerance);
    log.info("Result: " + result
        + ", tolerance: " + tolerance
        + " - Relative accuracy: " + integrator.getRelativeAccuracy()
        + " - Absolute accuracy: " + integrator.getAbsoluteAccuracy()
        + " - Iterations: " + integrator.getIterations());

    // Cross-check against the analytic CDF.
    NormalDistribution distribution = new NormalDistribution(10, 2);
    result = distribution.cumulativeProbability(a, b);
    log.info("Distribution result: " + result);
}
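// Self-contained cross-check of the same idea (a minimal sketch, assuming
// Commons Math 3 on the classpath): integrating the N(10, 2) density over
// [8, 12] must agree with the analytic one-sigma probability of about 0.68269.
// The class name and evaluation budget are illustrative only.
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.function.Gaussian;
import org.apache.commons.math3.analysis.integration.SimpsonIntegrator;
import org.apache.commons.math3.distribution.NormalDistribution;

public class GaussianIntegralCheck {
    public static void main(String[] args) {
        UnivariateFunction density = new Gaussian(10, 2); // mean 10, sigma 2
        double bySimpson = new SimpsonIntegrator().integrate(10000, density, 8, 12);
        double byCdf = new NormalDistribution(10, 2).probability(8, 12);
        System.out.println("Simpson: " + bySimpson + ", analytic: " + byCdf);
    }
}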
/**
 * As described by Bernt Arne Ødegaard in Financial Numerical Recipes in C++.
 *
 * <p>Returns P(X < a, Y < b) where X, Y are Gaussian random variables N(0, 1) of the bivariate
 * normal distribution with correlation c in [-1, 1] between X and Y.
 */
public static double cdf(double a, double b, double c) {
    // NaN never compares equal to itself, so the original `a == Double.NaN`
    // checks could not fire; use Double.isNaN instead.
    if (Double.isNaN(a) || Double.isNaN(b) || Double.isNaN(c)) {
        throw new IllegalArgumentException("Arguments must be a number.");
    }
    a = handleInfinity(a);
    b = handleInfinity(b);
    c = handleInfinity(c);

    if (a <= 0 && b <= 0 && c <= 0) {
        // The quadrature approximation from the reference applies directly in
        // this octant.
        final double aprime = a / FastMath.sqrt(2d * (1d - c * c));
        final double bprime = b / FastMath.sqrt(2d * (1d - c * c));
        double sum = 0;
        for (int i = 0; i < A.length; i++) {
            for (int j = 0; j < A.length; j++) {
                sum += A[i] * A[j] * f(B[i], B[j], aprime, bprime, c);
            }
        }
        sum *= FastMath.sqrt(1d - c * c) / FastMath.PI;
        return sum;
    }

    // a or b may be large enough that their product overflows to NaN; c is
    // bounded in [-1, 1], so multiplying by c first keeps the sign test stable.
    if (c * a * b <= 0) {
        if ((a <= 0) && (b >= 0) && (c >= 0)) {
            return normal.cumulativeProbability(a) - cdf(a, -b, -c);
        } else if ((a >= 0) && (b <= 0) && (c >= 0)) {
            return normal.cumulativeProbability(b) - cdf(-a, b, -c);
        } else if ((a >= 0) && (b >= 0) && (c <= 0)) {
            return normal.cumulativeProbability(a) + normal.cumulativeProbability(b) - 1
                + cdf(-a, -b, c);
        }
    } else if (c * a * b >= 0) {
        final double denom = FastMath.sqrt(a * a - 2d * c * a * b + b * b);
        final double rho1 = ((c * a - b) * FastMath.signum(a)) / denom;
        final double rho2 = ((c * b - a) * FastMath.signum(b)) / denom;
        final double delta = (1d - FastMath.signum(a) * FastMath.signum(b)) / 4d;
        return cdf(a, 0, rho1) + cdf(b, 0, rho2) - delta;
    }
    throw new RuntimeException(
        "Should never get here. Values of [a; b; c] = [" + a + "; " + b + "; " + c + "].");
}
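// Sanity-check sketch for cdf, not fully self-contained: it calls the method
// above and assumes its enclosing class (with the tabulated A/B coefficients,
// f(), and the static `normal` field). With zero correlation the bivariate CDF
// factors into the product of the marginals, so cdf(a, b, 0) should match
// Phi(a) * Phi(b) to the accuracy of the quadrature.
NormalDistribution phi = new NormalDistribution();
double expected = phi.cumulativeProbability(-0.5) * phi.cumulativeProbability(0.3);
double actual = cdf(-0.5, 0.3, 0.0);
// expected ~ 0.1906; actual should agree to several decimal places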
private static SparseRealMatrix initializeMatrix(SparseRealMatrix matrix, double sigma) {
    // Fill every entry with an independent draw from N(0, sigma).
    NormalDistribution normRandom = new NormalDistribution(0.0, sigma);
    int r = matrix.getRowDimension();
    int c = matrix.getColumnDimension();
    for (int i = 0; i < r; i++) {
        for (int j = 0; j < c; j++) {
            matrix.setEntry(i, j, normRandom.sample());
        }
    }
    return matrix;
}
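// Usage sketch, assuming Commons Math 3's OpenMapRealMatrix as the
// SparseRealMatrix implementation. Note the design tension: filling every
// entry with a Gaussian draw makes the matrix dense in practice, so the
// sparse type only pays off if most entries are later reset to zero.
SparseRealMatrix weights = initializeMatrix(new OpenMapRealMatrix(3, 4), 0.1);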
// ======== per-timeslot activities ========
@Override
public void step() {
    // check for end-of-shift
    Shift newShift = shiftSchedule[indexOfShift(getNowInstant())];
    if (newShift != currentShift) {
        log.info(getName() + " start of shift");

        // Take all batteries out of service
        double totalEnergy = getEnergyCharging() + getEnergyInUse();
        setEnergyCharging(getEnergyCharging() + getEnergyInUse());
        setCapacityInUse(0.0);
        setEnergyInUse(0.0);

        // Put the strongest batteries in trucks for the next shift
        if (null != newShift) {
            setCapacityInUse(newShift.getTrucks() * batteryCapacity);
            setEnergyInUse(Math.min(getCapacityInUse(), totalEnergy));
            setEnergyCharging(totalEnergy - getEnergyInUse());
        }
        log.info(getName() + ": new shift cInUse " + capacityInUse
            + ", eInUse " + energyInUse + ", eCharging " + energyCharging);
        currentShift = newShift;
    }

    // discharge batteries on active trucks
    if (null != currentShift) {
        double usage = Math.max(0.0,
            normal.sample() * truckStd + truckKW * currentShift.getTrucks());
        double deficit = usage - getEnergyInUse();
        log.debug(getName() + ": trucks use " + usage + " kWh");
        if (deficit > 0.0) {
            log.warn(getName() + ": trucks use more energy than available by " + deficit + " kWh");
            addEnergyInUse(deficit);
            addEnergyCharging(-deficit);
        }
        addEnergyInUse(-usage);
    }

    // use energy on chargers, accounting for regulation
    double regulation = getSubscription().getRegulation();
    log.info(getName() + ": regulation " + regulation);
    double energyUsed = useEnergy(regulation);

    // record energy used
    getSubscription().usePower(energyUsed);
    log.info(getName() + " cInUse " + capacityInUse
        + ", eInUse " + energyInUse + ", eCharging " + energyCharging);
}
private void initial() {
    NormalDistribution nd = new NormalDistribution();
    String[] seqList = this.sequence.split(";");
    for (String seq : seqList) {
        BaseIsotopomer bIso = new BaseIsotopomer(seq, this.isotopomerWidth);
        TIntDoubleMap chargeScaleMap = new TIntDoubleHashMap();
        int centralCharge = bIso.getCentralChargeState();
        int totalChargeState = (bIso.getMaxChargeState() - centralCharge) * 2;
        double step = 6.0 / totalChargeState;

        // Scale factors come from the standard-normal density, walking outward
        // from the central charge state in increments of `step`.

        // right side of the central charge state (inclusive) down to charge 9
        double x = 0.0;
        for (int i = centralCharge; i > 8; i--) {
            chargeScaleMap.put(i, nd.density(x));
            x += step;
        }

        // left side of the central charge state (exclusive) up to the max charge
        x = step;
        for (int i = centralCharge + 1; i <= bIso.getMaxChargeState(); i++) {
            chargeScaleMap.put(i, nd.density(x));
            x += step;
        }

        // build the list of charged isotopomers and accumulate their scaled peaks
        for (int charge : chargeScaleMap.keys()) {
            ChargedIsotopomer cIso = new ChargedIsotopomer(bIso, charge);
            isotopomerList.add(cIso);
            TDoubleDoubleMap tempPeakMap = cIso.getScaledPeakMap(chargeScaleMap.get(charge));
            for (double mz : tempPeakMap.keys()) {
                double intensity = peakMap.get(mz);
                if (intensity == peakMap.getNoEntryValue()) {
                    peakMap.put(mz, tempPeakMap.get(mz));
                } else {
                    peakMap.put(mz, intensity + tempPeakMap.get(mz));
                }
            }
        }
    }
}
// Model "a" with a normal distribution, and test whether cdf(mean(b)) > pvalue public boolean significantIncrease(List<Double> a, List<Double> b, double pvalue) { double meanA = mean(a); double sd = 0; for (Double val : a) { sd += (val - meanA) * (val - meanA); } sd = Math.sqrt(sd / (a.size() - 1)); if (sd <= 0) { return true; } double meanB = mean(b); NormalDistribution dist = new NormalDistribution(meanA, sd); double p = dist.cumulativeProbability(meanB); boolean significant = (p > pvalue); System.out.println("p-value=" + p + ", " + (significant ? "increase" : "no increase")); return significant; }
/** {@inheritDoc} */
public ConfidenceInterval createInterval(
        int numberOfTrials, int numberOfSuccesses, double confidenceLevel) {
    IntervalUtils.checkParameters(numberOfTrials, numberOfSuccesses, confidenceLevel);
    final double alpha = (1.0 - confidenceLevel) / 2;
    final NormalDistribution normalDistribution = new NormalDistribution();
    final double z = normalDistribution.inverseCumulativeProbability(1 - alpha);
    final double zSquared = FastMath.pow(z, 2);
    final double mean = (double) numberOfSuccesses / (double) numberOfTrials;

    final double factor = 1.0 / (1 + (1.0 / numberOfTrials) * zSquared);
    final double modifiedSuccessRatio = mean + (1.0 / (2 * numberOfTrials)) * zSquared;
    final double difference = z * FastMath.sqrt(
        1.0 / numberOfTrials * mean * (1 - mean)
            + (1.0 / (4 * FastMath.pow(numberOfTrials, 2)) * zSquared));

    final double lowerBound = factor * (modifiedSuccessRatio - difference);
    final double upperBound = factor * (modifiedSuccessRatio + difference);
    return new ConfidenceInterval(lowerBound, upperBound, confidenceLevel);
}
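// Usage sketch: a 95% interval for 45 successes in 100 trials. The formula
// above is the Wilson score interval, so with Commons Math 3 the result
// should match org.apache.commons.math3.stat.interval.WilsonScoreInterval.
ConfidenceInterval ci = createInterval(100, 45, 0.95);
System.out.println("[" + ci.getLowerBound() + ", " + ci.getUpperBound() + "]");
// roughly [0.356, 0.548]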
// Creates the random-number seeds and the sampling distribution in case we
// don't already have them. Useful for mock-based testing.
private void ensureSeeds() {
    if (null == opSeed) {
        opSeed = service.getRandomSeedRepo()
            .getRandomSeed(LiftTruck.class.getName() + "-" + name, 0, "model");
        evalSeed = service.getRandomSeedRepo()
            .getRandomSeed(LiftTruck.class.getName() + "-" + name, 0, "eval");
        normal = new NormalDistribution(0.0, 1.0);
        normal.reseedRandomGenerator(opSeed.nextLong());
    }
}
public static double normalInverseCDF(double pValue, double mean, double sigma) {
    NormalDistribution normDist = new NormalDistribution(mean, sigma);
    return normDist.inverseCumulativeProbability(pValue);
}
public static double normalCDF(double x, double mean, double sigma) {
    NormalDistribution normDist = new NormalDistribution(mean, sigma);
    return normDist.cumulativeProbability(x);
}
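// Round-trip sketch for the two helpers above: inverseCumulativeProbability
// inverts cumulativeProbability, so composing them should recover x up to the
// solver accuracy. For N(0, 2), normalCDF(1.3, 0, 2) ~ 0.7422.
double p = normalCDF(1.3, 0.0, 2.0);
double x = normalInverseCDF(p, 0.0, 2.0); // ~ 1.3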
private Vector2D generateNoiseVector(NormalDistribution distribution) {
    return new Vector2D(distribution.sample(), distribution.sample());
}
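// Usage sketch, pairing generateNoiseVector with a shared distribution: jitter
// a fixed point with isotropic N(0, 0.25) noise. The point (3, 4) and the
// sigma are illustrative only.
NormalDistribution noise = new NormalDistribution(0.0, 0.25);
Vector2D jittered = new Vector2D(3.0, 4.0).add(generateNoiseVector(noise));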