/**
 * Creates an initializer backed by a normal distribution with the given mean and
 * standard deviation, driven by the supplied random generator, and records the
 * caller-provided seed.
 *
 * @param rng source of randomness for the distribution
 * @param mean mean of the normal distribution
 * @param standardDeviation standard deviation of the normal distribution
 * @param seed seed value recorded on this initializer
 */
public NormalDistributionInitializer(RandomGenerator rng, double mean, double standardDeviation, long seed) {
    this.seed = seed;
    // 1e-9 is the inverse-CDF solver accuracy passed through to the distribution.
    this.normalDistribution = new NormalDistribution(rng, mean, standardDeviation, 1e-9);
}
/**
 * Creates an initializer backed by a normal distribution with the given mean and
 * standard deviation, driven by the supplied random generator.
 *
 * NOTE(review): unlike the sibling constructors, this one leaves the {@code seed}
 * field unassigned (it stays at its default) — confirm this is intentional.
 *
 * @param rng source of randomness for the distribution
 * @param mean mean of the normal distribution
 * @param standardDeviation standard deviation of the normal distribution
 */
public NormalDistributionInitializer(RandomGenerator rng, double mean, double standardDeviation) {
    // 1e-9 is the inverse-CDF solver accuracy passed through to the distribution.
    normalDistribution = new NormalDistribution(rng, mean, standardDeviation, 1e-9);
}
/**
 * Evaluates the standard normal (mean 0, sd 1) cumulative distribution function at
 * {@code x}, returning {@link Double#NaN} if the underlying library rejects the input.
 *
 * Fix: the original constructed a fresh {@code NormalDistribution} on every call;
 * the distribution is stateless for CDF evaluation, so it is now cached once.
 * NOTE(review): this evaluates the *standard* normal, not any instance-level
 * distribution — confirm that is the intended contract for callers.
 *
 * @param x point at which to evaluate the standard normal CDF
 * @return P(Z <= x) for Z ~ N(0, 1), or NaN on an invalid-argument failure
 */
private double cumulativeProbability(double x) {
    try {
        return STANDARD_NORMAL.cumulativeProbability(x);
    } catch (IllegalArgumentException ex) {
        // Preserve the original best-effort behavior: signal failure with NaN
        // rather than propagating the exception.
        return Double.NaN;
    }
}

/** Standard normal N(0, 1), cached to avoid re-allocating a distribution per call. */
private static final NormalDistribution STANDARD_NORMAL = new NormalDistribution();
public NormalDistributionInitializer() { this.seed = new Random().nextLong(); normalDistribution = new NormalDistribution(0, 0.0001); normalDistribution.reseedRandomGenerator(seed); }
public NormalDistributionInitializer(double mean, double standardDeviation) { this.seed = new Random().nextLong(); normalDistribution = new NormalDistribution(mean, standardDeviation); normalDistribution.reseedRandomGenerator(seed); }
/**
 * Benchmark fixture setup: populates a uniform-random int array, an overflow-mode
 * fixed-buckets histogram, and a float array of N(50000, 10000) samples, all sized
 * by {@code numEvents}.
 */
@Setup
public void setup() {
    final Random uniformRng = ThreadLocalRandom.current();
    randomValues = new int[numEvents];
    for (int idx = 0; idx < numEvents; idx++) {
        randomValues[idx] = uniformRng.nextInt(UPPER_LIMIT);
    }

    fixedHistogramForAdds = new FixedBucketsHistogram(
        LOWER_LIMIT,
        UPPER_LIMIT,
        numBuckets,
        FixedBucketsHistogram.OutlierHandlingMode.OVERFLOW
    );

    // Gaussian input centered mid-range so most samples land inside the buckets.
    final NormalDistribution gaussian = new NormalDistribution(50000, 10000);
    normalDistributionValues = new float[numEvents];
    for (int idx = 0; idx < numEvents; idx++) {
        normalDistributionValues[idx] = (float) gaussian.sample();
    }
}
/**
 * Two-sided p-values for the model coefficients' z-values: each z-value is
 * mapped to {@code 2 * P(X <= -|z|)} under the reference distribution.
 * Uses Student's t (with the training residual degrees of freedom) when the
 * dispersion parameter was estimated, otherwise the standard normal.
 *
 * @return array of two-sided p-values, parallel to {@code zValues()}
 */
public double [] pValues(){
    double [] res = zValues();
    // Estimated dispersion -> t-distribution; known dispersion -> N(0, 1).
    RealDistribution rd = _dispersionEstimated?new TDistribution(_training_metrics.residual_degrees_of_freedom()):new NormalDistribution();
    for(int i = 0; i < res.length; ++i)
        res[i] = 2*rd.cumulativeProbability(-Math.abs(res[i]));
    return res;
}
// Per-class coefficient vectors — presumably [class][coefficient] for
// multinomial models; TODO confirm layout against the writers of this field.
double[][] _global_beta_multinomial;
/** * @param Wmin smallest Wilcoxon signed rank value * @param N number of subjects (corresponding to x.length) * @return two-sided asymptotic p-value */ private double calculateAsymptoticPValue(final double Wmin, final int N) { final double ES = (double) (N * (N + 1)) / 4.0; /* Same as (but saves computations): * final double VarW = ((double) (N * (N + 1) * (2*N + 1))) / 24; */ final double VarS = ES * ((double) (2 * N + 1) / 6.0); // - 0.5 is a continuity correction final double z = (Wmin - ES - 0.5) / FastMath.sqrt(VarS); // No try-catch or advertised exception because args are valid // pass a null rng to avoid unneeded overhead as we will not sample from this distribution final NormalDistribution standardNormal = new NormalDistribution(null, 0, 1); return 2*standardNormal.cumulativeProbability(z); }
/** * @param Umin smallest Mann-Whitney U value * @param n1 number of subjects in first sample * @param n2 number of subjects in second sample * @return two-sided asymptotic p-value * @throws ConvergenceException if the p-value can not be computed * due to a convergence error * @throws MaxCountExceededException if the maximum number of * iterations is exceeded */ private double calculateAsymptoticPValue(final double Umin, final int n1, final int n2) throws ConvergenceException, MaxCountExceededException { /* long multiplication to avoid overflow (double not used due to efficiency * and to avoid precision loss) */ final long n1n2prod = (long) n1 * n2; // http://en.wikipedia.org/wiki/Mann%E2%80%93Whitney_U#Normal_approximation final double EU = n1n2prod / 2.0; final double VarU = n1n2prod * (n1 + n2 + 1) / 12.0; final double z = (Umin - EU) / FastMath.sqrt(VarU); // No try-catch or advertised exception because args are valid // pass a null rng to avoid unneeded overhead as we will not sample from this distribution final NormalDistribution standardNormal = new NormalDistribution(null, 0, 1); return 2 * standardNormal.cumulativeProbability(z); }
/** {@inheritDoc} */
public ConfidenceInterval createInterval(int numberOfTrials, int numberOfSuccesses, double confidenceLevel) {
    IntervalUtils.checkParameters(numberOfTrials, numberOfSuccesses, confidenceLevel);
    // Observed success proportion p-hat.
    final double successRatio = (double) numberOfSuccesses / (double) numberOfTrials;
    // Split the tail mass evenly between both sides of the interval.
    final double alpha = (1.0 - confidenceLevel) / 2;
    final NormalDistribution standardNormal = new NormalDistribution();
    // Half-width: z_(1-alpha) * sqrt(p-hat * (1 - p-hat) / n).
    final double halfWidth = standardNormal.inverseCumulativeProbability(1 - alpha) *
        FastMath.sqrt(1.0 / numberOfTrials * successRatio * (1 - successRatio));
    return new ConfidenceInterval(successRatio - halfWidth, successRatio + halfWidth, confidenceLevel);
}
/**
 * Creates a new Poisson distribution with specified mean, convergence
 * criterion and maximum number of iterations.
 *
 * @param rng Random number generator.
 * @param p Poisson mean.
 * @param epsilon Convergence criterion for cumulative probabilities.
 * @param maxIterations the maximum number of iterations for cumulative
 * probabilities.
 * @throws NotStrictlyPositiveException if {@code p <= 0}.
 * @since 3.1
 */
public PoissonDistribution(RandomGenerator rng, double p, double epsilon, int maxIterations)
    throws NotStrictlyPositiveException {
    super(rng);

    if (p <= 0) {
        throw new NotStrictlyPositiveException(LocalizedFormats.MEAN, p);
    }
    mean = p;
    this.epsilon = epsilon;
    this.maxIterations = maxIterations;

    // Use the same RNG instance as the parent class.
    // N(p, sqrt(p)) matches the CLT approximation of Poisson(p) — presumably
    // used by large-mean approximation/sampling paths; confirm against callers.
    normal = new NormalDistribution(rng, p, FastMath.sqrt(p),
            NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
    exponential = new ExponentialDistribution(rng, 1,
            ExponentialDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
}
/**
 * Adds a sampler that draws long values from a normal distribution with the
 * given mean and standard deviation, running for the specified duration.
 *
 * @param mean mean of the normal distribution
 * @param sd standard deviation of the normal distribution
 * @param duration how long to run, in {@code units}
 * @param units time unit for {@code duration}
 * @return this builder, for chaining
 */
public Builder normal(double mean, double sd, long duration, TimeUnit units) {
    final NormalDistribution distribution = new NormalDistribution(mean, sd);
    final String label = "normal(" + mean + ")";
    // Each draw truncates the sampled double to a long.
    return add(label, () -> (long) distribution.sample(), duration, units);
}
/** {@inheritDoc} */
public ConfidenceInterval createInterval(int numberOfTrials, int numberOfSuccesses, double confidenceLevel) {
    IntervalUtils.checkParameters(numberOfTrials, numberOfSuccesses, confidenceLevel);
    // Split the tail mass evenly between both sides of the interval.
    final double alpha = (1.0 - confidenceLevel) / 2;
    final NormalDistribution standardNormal = new NormalDistribution();
    final double z = standardNormal.inverseCumulativeProbability(1 - alpha);
    final double zSquared = FastMath.pow(z, 2);
    // Adjusted trial count: n-tilde = n + z^2.
    final double adjustedTrials = numberOfTrials + zSquared;
    // Adjusted success proportion: p-tilde = (successes + z^2 / 2) / n-tilde.
    final double adjustedRatio = (1.0 / adjustedTrials) * (numberOfSuccesses + 0.5 * zSquared);
    // Half-width: z * sqrt(p-tilde * (1 - p-tilde) / n-tilde).
    final double halfWidth = z * FastMath.sqrt(1.0 / adjustedTrials * adjustedRatio * (1 - adjustedRatio));
    return new ConfidenceInterval(adjustedRatio - halfWidth, adjustedRatio + halfWidth, confidenceLevel);
}
// NOTE(review): fragment of a larger switch on the distribution type; the
// enclosing switch and the preceding case body are outside this view.
break;
case NORMAL:
    // Continuous normal distribution parameterized by the schema's mean/stddev.
    distribution = new NormalDistribution(schema.getMean(), schema.getStandardDeviation());
    break;
case ROUNDED_NORMAL:
    // Same normal distribution, wrapped so samples are rounded — presumably to
    // integer values; TODO confirm RealRoundingDistribution's rounding rule.
    NormalDistribution normalDist = new NormalDistribution(schema.getMean(), schema.getStandardDeviation());
    distribution = new RealRoundingDistribution(normalDist);
    break;
/** {@inheritDoc} */
public ConfidenceInterval createInterval(int numberOfTrials, int numberOfSuccesses, double confidenceLevel) {
    IntervalUtils.checkParameters(numberOfTrials, numberOfSuccesses, confidenceLevel);
    // Split the tail mass evenly between both sides of the interval.
    final double alpha = (1.0 - confidenceLevel) / 2;
    final NormalDistribution standardNormal = new NormalDistribution();
    final double z = standardNormal.inverseCumulativeProbability(1 - alpha);
    final double zSquared = FastMath.pow(z, 2);
    // Observed success proportion p-hat.
    final double observedRatio = (double) numberOfSuccesses / (double) numberOfTrials;
    // Common scale term 1 / (1 + z^2 / n).
    final double scale = 1.0 / (1 + (1.0 / numberOfTrials) * zSquared);
    // Center of the interval before scaling: p-hat + z^2 / (2n).
    final double center = observedRatio + (1.0 / (2 * numberOfTrials)) * zSquared;
    // Half-width before scaling: z * sqrt(p-hat(1 - p-hat)/n + z^2/(4n^2)).
    final double halfWidth = z * FastMath.sqrt(1.0 / numberOfTrials * observedRatio * (1 - observedRatio) +
        (1.0 / (4 * FastMath.pow(numberOfTrials, 2)) * zSquared));
    final double lowerBound = scale * (center - halfWidth);
    final double upperBound = scale * (center + halfWidth);
    return new ConfidenceInterval(lowerBound, upperBound, confidenceLevel);
}
/**
 * The within-bin smoothing kernel. Returns a Gaussian distribution
 * parameterized by {@code bStats}, unless the bin contains only one
 * observation (or has zero variance), in which case a constant
 * distribution is returned.
 *
 * @param bStats summary statistics for the bin
 * @return within-bin kernel parameterized by bStats
 */
protected RealDistribution getKernel(SummaryStatistics bStats) {
    // Degenerate bin: a single observation or zero spread — use a point mass.
    if (bStats.getN() == 1 || bStats.getVariance() == 0) {
        return new ConstantRealDistribution(bStats.getMean());
    }
    return new NormalDistribution(randomData.getRandomGenerator(),
        bStats.getMean(), bStats.getStandardDeviation(),
        NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
}
}
/**
 * Draws 10001 samples from the Normal sampler and checks that the empirical
 * median is close to the standard normal's true median (0).
 */
@Test
public void testSample() throws Exception {
    final double[] samples = new double[10001];
    final Sampler<Double> sampler = new Normal();
    for (int i = 0; i < samples.length; i++) {
        samples[i] = sampler.sample();
    }
    Arrays.sort(samples);
    final NormalDistribution reference = new NormalDistribution(
        RandomUtils.getRandom().getRandomGenerator(),
        0, 1, NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
    // The middle element of 10001 sorted samples is the empirical median.
    assertEquals("Median", reference.inverseCumulativeProbability(0.5), samples[5000], 0.04);
}
}
/**
 * Feeds 100k seeded N(50000, 10000) samples into a 1000-bucket histogram over
 * [0, 100000] and pins the resulting quantile estimates (seeded RNG makes the
 * expected values deterministic).
 */
@Test
public void testNormalDistribution() {
    final NormalDistribution gaussian = new NormalDistribution(
        new JDKRandomGenerator(1000),
        50000,
        10000
    );
    final FixedBucketsHistogram histogram = new FixedBucketsHistogram(
        0,
        100000,
        1000,
        FixedBucketsHistogram.OutlierHandlingMode.OVERFLOW
    );
    for (int i = 0; i < 100000; i++) {
        histogram.add(gaussian.sample());
    }
    final float[] quantiles = histogram.percentilesFloat(new double[]{12.5f, 25.0f, 50.0f, 98f});
    Assert.assertArrayEquals(
        new float[]{38565.324f, 43297.95f, 50091.902f, 70509.125f},
        quantiles,
        0.01f
    );
}
/**
 * Evaluates the normal probability density function at {@code x} for the given
 * Gaussian. Note: despite the name, this returns the PDF value (which can
 * exceed 1 for small variances), not a cumulative or point probability —
 * confirm callers expect a density.
 *
 * @param gaussianDistribution source of the mean and variance
 * @param x point at which to evaluate the density
 * @return the density of N(mean, variance) at {@code x}
 */
static public double probability(GaussianDistribution gaussianDistribution, Number x){
    // NormalDistribution takes a standard deviation, so convert from variance.
    NormalDistribution distribution = new NormalDistribution(gaussianDistribution.getMean(), Math.sqrt(gaussianDistribution.getVariance()));
    return distribution.density(x.doubleValue());
}
/**
 * Maps a univariate z-score to the equivalent threshold on a chi-squared
 * distribution with {@code p} degrees of freedom, by matching cumulative
 * probabilities. {@code p} is presumably the data dimensionality — confirm
 * against the enclosing class.
 *
 * @param zscore univariate z-score to convert
 * @return the chi-squared quantile with the same cumulative probability
 */
public double getZScoreEquivalent(double zscore) {
    // compute zscore to CDF
    double cdf = (new NormalDistribution()).cumulativeProbability(zscore);
    // for normal distribution, mahalanobis distance is chi-squared
    // https://en.wikipedia.org/wiki/Mahalanobis_distance#Normal_distributions
    return (new ChiSquaredDistribution(p)).inverseCumulativeProbability(cdf);
}
}