/**
 * Compute the per-component log-probability of each sample, weighted by the
 * mixing coefficients: {@code lpr[i][j] = log p(sample_i | gaussian_j) + log w_j}.
 *
 * @param samples
 *            the sample points (one per row)
 * @return matrix of weighted log-probabilities, [sample][gaussian]
 */
protected double[][] computeWeightedLogProb(double[][] samples) {
	final double[][] weightedLogProb = logProbability(samples);
	final int nComponents = weightedLogProb[0].length;

	for (int g = 0; g < nComponents; g++) {
		// hoist the log-weight: it is constant across samples for a component
		final double logWeight = Math.log(this.weights[g]);

		for (int s = 0; s < weightedLogProb.length; s++) {
			weightedLogProb[s][g] += logWeight;
		}
	}

	return weightedLogProb;
}
@Override
public double estimateLogProbability(double[] sample) {
	// wrap the single point in a one-row batch and delegate to the batch version
	final double[][] batch = new double[][] { sample };
	return estimateLogProbability(batch)[0];
}
/**
 * Predict the log-posterior for a single sample: the log-probability of the
 * point belonging to each of the gaussians in the mixture.
 *
 * @param sample
 *            the sample point
 * @return the log-probability for each gaussian
 */
public double[] predictLogPosterior(double[] sample) {
	// delegate to the batch version with a one-row batch
	return predictLogPosterior(new double[][] { sample })[0];
}
/** * Predict the log-posterior for the given samples; this is the * log-probability of each sample point belonging to each of the gaussians * in the mixture. * * @param samples * the samples * @return the log-probability for each gaussian */ public double[][] predictLogPosterior(double[][] samples) { if (samples[0].length != this.gaussians[0].getMean().getColumnDimension()) { throw new IllegalArgumentException( "The number of dimensions of the given data is not compatible with the model"); } final double[][] lpr = computeWeightedLogProb(samples); final double[] logprob = logsumexp(lpr); final double[][] responsibilities = new double[samples.length][gaussians.length]; for (int i = 0; i < samples.length; i++) { for (int j = 0; j < gaussians.length; j++) { responsibilities[i][j] = lpr[i][j] - logprob[i]; // note no exp // as want // log prob } } return responsibilities; }
/**
 * Load a {@link MixtureOfGaussians} from the MATLAB file named by
 * {@code GMM_MATLAB_FILE}. The file is expected to contain a struct
 * {@code codebook} with fields {@code mean} (dims x gaussians),
 * {@code variance} (dims x gaussians) and {@code coef} (mixing weights).
 *
 * @return the loaded mixture model
 * @throws IOException
 *             if the file cannot be read
 */
private static MixtureOfGaussians loadMoG() throws IOException {
	final MatFileReader reader = new MatFileReader(new File(GMM_MATLAB_FILE));
	final MLStructure codebook = (MLStructure) reader.getContent().get("codebook");

	final MLSingle means = (MLSingle) codebook.getField("mean");
	final MLSingle variances = (MLSingle) codebook.getField("variance");
	final MLSingle coefs = (MLSingle) codebook.getField("coef");

	// matrices are stored dims-by-gaussians (columns are components)
	final int nGaussians = means.getN();
	final int nDims = means.getM();

	final double[] weights = new double[nGaussians];
	final MultivariateGaussian[] components = new MultivariateGaussian[nGaussians];

	for (int g = 0; g < nGaussians; g++) {
		weights[g] = coefs.get(g, 0);

		final DiagonalMultivariateGaussian gauss = new DiagonalMultivariateGaussian(nDims);
		for (int d = 0; d < nDims; d++) {
			gauss.mean.set(0, d, means.get(d, g));
			gauss.variance[d] = variances.get(d, g);
		}
		components[g] = gauss;
	}

	return new MixtureOfGaussians(components, weights);
}
// take the second element of scoreSamples' (log-probabilities, posteriors) pair
final double[][] posteriors = gmm.scoreSamples(X).secondObject();
@Override
public double[] sample(Random rng) {
	// draw a batch of one sample and unwrap it
	final double[][] single = sample(1, rng);
	return single[0];
}
/**
 * Get the log-probability of the given points in space relative to the PDF
 * represented by the gaussian mixture.
 *
 * @param samples
 *            the points (one per row)
 * @return the log-probability of each point
 * @throws IllegalArgumentException
 *             if the sample dimensionality does not match the model
 */
@Override
public double[] estimateLogProbability(double[][] samples) {
	if (samples[0].length != this.gaussians[0].getMean().getColumnDimension()) {
		throw new IllegalArgumentException(
				"The number of dimensions of the given data is not compatible with the model");
	}

	final double[][] lpr = computeWeightedLogProb(samples);

	// Use the log-sum-exp trick (as scoreSamples and predictLogPosterior do)
	// rather than naively summing exp(lpr[i][j]) and taking the log: for very
	// negative log-probabilities exp() underflows to 0 and the naive sum
	// collapses to log(0) = -Infinity.
	return logsumexp(lpr);
}
/**
 * Compute the posterior distribution of the samples, and the overall log
 * probability of each sample as belonging to the model.
 *
 * @param samples
 *            the sample points (one per row)
 * @return a pair of (log probabilities, log posterior probabilities)
 * @throws IllegalArgumentException
 *             if the sample dimensionality does not match the model
 */
public IndependentPair<double[], double[][]> scoreSamples(double[][] samples) {
	final int expectedDims = this.gaussians[0].getMean().getColumnDimension();
	if (samples[0].length != expectedDims) {
		throw new IllegalArgumentException(
				"The number of dimensions of the given data is not compatible with the model");
	}

	final double[][] weightedLogProb = computeWeightedLogProb(samples);
	final double[] logLikelihoods = logsumexp(weightedLogProb);

	// normalise per-component log-probs by the total and exponentiate
	final double[][] responsibilities = new double[samples.length][gaussians.length];
	for (int s = 0; s < samples.length; s++) {
		for (int g = 0; g < gaussians.length; g++) {
			responsibilities[s][g] = Math.exp(weightedLogProb[s][g] - logLikelihoods[s]);
		}
	}

	return IndependentPair.pair(logLikelihoods, responsibilities);
}
/**
 * Load a {@link MixtureOfGaussians} from the MATLAB file named by
 * {@code GMM_MATLAB_FILE}, reading the {@code codebook} struct's
 * {@code mean}, {@code variance} and {@code coef} fields.
 *
 * @return the loaded mixture model
 * @throws IOException
 *             if the file cannot be read
 */
private static MixtureOfGaussians loadMoG() throws IOException {
	final File f = new File(GMM_MATLAB_FILE);
	final MLStructure codebook = (MLStructure) new MatFileReader(f).getContent().get("codebook");

	final MLSingle mean = (MLSingle) codebook.getField("mean");
	final MLSingle variance = (MLSingle) codebook.getField("variance");
	final MLSingle coef = (MLSingle) codebook.getField("coef");

	// columns index gaussians; rows index dimensions
	final int numGaussians = mean.getN();
	final int numDims = mean.getM();

	final MultivariateGaussian[] gaussians = new MultivariateGaussian[numGaussians];
	final double[] mixWeights = new double[numGaussians];

	for (int gi = 0; gi < numGaussians; gi++) {
		mixWeights[gi] = coef.get(gi, 0);

		final DiagonalMultivariateGaussian dg = new DiagonalMultivariateGaussian(numDims);
		for (int di = 0; di < numDims; di++) {
			dg.mean.set(0, di, mean.get(di, gi));
			dg.variance[di] = variance.get(di, gi);
		}
		gaussians[gi] = dg;
	}

	return new MixtureOfGaussians(gaussians, mixWeights);
}
// take the second element of scoreSamples' (log-probabilities, posteriors) pair
final double[][] posteriors = gmm.scoreSamples(X).secondObject();
final MixtureOfGaussians gmm = gmmem.estimate(sample64);
// compute the same per-gaussian log-probability via the instance method and the
// static helper, on the same single point, so the two can be compared
final double[][] v1 = gmm.logProbability(new double[][] { sample64[0] });
final double[][] v2 = MixtureOfGaussians.logProbability(new double[][] { sample64[0] }, gmm.gaussians);
/**
 * Load a {@link MixtureOfGaussians} from a MATLAB file containing a struct
 * {@code codebook} with fields {@code mean} (dims x gaussians),
 * {@code variance} (dims x gaussians) and {@code coef} (mixing weights).
 *
 * @param f
 *            the MATLAB file to read
 * @return the loaded mixture model
 * @throws IOException
 *             if the file cannot be read
 */
public static MixtureOfGaussians loadMoG(File f) throws IOException {
	final MatFileReader reader = new MatFileReader(f);
	final MLStructure codebook = (MLStructure) reader.getContent().get("codebook");

	final MLSingle means = (MLSingle) codebook.getField("mean");
	final MLSingle variances = (MLSingle) codebook.getField("variance");
	final MLSingle coefs = (MLSingle) codebook.getField("coef");

	// components are stored column-wise
	final int nGaussians = means.getN();
	final int nDims = means.getM();

	final double[] weights = new double[nGaussians];
	final MultivariateGaussian[] components = new MultivariateGaussian[nGaussians];

	for (int g = 0; g < nGaussians; g++) {
		weights[g] = coefs.get(g, 0);

		final DiagonalMultivariateGaussian gauss = new DiagonalMultivariateGaussian(nDims);
		for (int d = 0; d < nDims; d++) {
			gauss.mean.set(0, d, means.get(d, g));
			gauss.variance[d] = variances.get(d, g);
		}
		components[g] = gauss;
	}

	return new MixtureOfGaussians(components, weights);
}
@Override
public double estimateProbability(double[] sample) {
	// exponentiate the log-probability to recover the raw density value
	final double logProb = estimateLogProbability(sample);
	return Math.exp(logProb);
}
/**
 * Predict the class (the index of the most-probable gaussian) to which the
 * given data point belongs.
 *
 * @param data
 *            the data point
 * @return the class index
 */
public int predict(double[] data) {
	// the arg-max over log-posteriors is the most probable component
	return ArrayUtils.maxIndex(predictLogPosterior(data));
}
}
final MixtureOfGaussians gmm = gmmem.estimate(sample64);
// compute the same per-gaussian log-probability via the instance method and the
// static helper, on the same single point, so the two can be compared
final double[][] v1 = gmm.logProbability(new double[][] { sample64[0] });
final double[][] v2 = MixtureOfGaussians.logProbability(new double[][] { sample64[0] }, gmm.gaussians);
/**
 * Load a {@link MixtureOfGaussians} from a MATLAB {@code codebook} struct
 * with {@code mean}, {@code variance} and {@code coef} fields.
 *
 * @param f
 *            the MATLAB file to read
 * @return the loaded mixture model
 * @throws IOException
 *             if the file cannot be read
 */
public static MixtureOfGaussians loadMoG(File f) throws IOException {
	final MLStructure codebook = (MLStructure) new MatFileReader(f).getContent().get("codebook");

	final MLSingle mean = (MLSingle) codebook.getField("mean");
	final MLSingle variance = (MLSingle) codebook.getField("variance");
	final MLSingle coef = (MLSingle) codebook.getField("coef");

	// rows are dimensions, columns are mixture components
	final int numGaussians = mean.getN();
	final int numDims = mean.getM();

	final MultivariateGaussian[] gaussians = new MultivariateGaussian[numGaussians];
	final double[] mixWeights = new double[numGaussians];

	for (int gi = 0; gi < numGaussians; gi++) {
		mixWeights[gi] = coef.get(gi, 0);

		final DiagonalMultivariateGaussian dg = new DiagonalMultivariateGaussian(numDims);
		for (int di = 0; di < numDims; di++) {
			dg.mean.set(0, di, mean.get(di, gi));
			dg.variance[di] = variance.get(di, gi);
		}
		gaussians[gi] = dg;
	}

	return new MixtureOfGaussians(gaussians, mixWeights);
}