/**
 * Inverse transform: maps records expressed in principal-component coordinates
 * (about the mean) back into the original feature space. Make sure to fill in
 * zeroes in columns where components were dropped!
 *
 * @param data Data of the same features used to construct the PCA object but as the components
 * @return The records in terms of the original features
 */
public INDArray convertBackToFeatures(INDArray data) {
    // Contract dim 1 of the eigenvector matrix with dim 1 of the data (i.e. V * X^T),
    // then flip back to row-per-record layout before restoring the mean.
    INDArray projected = Nd4j.tensorMmul(eigenvectors, data, new int[][] {{1}, {1}});
    return projected.transposei().addiRowVector(mean);
}
/**
 * Projects feature-space records (one per row) into the principal-component
 * basis about the mean.
 *
 * @param data Data of the same features used to construct the PCA object
 * @return The record in terms of the principal component vectors, you can set unused ones to zero.
 */
public INDArray convertToComponents(INDArray data) {
    // Center first, then contract dim 1 of V^T with dim 1 of the centered data,
    // and flip the result back to row-per-record orientation.
    INDArray centered = data.subRowVector(mean);
    INDArray basis = eigenvectors.transpose();
    return Nd4j.tensorMmul(basis, centered, new int[][] {{1}, {1}}).transposei();
}
/**
 * Generates a set of <i>count</i> random samples with the same variance and mean and eigenvector/values
 * as the data set used to initialize the PCA object, with same number of features <i>N</i>.
 * @param count The number of samples to generate
 * @return A matrix of size <i>count</i> rows by <i>N</i> columns
 */
public INDArray generateGaussianSamples(long count) {
    // One standard-normal row per requested sample, one column per retained component.
    INDArray samples = Nd4j.randn(new long[] {count, eigenvalues.columns()});
    // NOTE(review): scaling by eigenvalue^-0.5 damps high-variance directions; reproducing
    // the source covariance would normally use eigenvalue^+0.5 — confirm whether
    // `eigenvalues` holds variances or inverse variances before relying on this.
    INDArray factors = Transforms.pow(eigenvalues, -0.5, true);
    samples.muliRowVector(factors);
    // Rotate the scaled samples back into feature space (contract dim 1 with dim 1),
    // transpose to row-per-sample, and restore the mean.
    return Nd4j.tensorMmul(eigenvectors, samples, new int[][] {{1}, {1}}).transposei().addiRowVector(mean);
}
/**
 * Reshapes a 4-d activation tensor into 2-d form: each tensor-along-dimension
 * (3, 2, 0) is flattened into one row, and the result is transposed before return.
 *
 * @param data non-null 4-d input of shape [instances, channels, height, width]
 * @return a 2-d array of the flattened TADs, transposed in place
 */
public static INDArray tailor4d2d(@NonNull INDArray data) {
    long numExamples = data.size(0);
    long depth = data.size(1);
    long rows = data.size(2);
    long cols = data.size(3);

    // One row per TAD, each holding a flattened [height x width x instances] slab.
    INDArray flattened = Nd4j.create(depth, rows * cols * numExamples);
    long tadCount = data.tensorssAlongDimension(3, 2, 0);
    for (int tad = 0; tad < tadCount; tad++) {
        flattened.putRow(tad, Nd4j.toFlattened(data.tensorAlongDimension(tad, 3, 2, 0)));
    }
    return flattened.transposei();
}
// Row i of the gradient: column sums minus the transposed row sums of the pairwise
// classification differences, i.e. sum(0) - sum(1)^T, written as an in-place subtraction
// on the freshly allocated sum(0) result.
gradientOutput.getRow(i).assign(classificationDifferences.sum(0).subi(classificationDifferences.sum(1).transpose()));
/**
 * Inverse PCA transform: rebuilds feature-space records from component-space ones.
 * Make sure to fill in zeroes in columns where components were dropped!
 *
 * @param data Data of the same features used to construct the PCA object but as the components
 * @return The records in terms of the original features
 */
public INDArray convertBackToFeatures(INDArray data) {
    // V contracted against the component data along dim 1 of each operand.
    INDArray rotated = Nd4j.tensorMmul(eigenvectors, data, new int[][] {{1}, {1}});
    // Back to row-per-record orientation, then undo the mean-centering.
    INDArray byRow = rotated.transposei();
    return byRow.addiRowVector(mean);
}
/**
 * Expresses each row of {@code data} in the coordinates of the principal-component
 * basis, relative to the stored mean.
 *
 * @param data Data of the same features used to construct the PCA object
 * @return The record in terms of the principal component vectors, you can set unused ones to zero.
 */
public INDArray convertToComponents(INDArray data) {
    INDArray deviations = data.subRowVector(mean);
    INDArray projected = Nd4j.tensorMmul(eigenvectors.transpose(), deviations, new int[][] {{1}, {1}});
    // tensorMmul produced components-by-records; callers expect records-by-components.
    return projected.transposei();
}
/**
 * Draws <i>count</i> random samples consistent with the mean and
 * eigenvector/eigenvalue decomposition of the data used to build this PCA object,
 * with the same number of features <i>N</i>.
 *
 * @param count The number of samples to generate
 * @return A matrix of size <i>count</i> rows by <i>N</i> columns
 */
public INDArray generateGaussianSamples(int count) {
    INDArray gaussians = Nd4j.randn(count, eigenvalues.columns());
    // NOTE(review): eigenvalue^-0.5 damps high-variance directions; reproducing the
    // source covariance would normally use eigenvalue^+0.5 — verify what `eigenvalues` stores.
    INDArray scale = Transforms.pow(eigenvalues, -0.5, true);
    gaussians.muliRowVector(scale);
    // Rotate into feature space and restore the mean, one sample per row.
    INDArray rotated = Nd4j.tensorMmul(eigenvectors, gaussians, new int[][] {{1}, {1}});
    return rotated.transposei().addiRowVector(mean);
}
/**
 * Encodes tokenized text into a single embedding vector by summing the
 * Bloom-filter bit arrays of each preprocessed word.
 *
 * @param words the tokens to encode
 * @return a column vector of size getEmbeddingVectorSize() x 1
 */
@Override
public INDArray encode(String[] words) {
    INDArray result = Nd4j.zeros(getEmbeddingVectorSize(), 1);
    for (String word : words) {
        double[] bitArray = bloom.getBitArray(preprocessor.preProcess(word));
        // Each bit array arrives as a row vector; flip it to a column before accumulating.
        result.addi(Nd4j.create(bitArray).transposei());
    }
    return result;
}
/**
 * Encodes a sequence of spans into a single embedding vector by summing the
 * Bloom-filter bit arrays of each span's preprocessed text.
 *
 * @param spans the spans to encode
 * @return a column vector of size getEmbeddingVectorSize() x 1
 */
@Override
public INDArray encode(Iterable<? extends Span> spans) {
    INDArray accumulator = Nd4j.zeros(getEmbeddingVectorSize(), 1);
    for (Span span : spans) {
        double[] bitArray = bloom.getBitArray(preprocessor.preProcess(span.getText()));
        // Bit arrays come back as rows; transpose each (fresh) array into a column.
        INDArray column = Nd4j.create(bitArray).transposei();
        accumulator.addi(column);
    }
    return accumulator;
}
/**
 * Flattens a 4-d activation array [instances, channels, height, width] into a
 * 2-d array with one row per TAD along dimensions (3, 2, 0), then transposes the
 * result in place.
 * <p>
 * Dimension sizes are held as {@code long} (matching the other {@code long}-based
 * variant of this method) so that {@code height * width * instances} cannot
 * overflow {@code int} for large inputs, which the previous {@code int}-based
 * arithmetic could.
 *
 * @param data non-null 4-d input array
 * @return the flattened, transposed 2-d form of the data
 */
public static INDArray tailor4d2d(@NonNull INDArray data) {
    long instances = data.size(0);
    long channels = data.size(1);
    long height = data.size(2);
    long width = data.size(3);

    INDArray in2d = Nd4j.create(channels, height * width * instances);

    long tads = data.tensorssAlongDimension(3, 2, 0);
    for (int i = 0; i < tads; i++) {
        INDArray thisTAD = data.tensorAlongDimension(i, 3, 2, 0);
        in2d.putRow(i, Nd4j.toFlattened(thisTAD));
    }
    return in2d.transposei();
}
public INDArray decode(INDArray y) { INDArray W = getParam(PretrainParamInitializer.WEIGHT_KEY); INDArray vBias = getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY); INDArray preAct = y.mmul(W.transposei()).addiRowVector(vBias); //return Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(conf.getLayer().getActivationFunction(), preAct)); return layerConf().getActivationFn().getActivation(preAct, true); }
/**
 * Computes the denoising-autoencoder gradient and score for the current input.
 * <p>
 * Fix: the previous code used {@code transposei()}, which (a) transposed
 * {@code corruptedX} — the array installed as the layer input — in place, and
 * (b) transposed {@code visibleLoss} in place BEFORE {@code visibleLoss.sum(0)}
 * was taken, so the visible-bias gradient was summed over the wrong axis.
 * {@code transpose()} leaves both operands intact.
 */
@Override
public void computeGradientAndScore() {
    INDArray W = getParam(PretrainParamInitializer.WEIGHT_KEY);

    double corruptionLevel = layerConf().getCorruptionLevel();
    // Denoising autoencoder: optionally corrupt the input before encoding.
    INDArray corruptedX = corruptionLevel > 0 ? getCorruptedInput(input, corruptionLevel) : input;
    setInput(corruptedX);

    INDArray y = encode(corruptedX, true);
    INDArray z = decode(y);
    INDArray visibleLoss = input.sub(z);
    // Backprop the reconstruction error through the (tied) weights, gated by the
    // derivative of the hidden activations; optional sparsity shift.
    INDArray hiddenLoss = layerConf().getSparsity() == 0
            ? visibleLoss.mmul(W).muli(y).muli(y.rsub(1))
            : visibleLoss.mmul(W).muli(y).muli(y.add(-layerConf().getSparsity()));

    // transpose() (not transposei()) so corruptedX and visibleLoss stay unmodified
    // for the bias-gradient sums below.
    INDArray wGradient = corruptedX.transpose().mmul(hiddenLoss).addi(visibleLoss.transpose().mmul(y));
    INDArray hBiasGradient = hiddenLoss.sum(0);
    INDArray vBiasGradient = visibleLoss.sum(0);

    gradient = createGradient(wGradient, vBiasGradient, hBiasGradient);
    setScoreWithZ(z);
}
// Contrastive-divergence weight gradient: positive phase v^T * h minus negative phase v'^T * h'.
// Use transpose() rather than transposei(): the in-place variant would permute the cached
// input() array itself, corrupting it for any later use in this pass (and was already
// inconsistent with the non-mutating negVProb.transpose() in the same expression).
INDArray wGradient = input().transpose().mmul(probHidden.getFirst()).subi(negVProb.transpose().mmul(negHProb));