@Override
public double maxValue() {
  return delegate.maxValue();
}
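// A minimal, self-contained sketch of the delegation idiom used above.
// `HasMax` and `DelegatingMax` are invented stand-ins for the real Vector
// types: the wrapper simply forwards maxValue() to the instance it decorates.
interface HasMax {
  double maxValue();
}

final class DelegatingMax implements HasMax {
  private final HasMax delegate;

  DelegatingMax(HasMax delegate) {
    this.delegate = delegate;
  }

  @Override
  public double maxValue() {
    return delegate.maxValue();
  }
}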
double max = vec1.maxValue();
assertEquals(-1.0, max, 0.0);
max = vec1.maxValue();
assertEquals(0.0, max, EPSILON);
max = vec1.maxValue();
assertEquals(Double.NEGATIVE_INFINITY, max, EPSILON);

vec1.setQuick(2, -2);
max = vec1.maxValue();
assertEquals(0.0, max, EPSILON);
max = vec1.maxValue();
assertEquals(Double.NEGATIVE_INFINITY, max, EPSILON);
/**
 * Decides whether the vector should be classified or not based on the max pdf
 * value of the clusters and threshold value.
 *
 * @return whether the vector should be classified or not.
 */
private static boolean shouldClassify(Vector pdfPerCluster, Double clusterClassificationThreshold) {
  return pdfPerCluster.maxValue() >= clusterClassificationThreshold;
}
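// A runnable sketch of the threshold check above, using Mahout's DenseVector;
// the pdf values here are invented for illustration.
import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class ShouldClassifyDemo {
  public static void main(String[] args) {
    // Hypothetical pdf values for three clusters.
    Vector pdfPerCluster = new DenseVector(new double[] {0.10, 0.85, 0.05});

    // maxValue() is 0.85: the vector clears a 0.5 threshold but not 0.9.
    System.out.println(pdfPerCluster.maxValue() >= 0.5); // true
    System.out.println(pdfPerCluster.maxValue() >= 0.9); // false
  }
}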
private static void classifyAndWrite(List<Cluster> clusterModels, Double clusterClassificationThreshold,
    boolean emitMostLikely, SequenceFile.Writer writer, VectorWritable vw, Vector pdfPerCluster)
    throws IOException {
  Map<Text, Text> props = Maps.newHashMap();
  if (emitMostLikely) {
    int maxValueIndex = pdfPerCluster.maxValueIndex();
    WeightedPropertyVectorWritable weightedPropertyVectorWritable =
        new WeightedPropertyVectorWritable(pdfPerCluster.maxValue(), vw.get(), props);
    write(clusterModels, writer, weightedPropertyVectorWritable, maxValueIndex);
  } else {
    writeAllAboveThreshold(clusterModels, clusterClassificationThreshold, writer, vw, pdfPerCluster);
  }
}
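// A runnable sketch of the emit-most-likely branch's core step: the index of
// the maximum pdf picks the cluster, and the pdf itself becomes the weight on
// the emitted vector. The pdf values below are invented.
import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class MostLikelyClusterDemo {
  public static void main(String[] args) {
    // Hypothetical per-cluster pdf values.
    Vector pdfPerCluster = new DenseVector(new double[] {0.2, 0.7, 0.1});

    int maxValueIndex = pdfPerCluster.maxValueIndex();
    double weight = pdfPerCluster.maxValue();
    System.out.println(maxValueIndex + " -> " + weight); // 1 -> 0.7
  }
}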
// Max element of the matrix: reduce each column to its maximum, then take the
// max of those per-column maxima ("matrix" is assumed from the surrounding context).
double max = matrix.aggregateColumns(new VectorFunction() {
  @Override
  public double apply(Vector column) {
    return column.maxValue();
  }
}).maxValue();
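// A self-contained sketch of the same column-wise reduction on a small
// Mahout DenseMatrix; the matrix contents are invented for illustration.
import org.apache.mahout.math.DenseMatrix;
import org.apache.mahout.math.Matrix;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.function.VectorFunction;

public class MatrixMaxDemo {
  public static void main(String[] args) {
    Matrix m = new DenseMatrix(new double[][] {
        {1, 7, 3},
        {4, 2, 6},
    });

    // aggregateColumns applies the function to each column and returns a
    // vector of per-column results; maxValue() then reduces those to one value.
    double max = m.aggregateColumns(new VectorFunction() {
      @Override
      public double apply(Vector column) {
        return column.maxValue();
      }
    }).maxValue();

    System.out.println(max); // 7.0
  }
}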
/**
 * Always use the global min and max of the feature vector, so every class's
 * CDF is built over the same range and the bins line up across classes.
 *
 * @param vector          the vector whose global min/max bound the histograms
 * @param inputsEachClass the raw input values, one list per class
 * @return one EmpiricalCDF per class, all sharing the same [min, max] range
 */
public List<EmpiricalCDF> generateCDFs(Vector vector, List<List<Double>> inputsEachClass) {
  double min = vector.minValue();
  double max = vector.maxValue();
  return inputsEachClass.stream()
      .map(list -> new EmpiricalCDF(list, min, max, numBins))
      .collect(Collectors.toList());
}
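// A runnable sketch of the global-bounds idea above: minValue()/maxValue()
// yield one shared [min, max] range for all per-class CDFs. Data invented.
import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class GlobalBoundsDemo {
  public static void main(String[] args) {
    // Hypothetical feature column spanning values from every class.
    Vector vector = new DenseVector(new double[] {0.1, 0.9, 0.4, 0.2, 0.8});

    double min = vector.minValue(); // 0.1
    double max = vector.maxValue(); // 0.9
    System.out.println("[" + min + ", " + max + "]");
  }
}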
@Override
public double apply(Vector f) {
  // Return the sum of three discrepancy measures: how far the min is from 0,
  // the max from 6, and the L1 norm from 6.
  return Math.abs(f.minValue()) + Math.abs(f.maxValue() - 6) + Math.abs(f.norm(1) - 6);
}
});
@Override
public Vector classify(Vector instance) {
  Vector result = classifyNoLink(instance);
  // Convert scores to probabilities with a numerically stable softmax:
  // subtracting the max before exponentiating avoids overflow and does not
  // change the normalized result.
  double max = result.maxValue();
  result.assign(Functions.minus(max)).assign(Functions.EXP);
  result = result.divide(result.norm(1));
  return result.viewPart(1, result.size() - 1);
}
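// A runnable sketch of why the max is subtracted first: softmax(x) equals
// softmax(x - max(x)), but the shifted form keeps exp() finite even for
// large scores. The score values here are invented.
import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.function.Functions;

public class StableSoftmaxDemo {
  public static void main(String[] args) {
    // Hypothetical link-scale scores; exp(1000) alone would overflow to infinity.
    Vector scores = new DenseVector(new double[] {1000.0, 1001.0, 1002.0});

    // Subtracting maxValue() leaves {-2, -1, 0}; exponentiation stays finite
    // and normalizing by the L1 norm gives the same probabilities as the
    // unshifted softmax would in exact arithmetic.
    double max = scores.maxValue();
    Vector probs = scores.assign(Functions.minus(max)).assign(Functions.EXP);
    probs = probs.divide(probs.norm(1));

    System.out.println(probs); // ~{0.090, 0.245, 0.665}
  }
}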
@Test
public void testAddToVectorUsesProductOfWeights() {
  WordValueEncoder wv = new StaticWordValueEncoder("word");
  ContinuousValueEncoder cv = new ContinuousValueEncoder("cont");
  InteractionValueEncoder enc = new InteractionValueEncoder("interactions", wv, cv);
  Vector v1 = new DenseVector(200);
  enc.addInteractionToVector("a", "0.9", 0.5, v1);
  int k = enc.getProbes();
  // Should set k distinct locations to 0.9 * 0.5.
  assertEquals((float) k * 0.5 * 0.9, v1.norm(1), 0);
  assertEquals(0.5 * 0.9, v1.maxValue(), 0);
}