@Override
public void reset() {
  super.reset();
  // Sentinel extremes: the first observed value will replace both bounds.
  max = Double.NEGATIVE_INFINITY;
  min = Double.POSITIVE_INFINITY;
}
}
@Override
public void reset() {
  // Reset inherited statistics first, then re-arm the min/max trackers.
  super.reset();
  min = Double.POSITIVE_INFINITY; // any value compares smaller
  max = Double.NEGATIVE_INFINITY; // any value compares larger
}
}
@Override
public void reset() {
  super.reset();
  // Initialize bounds to the identity elements of min/max aggregation.
  this.min = Double.POSITIVE_INFINITY;
  this.max = Double.NEGATIVE_INFINITY;
}
}
@SuppressWarnings("unchecked") @Override public Instance instantiate(Database database) { DistanceQuery<V> dq = QueryUtil.getDistanceQuery(database, distFunc); Relation<V> relation = (Relation<V>) dq.getRelation(); RangeQuery<V> rq = database.getRangeQuery(dq); mvSize.reset(); mvVar.reset(); DataStore<PreDeConModel> storage = preprocess(PreDeConModel.class, relation, rq); if(LOG.isVerbose()) { LOG.verbose("Average neighborhood size: " + mvSize.toString()); LOG.verbose("Average variance size: " + mvVar.toString()); final int dim = RelationUtil.dimensionality(relation); if(mvSize.getMean() < 5 * dim) { LOG.verbose("The epsilon parameter may be chosen too small."); } else if(mvSize.getMean() > .5 * relation.size()) { LOG.verbose("The epsilon parameter may be chosen too large."); } else { LOG.verbose("As a first guess, you can try minPts < " + ((int) mvSize.getMean() / dim) // + " and delta > " + mvVar.getMean() + // ", but you will need to experiment with these parameters and epsilon."); } } return new Instance(dq.getRelation().getDBIDs(), storage); }
@SuppressWarnings("unchecked") @Override public Instance instantiate(Database database) { DistanceQuery<V> dq = QueryUtil.getDistanceQuery(database, distFunc); Relation<V> relation = (Relation<V>) dq.getRelation(); RangeQuery<V> rq = database.getRangeQuery(dq); mvSize.reset(); mvVar.reset(); DataStore<PreDeConModel> storage = preprocess(PreDeConModel.class, relation, rq); if(LOG.isVerbose()) { LOG.verbose("Average neighborhood size: " + mvSize.toString()); LOG.verbose("Average variance size: " + mvVar.toString()); final int dim = RelationUtil.dimensionality(relation); if(mvSize.getMean() < 5 * dim) { LOG.verbose("The epsilon parameter may be chosen too small."); } else if(mvSize.getMean() > .5 * relation.size()) { LOG.verbose("The epsilon parameter may be chosen too large."); } else { LOG.verbose("As a first guess, you can try minPts < " + ((int) mvSize.getMean() / dim) // + " and delta > " + mvVar.getMean() + // ", but you will need to experiment with these parameters and epsilon."); } } return new Instance(dq.getRelation().getDBIDs(), storage); }
@SuppressWarnings("unchecked") @Override public <T> NeighborPredicate.Instance<T> instantiate(Database database, SimpleTypeInformation<?> type) { DistanceQuery<V> dq = QueryUtil.getDistanceQuery(database, distFunc); Relation<V> relation = (Relation<V>) dq.getRelation(); RangeQuery<V> rq = database.getRangeQuery(dq); mvSize.reset(); mvVar.reset(); DataStore<PreDeConModel> storage = preprocess(PreDeConModel.class, relation, rq); if(LOG.isVerbose()) { LOG.verbose("Average neighborhood size: " + mvSize.toString()); LOG.verbose("Average variance size: " + mvVar.toString()); final int dim = RelationUtil.dimensionality(relation); if(mvSize.getMean() < 5 * dim) { LOG.verbose("The epsilon parameter may be chosen too small."); } else if(mvSize.getMean() > .5 * relation.size()) { LOG.verbose("The epsilon parameter may be chosen too large."); } else { LOG.verbose("As a first guess, you can try minPts < " + ((int) mvSize.getMean() / dim) // + " and delta > " + mvVar.getMean() + // ", but you will need to experiment with these parameters and epsilon."); } } return (NeighborPredicate.Instance<T>) new Instance(dq.getRelation().getDBIDs(), storage); }
s.reset(); // Reused double simAA = kernelMatrix.getSimilarity(pA, pA);
mv.put(data[i]); mc.reset(); // Reset statistics for(int j = i + 1 - WINDOWSIZE; j <= i; j++) { mc.put(data[j]);
s.reset(); // Reused double simAA = kernelMatrix.getSimilarity(pA, pA);
s.reset(); // Reused double simAA = kernelMatrix.getSimilarity(pA, pA);
final double density = kernel.subspaceDensity(subspace, neigh); meanv.reset(); for(DoubleDBIDListIter neighbor = neigh.iter(); neighbor.valid(); neighbor.advance()) { DoubleDBIDList n2 = subsetNeighborhoodQuery(neighc, neighbor, df, adjustedEps, kernel);
final double density = kernel.subspaceDensity(subspace, neigh); meanv.reset(); for(DoubleDBIDListIter neighbor = neigh.iter(); neighbor.valid(); neighbor.advance()) { subsetNeighborhoodQuery(neighcand, neighbor, df, adjustedEps, kernel, nn);
/** * Test with default parameters. */ @Test public void defaultParameters() { String filename = UNITTEST + "normalization-test-1.csv"; InstanceMeanVarianceNormalization<DoubleVector> filter = new ELKIBuilder<>(InstanceMeanVarianceNormalization.class).build(); MultipleObjectsBundle bundle = readBundle(filename, filter); int dim = getFieldDimensionality(bundle, 0, TypeUtil.NUMBER_VECTOR_FIELD); // Verify that the resulting data has mean 0 and variance 1 in each row. MeanVariance mvs = new MeanVariance(); for(int row = 0; row < bundle.dataLength(); row++) { mvs.reset(); DoubleVector d = get(bundle, row, 0, DoubleVector.class); for(int col = 0; col < dim; col++) { final double v = d.doubleValue(col); if(v > Double.NEGATIVE_INFINITY && v < Double.POSITIVE_INFINITY) { mvs.put(v); } } assertEquals("Mean is not 0", 0., mvs.getMean(), 1e-14); assertEquals("Variance is not 1", 1., mvs.getNaiveVariance(), 1e-14); } } }
final double density = kernel.subspaceDensity(subspace, neigh); meanv.reset(); for(DoubleDBIDListIter neighbor = neigh.iter(); neighbor.valid(); neighbor.advance()) { subsetNeighborhoodQuery(neighcand, neighbor, df, adjustedEps, kernel, nn);
mv.reset(); for(int j = 0; j < neighbors.size(); j++) { mv.put(scratch[i][j]);
mv.reset(); for(int j = 0; j < neighbors.size(); j++) { mv.put(scratch[i][j]);
s.reset(); DoubleDBIDListIter iB = nl.iter(), iC = nl.iter(); for(; iB.valid(); iB.advance()) {