@Override
protected KMeansBisecting<V, M> makeInstance() {
  // Build the algorithm from the parameters collected by this Parameterizer:
  // the requested number of clusters and the inner k-means variant to bisect with.
  final KMeansBisecting<V, M> algorithm = new KMeansBisecting<>(k, kMeansVariant);
  return algorithm;
}
}
/** * Run KMeansBisecting with fixed parameters and compare cluster size to * expected value. */ @Test public void testKMeansBisectingClusterSize() { Database db = makeSimpleDatabase(UNITTEST + "bisecting-test.csv", 300); Clustering<MeanModel> result = new ELKIBuilder<KMeansBisecting<DoubleVector, MeanModel>>(KMeansBisecting.class) // .with(KMeans.K_ID, 3) // .with(KMeans.SEED_ID, 0) // .with(BestOfMultipleKMeans.Parameterizer.TRIALS_ID, 5) // .with(BestOfMultipleKMeans.Parameterizer.KMEANS_ID, KMeansLloyd.class) // .with(BestOfMultipleKMeans.Parameterizer.QUALITYMEASURE_ID, WithinClusterVarianceQualityMeasure.class) // .build().run(db); testClusterSizes(result, new int[] { 103, 97, 100 }); }
/** * Run KMeansBisecting with fixed parameters (k = 2) and compare f-measure to * golden standard. */ @Test public void testKMeansBisectingFMeasure() { Database db = makeSimpleDatabase(UNITTEST + "bisecting-test.csv", 300); KMeansBisecting<DoubleVector, MeanModel> kmeans = new ELKIBuilder<KMeansBisecting<DoubleVector, MeanModel>>(KMeansBisecting.class) // .with(KMeans.K_ID, 2) // .with(KMeans.SEED_ID, 0) // .with(BestOfMultipleKMeans.Parameterizer.TRIALS_ID, 5) // .with(BestOfMultipleKMeans.Parameterizer.KMEANS_ID, KMeansLloyd.class) // .with(BestOfMultipleKMeans.Parameterizer.QUALITYMEASURE_ID, WithinClusterVarianceQualityMeasure.class) // .build(); // run KMedians on database Clustering<MeanModel> result = kmeans.run(db); testFMeasure(db, result, 0.7408); } }
@Override
protected KMeansBisecting<V, M> makeInstance() {
  // Instantiate the algorithm from the parsed parameters: number of clusters
  // and the k-means variant used for each bisection step.
  final KMeansBisecting<V, M> instance = new KMeansBisecting<>(k, kMeansVariant);
  return instance;
}
}
@Override
protected KMeansBisecting<V, M> makeInstance() {
  // Produce a KMeansBisecting configured with the cluster count k and the
  // inner k-means variant chosen during parameterization.
  return new KMeansBisecting<V, M>(k, kMeansVariant);
}
}