@GwtIncompatible public void testResistsHashFloodingOnContains() { CallsCounter smallCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesSmall = createAdversarialInput(10, smallCounter); ImmutableSet<?> smallSet = ConstructionPathway.COPY_OF_LIST.create(haveSameHashesSmall); long worstCaseOpsSmall = worstCaseQueryOperations(smallSet, smallCounter); CallsCounter largeCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesLarge = createAdversarialInput(15, largeCounter); ImmutableSet<?> largeSet = ConstructionPathway.COPY_OF_LIST.create(haveSameHashesLarge); long worstCaseOpsLarge = worstCaseQueryOperations(largeSet, largeCounter); double ratio = (double) worstCaseOpsLarge / worstCaseOpsSmall; int smallSize = haveSameHashesSmall.size(); int largeSize = haveSameHashesLarge.size(); assertThat(ratio) .named( "ratio of equals/hashCode/compareTo operations to worst-case query an ImmutableSet " + "of size %s versus size %s", haveSameHashesLarge.size(), haveSameHashesSmall.size()) .isAtMost(2 * Math.log(largeSize) / Math.log(smallSize)); // We allow up to 2x wobble in the constant factors. }
public void testPercentiles_index_compute_doubleCollection() {
  // Check every possible percentile index against the reference implementation.
  for (int i = 0; i <= 100; i++) {
    double expected = expectedLargeDatasetPercentile(i);
    assertThat(percentiles().index(i).compute(PSEUDORANDOM_DATASET))
        .named("quantile at index " + i)
        .isWithin(ALLOWED_ERROR)
        .of(expected);
  }
}
@GwtIncompatible public void testResistsHashFloodingOnCount() { CallsCounter smallCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesSmall = createAdversarialInput(10, smallCounter); int smallSize = haveSameHashesSmall.size(); ImmutableMultiset<?> smallMap = ConstructionPathway.COPY_OF_COLLECTION.create(haveSameHashesSmall); long worstCaseQuerySmall = worstCaseQueryOperations(smallMap, smallCounter); CallsCounter largeCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesLarge = createAdversarialInput(15, largeCounter); int largeSize = haveSameHashesLarge.size(); ImmutableMultiset<?> largeMap = ConstructionPathway.COPY_OF_COLLECTION.create(haveSameHashesLarge); long worstCaseQueryLarge = worstCaseQueryOperations(largeMap, largeCounter); double ratio = (double) worstCaseQueryLarge / worstCaseQuerySmall; assertThat(ratio) .named( "Ratio of worst case query operations for an ImmutableMultiset of size %s versus %s", largeSize, smallSize) .isAtMost(2 * Math.log(largeSize) / Math.log(smallSize)); // allow up to 2x wobble in the constant factors }
@GwtIncompatible public void testResistsHashFloodingOnGet() { CallsCounter smallCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesSmall = createAdversarialInput(10, smallCounter); int smallSize = haveSameHashesSmall.size(); ImmutableMap<?, ?> smallMap = ConstructionPathway.BUILDER_PUT_ONE_BY_ONE.create( haveSameHashesSmall, "valueObject", smallCounter); long worstCaseQuerySmall = worstCaseQueryOperations(smallMap, smallCounter); CallsCounter largeCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesLarge = createAdversarialInput(15, largeCounter); int largeSize = haveSameHashesLarge.size(); ImmutableMap<?, ?> largeMap = ConstructionPathway.BUILDER_PUT_ONE_BY_ONE.create( haveSameHashesLarge, "valueObject", largeCounter); long worstCaseQueryLarge = worstCaseQueryOperations(largeMap, largeCounter); double ratio = (double) worstCaseQueryLarge / worstCaseQuerySmall; assertThat(ratio) .named( "Ratio of worst case query operations for an ImmutableMap of size %s versus %s", largeSize, smallSize) .isAtMost(2 * Math.log(largeSize) / Math.log(smallSize)); // allow up to 2x wobble in the constant factors }
@GwtIncompatible public void testResistsHashFloodingInConstruction() { CallsCounter smallCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesSmall = createAdversarialInput(10, smallCounter); int smallSize = haveSameHashesSmall.size(); CallsCounter largeCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesLarge = createAdversarialInput(15, largeCounter); int largeSize = haveSameHashesLarge.size(); for (ConstructionPathway pathway : ConstructionPathway.values()) { smallCounter.zero(); pathway.create(haveSameHashesSmall); largeCounter.zero(); pathway.create(haveSameHashesLarge); double ratio = (double) largeCounter.total() / smallCounter.total(); assertThat(ratio) .named( "ratio of equals/hashCode/compareTo operations to build an ImmutableSet via pathway " + "%s of size %s versus size %s", pathway, haveSameHashesLarge.size(), haveSameHashesSmall.size()) .isAtMost(2.0 * (largeSize * Math.log(largeSize)) / (smallSize * Math.log(smallSize))); // We allow up to 2x wobble in the constant factors. } }
@GwtIncompatible public void testResistsHashFloodingOnForwardGet() { for (AdversaryType adversary : AdversaryType.values()) { CallsCounter smallCounter = new CallsCounter(); List<? extends Entry<?, ?>> smallEntries = adversary.createAdversarialEntries(10, smallCounter); ImmutableBiMap<?, ?> smallMap = ConstructionPathway.COPY_OF_ENTRIES.create(smallEntries, smallCounter); int smallSize = smallEntries.size(); long smallOps = worstCaseQueryOperations(smallMap, smallCounter); CallsCounter largeCounter = new CallsCounter(); List<? extends Entry<?, ?>> largeEntries = adversary.createAdversarialEntries(15, largeCounter); ImmutableBiMap<?, ?> largeMap = ConstructionPathway.COPY_OF_ENTRIES.create(largeEntries, largeCounter); int largeSize = largeEntries.size(); long largeOps = worstCaseQueryOperations(largeMap, largeCounter); if (smallOps == 0 && largeOps == 0) { continue; // no queries on the CHCAE objects } double ratio = (double) largeOps / smallOps; assertThat(ratio) .named( "Ratio of worst case get operations for an ImmutableBiMap with %s of size " + "%s versus %s", adversary, largeSize, smallSize) .isAtMost(2 * Math.log(largeSize) / Math.log(smallSize)); // allow up to 2x wobble in the constant factors } }
@GwtIncompatible public void testResistsHashFloodingInConstruction() { CallsCounter smallCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesSmall = createAdversarialInput(10, smallCounter); int smallSize = haveSameHashesSmall.size(); CallsCounter largeCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesLarge = createAdversarialInput(15, largeCounter); int largeSize = haveSameHashesLarge.size(); for (ConstructionPathway pathway : ConstructionPathway.values()) { smallCounter.zero(); pathway.create(haveSameHashesSmall); long smallOps = smallCounter.total(); largeCounter.zero(); pathway.create(haveSameHashesLarge); long largeOps = largeCounter.total(); double ratio = (double) largeOps / smallOps; assertThat(ratio) .named( "ratio of equals/hashCode/compareTo operations to build an ImmutableMultiset via %s" + " with %s entries versus %s entries", pathway, largeSize, smallSize) .isAtMost(2 * (largeSize * Math.log(largeSize)) / (smallSize * Math.log(smallSize))); // allow up to 2x wobble in the constant factors } }
@GwtIncompatible public void testResistsHashFloodingInConstruction() { CallsCounter smallCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesSmall = createAdversarialInput(10, smallCounter); int smallSize = haveSameHashesSmall.size(); CallsCounter largeCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesLarge = createAdversarialInput(15, largeCounter); int largeSize = haveSameHashesLarge.size(); for (ConstructionPathway pathway : ConstructionPathway.values()) { smallCounter.zero(); pathway.create(haveSameHashesSmall, "valueObject", smallCounter); long smallOps = smallCounter.total(); largeCounter.zero(); pathway.create(haveSameHashesLarge, "valueObject", largeCounter); long largeOps = largeCounter.total(); double ratio = (double) largeOps / smallOps; assertThat(ratio) .named( "ratio of equals/hashCode/compareTo operations to build an ImmutableMap via %s" + " with %s entries versus %s entries", pathway, largeSize, smallSize) .isAtMost(2 * (largeSize * Math.log(largeSize)) / (smallSize * Math.log(smallSize))); // allow up to 2x wobble in the constant factors } }
@GwtIncompatible public void testResistsHashFloodingOnInverseGet() { for (AdversaryType adversary : AdversaryType.values()) { CallsCounter smallCounter = new CallsCounter(); List<? extends Entry<?, ?>> smallEntries = adversary.createAdversarialEntries(10, smallCounter); ImmutableBiMap<?, ?> smallMap = ConstructionPathway.COPY_OF_ENTRIES.create(smallEntries, smallCounter); int smallSize = smallEntries.size(); long smallOps = worstCaseQueryOperations(smallMap.inverse(), smallCounter); CallsCounter largeCounter = new CallsCounter(); List<? extends Entry<?, ?>> largeEntries = adversary.createAdversarialEntries(15, largeCounter); ImmutableBiMap<?, ?> largeMap = ConstructionPathway.COPY_OF_ENTRIES.create(largeEntries, largeCounter); int largeSize = largeEntries.size(); long largeOps = worstCaseQueryOperations(largeMap.inverse(), largeCounter); if (smallOps == 0 && largeOps == 0) { continue; // no queries on the CHCAE objects } double ratio = (double) largeOps / smallOps; assertThat(ratio) .named( "Ratio of worst case get operations for an ImmutableBiMap with %s of size " + "%s versus %s", adversary, largeSize, smallSize) .isAtMost(2 * Math.log(largeSize) / Math.log(smallSize)); // allow up to 2x wobble in the constant factors } }
@GwtIncompatible public void testResistsHashFloodingInConstruction() { for (AdversaryType adversary : AdversaryType.values()) { CallsCounter smallCounter = new CallsCounter(); List<? extends Entry<?, ?>> smallEntries = adversary.createAdversarialEntries(10, smallCounter); int smallSize = smallEntries.size(); CallsCounter largeCounter = new CallsCounter(); List<? extends Entry<?, ?>> largeEntries = adversary.createAdversarialEntries(15, largeCounter); int largeSize = largeEntries.size(); for (ConstructionPathway pathway : ConstructionPathway.values()) { smallCounter.zero(); pathway.create(smallEntries, smallCounter); long smallOps = smallCounter.total(); largeCounter.zero(); pathway.create(largeEntries, largeCounter); long largeOps = largeCounter.total(); double ratio = (double) largeOps / smallOps; assertThat(ratio) .named( "ratio of equals/hashCode/compareTo operations to build an ImmutableBiMap with %s" + " via %s with %s entries versus %s entries", adversary, pathway, largeSize, smallSize) .isAtMost(2 * (largeSize * Math.log(largeSize)) / (smallSize * Math.log(smallSize))); // allow up to 2x wobble in the constant factors } } }
// Fragment (enclosing method not fully visible): checks Stats.meanOf(double...)
// against the expected mean for every non-finite combination of inputs.
double mean = Stats.meanOf(values.asArray());
if (values.hasAnyNaN()) {
  // Any NaN input makes the mean NaN.
  assertThat(mean).named("mean of " + values).isNaN();
} else if (values.hasAnyPositiveInfinity() && values.hasAnyNegativeInfinity()) {
  // Mixing +inf and -inf also yields NaN.
  assertThat(mean).named("mean of " + values).isNaN();
} else if (values.hasAnyPositiveInfinity()) {
  assertThat(mean).named("mean of " + values).isPositiveInfinity();
} else if (values.hasAnyNegativeInfinity()) {
  assertThat(mean).named("mean of " + values).isNegativeInfinity();
} else {
  // All-finite case: compare against the precomputed expected mean.
  assertThat(mean).named("mean of " + values).isWithin(ALLOWED_ERROR).of(MANY_VALUES_MEAN);
// Fragment (enclosing method not fully visible): population covariance is NaN
// whenever any input is non-finite; otherwise it matches sum-of-products / count.
double populationCovariance = stats.populationCovariance();
if (values.hasAnyNonFinite()) {
  assertThat(populationCovariance).named("population covariance of " + values).isNaN();
} else {
  assertThat(populationCovariance)
      .named("population covariance of " + values)
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_PRODUCTS_OF_DELTAS / MANY_VALUES_COUNT);
// Fragment (enclosing method not fully visible): max via the Stats.of(Iterator)
// overload; NaN poisons the max, +inf dominates, otherwise compare to expected.
double max = Stats.of(values.asIterable().iterator()).max();
if (values.hasAnyNaN()) {
  assertThat(max).named("max of " + values).isNaN();
} else if (values.hasAnyPositiveInfinity()) {
  assertThat(max).named("max of " + values).isPositiveInfinity();
} else {
  assertThat(max).named("max of " + values).isWithin(ALLOWED_ERROR).of(MANY_VALUES_MAX);
@AndroidIncompatible // slow public void testPercentiles_index_computeInPlace() { // Assert that the computation gives the correct result for all possible percentiles. for (int index = 0; index <= 100; index++) { double[] dataset = Doubles.toArray(PSEUDORANDOM_DATASET); assertThat(percentiles().index(index).computeInPlace(dataset)) .named("quantile at index " + index) .isWithin(ALLOWED_ERROR) .of(expectedLargeDatasetPercentile(index)); } // Assert that the dataset contains the same elements after the in-place computation (although // they may be reordered). We only do this for one index rather than for all indexes, as it is // quite expensives (quadratic in the size of PSEUDORANDOM_DATASET). double[] dataset = Doubles.toArray(PSEUDORANDOM_DATASET); @SuppressWarnings("unused") double actual = percentiles().index(33).computeInPlace(dataset); assertThat(dataset).usingExactEquality().containsExactlyElementsIn(PSEUDORANDOM_DATASET); }
// Fragment (enclosing method not fully visible): same mean checks as the meanOf
// test, but via the Stats.of(double[]).mean() pathway.
double mean = Stats.of(values.asArray()).mean();
if (values.hasAnyNaN()) {
  // Any NaN input makes the mean NaN.
  assertThat(mean).named("mean of " + values).isNaN();
} else if (values.hasAnyPositiveInfinity() && values.hasAnyNegativeInfinity()) {
  // Mixing +inf and -inf also yields NaN.
  assertThat(mean).named("mean of " + values).isNaN();
} else if (values.hasAnyPositiveInfinity()) {
  assertThat(mean).named("mean of " + values).isPositiveInfinity();
} else if (values.hasAnyNegativeInfinity()) {
  assertThat(mean).named("mean of " + values).isNegativeInfinity();
} else {
  // All-finite case: compare against the precomputed expected mean.
  assertThat(mean).named("mean of " + values).isWithin(ALLOWED_ERROR).of(MANY_VALUES_MEAN);
// Fragment (enclosing method not fully visible): the accumulator populated via
// addAll(Stats) must agree with the directly-populated one on min().
double minByAddAllStats = accumulatorByAddAllStats.min();
if (values.hasAnyNaN()) {
  assertThat(min).named("min of " + values).isNaN();
  assertThat(minByAddAllStats).named("min by addAll(Stats) of " + values).isNaN();
} else if (values.hasAnyNegativeInfinity()) {
  // -inf dominates the min.
  assertThat(min).named("min of " + values).isNegativeInfinity();
  assertThat(minByAddAllStats)
      .named("min by addAll(Stats) of " + values)
      .isNegativeInfinity();
} else {
  assertThat(min).named("min of " + values).isWithin(ALLOWED_ERROR).of(MANY_VALUES_MIN);
  assertThat(minByAddAllStats)
      .named("min by addAll(Stats) of " + values)
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_MIN);
// Fragment (enclosing method not fully visible): the accumulator populated via
// addAll(Stats) must agree with the directly-populated one on max().
double maxByAddAllStats = accumulatorByAddAllStats.max();
if (values.hasAnyNaN()) {
  assertThat(max).named("max of " + values).isNaN();
  assertThat(maxByAddAllStats).named("max by addAll(Stats) of " + values).isNaN();
} else if (values.hasAnyPositiveInfinity()) {
  // +inf dominates the max.
  assertThat(max).named("max of " + values).isPositiveInfinity();
  assertThat(maxByAddAllStats)
      .named("max by addAll(Stats) of " + values)
      .isPositiveInfinity();
} else {
  assertThat(max).named("max of " + values).isWithin(ALLOWED_ERROR).of(MANY_VALUES_MAX);
  assertThat(maxByAddAllStats)
      .named("max by addAll(Stats) of " + values)
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_MAX);
// Fragment (enclosing method not fully visible): min via the accumulator's
// snapshot(); NaN poisons the min, -inf dominates, otherwise compare to expected.
double min = accumulator.snapshot().min();
if (values.hasAnyNaN()) {
  assertThat(min).named("min of " + values).isNaN();
} else if (values.hasAnyNegativeInfinity()) {
  assertThat(min).named("min of " + values).isNegativeInfinity();
} else {
  assertThat(min).named("min of " + values).isWithin(ALLOWED_ERROR).of(MANY_VALUES_MIN);
// Fragment (enclosing method not fully visible): population variance is NaN for
// any non-finite input; otherwise it matches sum-of-squares-of-deltas / count.
double populationVariance = Stats.of(values.asIterable()).populationVariance();
if (values.hasAnyNonFinite()) {
  assertThat(populationVariance).named("population variance of " + values).isNaN();
} else {
  assertThat(populationVariance)
      .named("population variance of " + values)
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT);