/** Verifies that {@code median()} over the double collection matches the precomputed median. */
public void testMedian_compute_doubleCollection() {
  double computedMedian = median().compute(SIXTEEN_SQUARES_DOUBLES);
  assertThat(computedMedian).isWithin(ALLOWED_ERROR).of(SIXTEEN_SQUARES_MEDIAN);
}
/** Asserts that the subject is {@link Double#NEGATIVE_INFINITY}. */
public final void isNegativeInfinity() {
  // Exact equality is intended here: negative infinity is a single, well-defined double value,
  // so no tolerance-based comparison is needed.
  isEqualTo(Double.NEGATIVE_INFINITY);
}
/**
 * Asserts that the actual bitmap is very similar to the expected bitmap at some quality level.
 *
 * <p>"Very similar" is defined as a PSNR value greater than or equal to the threshold; the
 * higher the threshold, the more similar the bitmaps must be.
 *
 * @param expectedBitmap The expected bitmap.
 * @param actualBitmap The actual bitmap.
 * @param psnrThresholdDb The PSNR threshold (in dB), at or above which bitmaps are considered
 *     very similar.
 */
public static void assertBitmapsAreSimilar(
    Bitmap expectedBitmap, Bitmap actualBitmap, double psnrThresholdDb) {
  double psnrDb = getPsnr(expectedBitmap, actualBitmap);
  assertThat(psnrDb).isAtLeast(psnrThresholdDb);
}
/**
 * Asserts that {@code actual} is equivalent to {@code expected}: exactly equal for the
 * non-finite special values (NaN and the infinities), and within {@code ALLOWED_ERROR}
 * otherwise.
 */
private static void assertEquivalent(double actual, double expected) {
  // The three special-value branches are mutually exclusive, so their order is immaterial.
  if (Double.isNaN(expected)) {
    assertThat(actual).isNaN();
  } else if (expected == POSITIVE_INFINITY) {
    assertThat(actual).isPositiveInfinity();
  } else if (expected == NEGATIVE_INFINITY) {
    assertThat(actual).isNegativeInfinity();
  } else {
    assertThat(actual).isWithin(ALLOWED_ERROR).of(expected);
  }
}
}
/** Tests {@code sum()} for empty, one-value, two-value, and many-value datasets. */
public void testSum() {
  // An empty dataset must report an exact zero sum, not merely one within tolerance.
  assertThat(EMPTY_STATS_VARARGS.sum()).isEqualTo(0.0);
  assertThat(EMPTY_STATS_ITERABLE.sum()).isEqualTo(0.0);
  assertSumWithinAllowedError(ONE_VALUE_STATS.sum(), ONE_VALUE);
  assertSumWithinAllowedError(TWO_VALUES_STATS.sum(), TWO_VALUES_MEAN * 2);
  // Every construction pathway (varargs, iterable, iterator, snapshot) must agree on the sum,
  // which equals mean * count.
  double expectedManyValuesSum = MANY_VALUES_MEAN * MANY_VALUES_COUNT;
  assertSumWithinAllowedError(MANY_VALUES_STATS_VARARGS.sum(), expectedManyValuesSum);
  assertSumWithinAllowedError(MANY_VALUES_STATS_ITERABLE.sum(), expectedManyValuesSum);
  assertSumWithinAllowedError(MANY_VALUES_STATS_ITERATOR.sum(), expectedManyValuesSum);
  assertSumWithinAllowedError(MANY_VALUES_STATS_SNAPSHOT.sum(), expectedManyValuesSum);
  double expectedIntegerSum = INTEGER_MANY_VALUES_MEAN * INTEGER_MANY_VALUES_COUNT;
  assertSumWithinAllowedError(INTEGER_MANY_VALUES_STATS_VARARGS.sum(), expectedIntegerSum);
  assertSumWithinAllowedError(INTEGER_MANY_VALUES_STATS_ITERABLE.sum(), expectedIntegerSum);
  double expectedLongSum = LONG_MANY_VALUES_MEAN * LONG_MANY_VALUES_COUNT;
  assertSumWithinAllowedError(LONG_MANY_VALUES_STATS_ITERATOR.sum(), expectedLongSum);
  assertSumWithinAllowedError(LONG_MANY_VALUES_STATS_SNAPSHOT.sum(), expectedLongSum);
}

/** Asserts that {@code actualSum} is within {@code ALLOWED_ERROR} of {@code expectedSum}. */
private static void assertSumWithinAllowedError(double actualSum, double expectedSum) {
  assertThat(actualSum).isWithin(ALLOWED_ERROR).of(expectedSum);
}
// NOTE(review): mid-method fragment of a populationCovariance() test — the enclosing try block
// and surrounding statements are outside this view, and one chain below has lost its subject.
// Code left byte-identical; verify against the full file before editing.
} catch (IllegalStateException expected) {
  // A single pair has zero covariance by definition; asserted exactly (tolerance 0.0).
  assertThat(ONE_VALUE_PAIRED_STATS.populationCovariance()).isWithin(0.0).of(0.0);
  // Any non-finite x value makes the covariance NaN.
  assertThat(createSingleStats(Double.POSITIVE_INFINITY, 1.23).populationCovariance()).isNaN();
  assertThat(createSingleStats(Double.NEGATIVE_INFINITY, 1.23).populationCovariance()).isNaN();
  assertThat(createSingleStats(Double.NaN, 1.23).populationCovariance()).isNaN();
  assertThat(TWO_VALUES_PAIRED_STATS.populationCovariance())
      .isWithin(ALLOWED_ERROR)
      .of(TWO_VALUES_SUM_OF_PRODUCTS_OF_DELTAS / 2);
  double populationCovariance = stats.populationCovariance();
  if (values.hasAnyNonFinite()) {
    assertThat(populationCovariance).named("population covariance of " + values).isNaN();
  } else {
    assertThat(populationCovariance)
        .named("population covariance of " + values)
        .isWithin(ALLOWED_ERROR)
        .of(MANY_VALUES_SUM_OF_PRODUCTS_OF_DELTAS / MANY_VALUES_COUNT);
    // NOTE(review): the chain below has no assertThat(...) subject — this fragment appears
    // truncated/garbled in this view; confirm against the original file.
    .isWithin(ALLOWED_ERROR)
    .of(0.0);
  assertThat(VERTICAL_VALUES_PAIRED_STATS.populationCovariance()).isWithin(ALLOWED_ERROR).of(0.0);
  assertThat(CONSTANT_VALUES_PAIRED_STATS.populationCovariance()).isWithin(ALLOWED_ERROR).of(0.0);
// NOTE(review): non-contiguous fragment — each leading ".isAtMost"/".isWithin"/".isGreaterThan"
// chain below is missing its assertThat(...) subject, which lies outside this view. Code left
// byte-identical; verify against the full file before editing.
    .isAtMost(PlaneRoadModel.DELTA);
// Remaining distance to the destination, in metres, checked against a tolerance of DELTA.
assertThat(mp.distance().doubleValue(SI.METER))
    .isWithin(PlaneRoadModel.DELTA).of(.25);
    .isAtMost(PlaneRoadModel.DELTA);
assertThat(mp2.distance().doubleValue(SI.METER))
    .isWithin(PlaneRoadModel.DELTA).of(.25);
    .isAtMost(PlaneRoadModel.DELTA);
assertThat(mp1c.distance().doubleValue(SI.METER))
    .isWithin(PlaneRoadModel.DELTA).of(0);
    .isGreaterThan(1d);
    .isWithin(PlaneRoadModel.DELTA).of(1d);
    .isWithin(PlaneRoadModel.DELTA).of(0d);
    .isWithin(PlaneRoadModel.DELTA).of(1d);
@Test
public void testCreateEmpty() {
  // Freshly created aggregations start from their identity values; "last value" starts as NaN.
  assertThat(MutableSumDouble.create().getSum()).isWithin(TOLERANCE).of(0);
  assertThat(MutableSumLong.create().getSum()).isWithin(TOLERANCE).of(0);
  assertThat(MutableCount.create().getCount()).isEqualTo(0);
  assertThat(MutableMean.create().getMean()).isWithin(TOLERANCE).of(0);
  assertThat(MutableLastValueDouble.create().getLastValue()).isNaN();
  assertThat(MutableLastValueLong.create().getLastValue()).isNaN();

  // Three boundaries partition the real line into four buckets, hence the length-4 arrays.
  BucketBoundaries boundaries = BucketBoundaries.create(Arrays.asList(0.1, 2.2, 33.3));
  MutableDistribution distribution = MutableDistribution.create(boundaries);
  assertThat(distribution.getMean()).isWithin(TOLERANCE).of(0);
  assertThat(distribution.getCount()).isEqualTo(0);
  assertThat(distribution.getSumOfSquaredDeviations()).isWithin(TOLERANCE).of(0);
  assertThat(distribution.getBucketCounts()).isEqualTo(new long[4]);
  assertThat(distribution.getExemplars()).isEqualTo(new Exemplar[4]);

  // With no histogram buckets there is nowhere to attach exemplars.
  MutableDistribution noHistogramDistribution =
      MutableDistribution.create(BUCKET_BOUNDARIES_EMPTY);
  assertThat(noHistogramDistribution.getExemplars()).isNull();
}
@GwtIncompatible public void testResistsHashFloodingOnContains() { CallsCounter smallCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesSmall = createAdversarialInput(10, smallCounter); ImmutableSet<?> smallSet = ConstructionPathway.COPY_OF_LIST.create(haveSameHashesSmall); long worstCaseOpsSmall = worstCaseQueryOperations(smallSet, smallCounter); CallsCounter largeCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesLarge = createAdversarialInput(15, largeCounter); ImmutableSet<?> largeSet = ConstructionPathway.COPY_OF_LIST.create(haveSameHashesLarge); long worstCaseOpsLarge = worstCaseQueryOperations(largeSet, largeCounter); double ratio = (double) worstCaseOpsLarge / worstCaseOpsSmall; int smallSize = haveSameHashesSmall.size(); int largeSize = haveSameHashesLarge.size(); assertThat(ratio) .named( "ratio of equals/hashCode/compareTo operations to worst-case query an ImmutableSet " + "of size %s versus size %s", haveSameHashesLarge.size(), haveSameHashesSmall.size()) .isAtMost(2 * Math.log(largeSize) / Math.log(smallSize)); // We allow up to 2x wobble in the constant factors. }
@Test
public void nonNumberArray() {
  // IEEE_DIVIDE yields +Infinity, -Infinity, and NaN for 1/0, -1/0, and 0/0 respectively.
  Struct row =
      execute(
          Statement.newBuilder(
              "SELECT [IEEE_DIVIDE(1, 0), IEEE_DIVIDE(-1, 0), IEEE_DIVIDE(0, 0)]"),
          Type.array(Type.float64()));
  assertThat(row.getDoubleList(0)).hasSize(3);
  assertThat(row.getDoubleList(0).get(0)).isPositiveInfinity();
  assertThat(row.getDoubleList(0).get(1)).isNegativeInfinity();
  assertThat(row.getDoubleList(0).get(2)).isNaN();
}
// NOTE(review): mid-method fragment — interArrivalTimes, acceptableDuration, and tt are defined
// outside this view. Code left byte-identical.
// Bursts (indices 0-2, 6-9, 13-29) must complete quickly in aggregate, while individual ticks
// (indices 3-6, 10-13) must each fall inside the acceptable duration range.
assertThat(sum(interArrivalTimes.subList(0, 3))).isAtMost(100d);
assertThat(interArrivalTimes.get(3)).isIn(acceptableDuration);
assertThat(interArrivalTimes.get(4)).isIn(acceptableDuration);
assertThat(interArrivalTimes.get(5)).isIn(acceptableDuration);
assertThat(interArrivalTimes.get(6)).isIn(acceptableDuration);
assertThat(sum(interArrivalTimes.subList(6, 10))).isAtMost(150d);
assertThat(interArrivalTimes.get(10)).isIn(acceptableDuration);
assertThat(interArrivalTimes.get(11)).isIn(acceptableDuration);
assertThat(interArrivalTimes.get(12)).isIn(acceptableDuration);
assertThat(interArrivalTimes.get(13)).isIn(acceptableDuration);
assertThat(sum(interArrivalTimes.subList(13, 30))).isAtMost(150d);
// After the bursts, the clock is expected to be back in real-time mode at tick 30.
assertThat(tt.getClockModes().get(30)).isEqualTo(REAL_TIME);
// NOTE(review): mid-method fragment — l1, l2, interArrivalTimes, and getModel() are defined
// outside this view. Code left byte-identical.
// Convert the nanosecond timestamp delta to milliseconds before recording it.
interArrivalTimes.add((l2 - l1) / 1000000d);
// Each inter-arrival time is expected in the 400-500 ms window (indices 1 and 3 check only the
// upper bound — presumably intentional; confirm against the full test).
assertThat(interArrivalTimes.get(0)).isAtLeast(400d);
assertThat(interArrivalTimes.get(0)).isAtMost(500d);
assertThat(interArrivalTimes.get(1)).isAtMost(500d);
assertThat(interArrivalTimes.get(2)).isAtLeast(400d);
assertThat(interArrivalTimes.get(2)).isAtMost(500d);
assertThat(interArrivalTimes.get(3)).isAtMost(500d);
assertThat(getModel().isExecutorAlive()).isFalse();
/**
 * Asserts that {@code transformation} behaves as expected for {@link
 * LinearTransformation#forNaN}.
 */
static void assertLinearTransformationNaN(LinearTransformation transformation) {
  // The NaN transformation is neither vertical nor horizontal...
  assertThat(transformation.isVertical()).isFalse();
  assertThat(transformation.isHorizontal()).isFalse();
  // ...its slope and every transformed value are NaN...
  assertThat(transformation.slope()).isNaN();
  assertThat(transformation.transform(0.0)).isNaN();
  // ...and its inverse is the very same instance.
  assertThat(transformation.inverse()).isSameAs(transformation);
}
/**
 * Asserts that {@code transformed} preserves the aspect ratio of {@code original} to within an
 * open tolerance of 0.05.
 */
private static void assertHasOriginalAspectRatio(Bitmap original, Bitmap transformed) {
  double originalAspectRatio = (double) original.getWidth() / (double) original.getHeight();
  double transformedAspectRatio =
      (double) transformed.getWidth() / (double) transformed.getHeight();
  // Fix: use a double tolerance literal. The previous 0.05f float literal widened to
  // 0.05000000074505806 in this double-valued expression, silently loosening the bound.
  assertThat(transformedAspectRatio)
      .isIn(Range.open(originalAspectRatio - 0.05, originalAspectRatio + 0.05));
}
// NOTE(review): mid-method fragment — the if/else block below is not closed in this view and the
// final assertion chain is cut off mid-statement. Code left byte-identical; verify against the
// full file before editing.
assertThat(Stats.meanOf(ONE_VALUE)).isWithin(ALLOWED_ERROR).of(ONE_VALUE);
// The mean of a single non-finite value is that value (or NaN for NaN input).
assertThat(Stats.meanOf(POSITIVE_INFINITY)).isPositiveInfinity();
assertThat(Stats.meanOf(NEGATIVE_INFINITY)).isNegativeInfinity();
assertThat(Stats.meanOf(NaN)).isNaN();
assertThat(Stats.meanOf(TWO_VALUES)).isWithin(ALLOWED_ERROR).of(TWO_VALUES_MEAN);
double mean = Stats.meanOf(values.asArray());
// Expected mean by IEEE-754 rules: NaN dominates; opposing infinities give NaN; a single-signed
// infinity gives that infinity; otherwise the finite mean within tolerance.
if (values.hasAnyNaN()) {
  assertThat(mean).named("mean of " + values).isNaN();
} else if (values.hasAnyPositiveInfinity() && values.hasAnyNegativeInfinity()) {
  assertThat(mean).named("mean of " + values).isNaN();
} else if (values.hasAnyPositiveInfinity()) {
  assertThat(mean).named("mean of " + values).isPositiveInfinity();
} else if (values.hasAnyNegativeInfinity()) {
  assertThat(mean).named("mean of " + values).isNegativeInfinity();
} else {
  assertThat(mean).named("mean of " + values).isWithin(ALLOWED_ERROR).of(MANY_VALUES_MEAN);
  // All dataset representations (iterable, iterator, primitive arrays) must agree on the mean.
  assertThat(Stats.meanOf(MANY_VALUES)).isWithin(ALLOWED_ERROR).of(MANY_VALUES_MEAN);
  assertThat(Stats.meanOf(MANY_VALUES.iterator())).isWithin(ALLOWED_ERROR).of(MANY_VALUES_MEAN);
  assertThat(Stats.meanOf(INTEGER_MANY_VALUES))
      .isWithin(ALLOWED_ERROR)
      .of(INTEGER_MANY_VALUES_MEAN);
  assertThat(Stats.meanOf(Ints.toArray(INTEGER_MANY_VALUES)))
      .isWithin(ALLOWED_ERROR)
      .of(INTEGER_MANY_VALUES_MEAN);
  assertThat(Stats.meanOf(LONG_MANY_VALUES)).isWithin(ALLOWED_ERROR).of(LONG_MANY_VALUES_MEAN);
  assertThat(Stats.meanOf(Longs.toArray(LONG_MANY_VALUES)))
      .isWithin(ALLOWED_ERROR)
// NOTE(review): mid-method fragment — the accumulator locals are declared outside this view and
// the trailing assertion chain is cut off mid-statement. Code left byte-identical.
// A single pair has zero covariance by definition; asserted exactly (tolerance 0.0).
assertThat(oneValueAccumulator.populationCovariance()).isWithin(0.0).of(0.0);
assertThat(oneValueAccumulatorByAddAllEmptyPairedStats.populationCovariance())
    .isWithin(0.0)
    .of(0.0);
// Direct accumulation and addAll(PairedStats) accumulation must agree.
assertThat(twoValuesAccumulator.populationCovariance())
    .isWithin(ALLOWED_ERROR)
    .of(TWO_VALUES_SUM_OF_PRODUCTS_OF_DELTAS / 2);
assertThat(twoValuesAccumulatorByAddAllPartitionedPairedStats.populationCovariance())
    .isWithin(ALLOWED_ERROR)
    .of(TWO_VALUES_SUM_OF_PRODUCTS_OF_DELTAS / 2);
assertThat(manyValuesAccumulator.populationCovariance())
    .isWithin(ALLOWED_ERROR)
    .of(MANY_VALUES_SUM_OF_PRODUCTS_OF_DELTAS / MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAllPartitionedPairedStats.populationCovariance())
    .isWithin(ALLOWED_ERROR)
    .of(MANY_VALUES_SUM_OF_PRODUCTS_OF_DELTAS / MANY_VALUES_COUNT);
accumulatorByAddAllPartitionedPairedStats.populationCovariance();
// Non-finite inputs must poison the covariance to NaN for both accumulation pathways.
if (values.hasAnyNonFinite()) {
  assertThat(populationCovariance).named("population covariance of " + values).isNaN();
  assertThat(populationCovarianceByAddAllPartitionedPairedStats)
      .named("population covariance by addAll(PairedStats) of " + values)
      .isNaN();
} else {
  assertThat(populationCovariance)
      .named("population covariance of " + values)
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_PRODUCTS_OF_DELTAS / MANY_VALUES_COUNT);
  assertThat(populationCovarianceByAddAllPartitionedPairedStats)
@AndroidIncompatible // slow public void testPercentiles_index_computeInPlace() { // Assert that the computation gives the correct result for all possible percentiles. for (int index = 0; index <= 100; index++) { double[] dataset = Doubles.toArray(PSEUDORANDOM_DATASET); assertThat(percentiles().index(index).computeInPlace(dataset)) .named("quantile at index " + index) .isWithin(ALLOWED_ERROR) .of(expectedLargeDatasetPercentile(index)); } // Assert that the dataset contains the same elements after the in-place computation (although // they may be reordered). We only do this for one index rather than for all indexes, as it is // quite expensives (quadratic in the size of PSEUDORANDOM_DATASET). double[] dataset = Doubles.toArray(PSEUDORANDOM_DATASET); @SuppressWarnings("unused") double actual = percentiles().index(33).computeInPlace(dataset); assertThat(dataset).usingExactEquality().containsExactlyElementsIn(PSEUDORANDOM_DATASET); }
@GwtIncompatible public void testResistsHashFloodingOnCount() { CallsCounter smallCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesSmall = createAdversarialInput(10, smallCounter); int smallSize = haveSameHashesSmall.size(); ImmutableMultiset<?> smallMap = ConstructionPathway.COPY_OF_COLLECTION.create(haveSameHashesSmall); long worstCaseQuerySmall = worstCaseQueryOperations(smallMap, smallCounter); CallsCounter largeCounter = new CallsCounter(); List<CountsHashCodeAndEquals> haveSameHashesLarge = createAdversarialInput(15, largeCounter); int largeSize = haveSameHashesLarge.size(); ImmutableMultiset<?> largeMap = ConstructionPathway.COPY_OF_COLLECTION.create(haveSameHashesLarge); long worstCaseQueryLarge = worstCaseQueryOperations(largeMap, largeCounter); double ratio = (double) worstCaseQueryLarge / worstCaseQuerySmall; assertThat(ratio) .named( "Ratio of worst case query operations for an ImmutableMultiset of size %s versus %s", largeSize, smallSize) .isAtMost(2 * Math.log(largeSize) / Math.log(smallSize)); // allow up to 2x wobble in the constant factors }
@Test
public void writeFloat64NonNumbers() {
  // Non-finite doubles must round-trip through the Float64 column unchanged.
  assertThat(writeAndReadBackFloat64(Double.NEGATIVE_INFINITY)).isNegativeInfinity();
  assertThat(writeAndReadBackFloat64(Double.POSITIVE_INFINITY)).isPositiveInfinity();
  assertThat(writeAndReadBackFloat64(Double.NaN)).isNaN();
}

/**
 * Writes {@code value} to the Float64Value column, reads the last row back, asserts the column
 * is non-null, and returns the stored value.
 */
private double writeAndReadBackFloat64(double value) {
  write(baseInsert().set("Float64Value").to(value).build());
  Struct row = readLastRow("Float64Value");
  assertThat(row.isNull(0)).isFalse();
  return row.getDouble(0);
}