@CollectionSize.Require(absent = ZERO)
@CollectionFeature.Require(SUPPORTS_REMOVE)
public void testElementSetRemovePropagatesToMultiset() {
  // Removing an element through the element-set view must strip every
  // occurrence of that element from the backing multiset.
  Set<E> view = getMultiset().elementSet();
  int occurrences = getMultiset().count(e0());
  int sizeAfterRemoval = getNumElements() - occurrences;

  assertTrue(view.remove(e0()));

  assertFalse(getMultiset().contains(e0()));
  assertEquals(sizeAfterRemoval, getMultiset().size());
}
/**
 * Shared implementation of {@link Multiset#setCount(Object, int)}: adjusts the
 * multiset so that {@code element} occurs exactly {@code count} times.
 *
 * @param self    the multiset to mutate
 * @param element the element whose count is being set
 * @param count   the desired occurrence count; must be non-negative
 * @return the element's occurrence count before the adjustment
 */
static <E> int setCountImpl(Multiset<E> self, E element, int count) {
  checkNonnegative(count, "count");
  int previous = self.count(element);
  int difference = count - previous;
  // Apply only the delta: shrink when over target, grow when under, no-op otherwise.
  if (difference < 0) {
    self.remove(element, -difference);
  } else if (difference > 0) {
    self.add(element, difference);
  }
  return previous;
}
@Test public void testSimpleIterator() { NodeLayout masterNodeLayout = new NodeLayout("large", "centos6", ImmutableSet.of("master1")); NodeLayout slaveLayout = new NodeLayout("medium", "centos6", ImmutableSet.of("slave1")); Multiset<NodeLayout> counts = HashMultiset.create(); counts.add(masterNodeLayout); counts.add(slaveLayout); ClusterLayout layout = new ClusterLayout(constraints, counts); Iterator<ClusterLayoutChange> iter = new AddServiceChangeIterator(layout, "slave2"); // only possible change is to add slave2 to the slave node List<ClusterLayoutChange> expected = Lists.newArrayList(); Multiset<NodeLayout> expectedCounts = HashMultiset.create(); expectedCounts.add(slaveLayout); expected.add(new AddServicesChange(expectedCounts, "slave2")); assertIterator(expected, iter); }
public void testHashFloat() { Multiset<Integer> violations = HashMultiset.create(); for (int k = 0; k < 1000; k++) { List<Float> original = Lists.newArrayList(); Random gen = RandomUtils.getRandom(); for (int i = 0; i < 10000; i++) { float x = (float) gen.nextDouble(); original.add(x); } violations.add(checkCounts(original) <= 12 ? 0 : 1); } // the hashes for floats don't really have 32 bits of entropy so the test // only succeeds at better than about 99% rate. assertTrue(violations.count(0) >= 985); }
@Override
public boolean equals(@Nullable Object object) {
  // Equal when the other object is a Multiset with the same total size whose
  // distinct elements match this delegate set.
  if (!(object instanceof Multiset)) {
    return false;
  }
  Multiset<?> other = (Multiset<?>) object;
  return size() == other.size() && delegate.equals(other.elementSet());
}
private void checkAttributeNamesForDuplicates(ValueType type, Protoclass protoclass) { if (!type.attributes.isEmpty()) { Multiset<String> attributeNames = HashMultiset.create(type.attributes.size()); for (ValueAttribute attribute : type.attributes) { if (attribute.isGenerateLazy) { attributeNames.add(attribute.name() + "$lazy"); // making lazy compare in it's own scope } else { attributeNames.add(attribute.name()); } } List<String> duplicates = Lists.newArrayList(); for (Multiset.Entry<String> entry : attributeNames.entrySet()) { if (entry.getCount() > 1) { duplicates.add(entry.getElement().replace("$lazy", "")); } } if (!duplicates.isEmpty()) { protoclass.report() .error("Duplicate attribute names %s. You should check if correct @Value.Style applied", duplicates); } } }
@Test
public void testValidLayout() {
  NodeLayout namenodeLayout = new NodeLayout("large-mem", "centos6", ImmutableSet.of("namenode"));
  NodeLayout datanodeLayout = new NodeLayout("medium", "centos6", ImmutableSet.of("datanode"));
  NodeLayout reactorLayout =
      new NodeLayout("medium", "centos6", ImmutableSet.of("reactor", "zookeeper"));

  // One master, one reactor node, and fifty identical slave nodes.
  Multiset<NodeLayout> counts = HashMultiset.create();
  counts.add(namenodeLayout);
  counts.add(reactorLayout);
  counts.add(datanodeLayout, 50);

  Assert.assertTrue(new ClusterLayout(constraints, counts).isValid());
}
/**
 * Splits this dataset into a training and a testing dataset. The split is
 * class-aware: roughly {@code percentage} percent of the observations of each
 * class are routed to the training set, the remainder to the testing set.
 *
 * @param percentage portion (0-100) of each class to place in the training set
 * @return a two-element array: {training dataset, testing dataset}
 */
public Dataset[] split(int percentage) {
  generateClassCountsIfAbsent();
  double trainingFraction = percentage / 100D;
  Multiset<String> servedCounts = HashMultiset.create();
  List<Observation> training = Lists.newArrayList();
  List<Observation> testing = Lists.newArrayList();
  for (Observation observation : observations) {
    // NOTE(review): getCountServedToTraining presumably returns the fraction of
    // this observation's class already routed to training — confirm against its
    // definition before relying on this comment.
    if (getCountServedToTraining(servedCounts, observation) <= trainingFraction) {
      training.add(observation);
    } else {
      testing.add(observation);
    }
    servedCounts.add(observation.getClazz());
  }
  return new Dataset[] {
      new Dataset(training, classMetadata),
      new Dataset(testing, classMetadata)
  };
}
/**
 * Asserts that exactly the given lines were written — ignoring order but
 * respecting multiplicity — then clears the record of written lines.
 *
 * @param lines the expected written lines, duplicates counted
 */
public void expectWritten(Collection<String> lines) {
  TestCase.assertEquals("File: " + filename, lines.size(), written.size());
  // Compare per-line occurrence counts rather than sequence order.
  for (Multiset.Entry<String> entry : HashMultiset.create(lines).entrySet()) {
    TestUtil.assertCount(written, entry.getElement(), entry.getCount());
  }
  written = Lists.newArrayList();
}
// NOTE(review): this span is a fragment of a larger test method — `partition`,
// `txx`, `parts`, `numP`, `random`, and `numV` are declared outside the visible
// excerpt, and the while-loop below is not closed here. Code kept verbatim;
// verify in full context.
final Set<String> names = ImmutableSet.of("Marko", "Dan", "Stephen", "Daniel", "Josh", "Thad", "Pavel", "Matthias");
final int numG = 10;
final long[] gids = new long[numG];
// Tally of how many elements landed in each partition id.
Multiset<Integer> partitions = HashMultiset.create();
partitions.add(partition);
txx.commit();
// Expect the data to be spread over at least 3 distinct partitions.
assertTrue(partitions.elementSet().size() >= 3);
// int numV = 0;
// Pick random distinct partitions until numP are chosen, accumulating their counts.
while (parts.size() < numP) {
int part = Iterables.get(partitions.elementSet(), random.nextInt(partitions.elementSet().size()));
if (parts.add(part)) numV += partitions.count(part);
public List<StubMapping> process(Iterable<StubMapping> stubMappings) { final Multiset<RequestPattern> requestCounts = HashMultiset.create(); final List<StubMapping> processedStubMappings = new ArrayList<>(); for (StubMapping stubMapping: stubMappings) { requestCounts.add(stubMapping.getRequest()); // Skip duplicate requests if shouldRecordRepeatsAsScenarios is not enabled if ( requestCounts.count(stubMapping.getRequest()) > 1 && !shouldRecordRepeatsAsScenarios ) { continue; } if (bodyExtractMatcher != null && bodyExtractMatcher.match(stubMapping.getResponse()).isExactMatch()) { bodyExtractor.extractInPlace(stubMapping); } processedStubMappings.add(stubMapping); } if (shouldRecordRepeatsAsScenarios) { new ScenarioProcessor().putRepeatedRequestsInScenarios(processedStubMappings); } // Run any stub mapping transformer extensions return Lists.transform(processedStubMappings, transformerRunner); } }
@GwtIncompatible // SerializableTester public void testSerializationContainingSelf() { Multiset<Multiset<?>> multiset = HashMultiset.create(); multiset.add(multiset, 2); Multiset<Multiset<?>> copy = SerializableTester.reserialize(multiset); assertEquals(2, copy.size()); assertSame(copy, copy.iterator().next()); }