/** Removes every element by delegating to the backing multiset view. */
@Override
public void clear() {
  multiset().clear();
}
/** Removes every element by delegating to the backing multiset view. */
@Override
public void clear() {
  multiset().clear();
}
}
/** Empties this collection by clearing the underlying multiset. */
@Override
public void clear() {
  multiset().clear();
}
/** Empties this collection by clearing the underlying multiset. */
@Override
public void clear() {
  multiset().clear();
}
}
/** Delegates removal of all elements to the backing multiset. */
@Override
public void clear() {
  multiset().clear();
}
}
/** Delegates removal of all elements to the backing multiset. */
@Override
public void clear() {
  multiset().clear();
}
/** Clearing the {@code keys()} view must clear the whole multimap. */
@MapFeature.Require(SUPPORTS_REMOVE)
public void testClearThroughKeys() {
  multimap().keys().clear();
  assertCleared();
}
/**
 * Verifies that an entry fetched from {@code entrySet()} is a live view:
 * after the multiset is cleared, the previously obtained entry must report
 * a count of zero rather than the stale count of three.
 */
@CollectionSize.Require(SEVERAL)
@CollectionFeature.Require(SUPPORTS_REMOVE)
@MultisetFeature.Require(ENTRIES_ARE_VIEWS)
public void testEntryReflectsClear() {
  initThreeCopies();
  assertEquals(3, getMultiset().count(e0()));
  Multiset.Entry<E> liveEntry = Iterables.getOnlyElement(getMultiset().entrySet());
  assertEquals(3, liveEntry.getCount());
  getMultiset().clear();
  assertEquals(0, liveEntry.getCount());
}
// Reset the running modification-size counter for the next accumulation window.
totalModificationsSize.set(0);
// Snapshot-and-clear: copy the accumulated per-table writes into an immutable
// local view, then empty the live collection. The copy MUST happen before the
// clear so no recorded writes are lost.
// NOTE(review): assumes no concurrent mutation between copyOf and clear — confirm callers serialize access.
Multiset<TableReference> localWritesByTable = ImmutableMultiset.copyOf(writesByTable); writesByTable.clear();
// Same snapshot-and-clear pattern for the set of tables that were cleared.
Set<TableReference> localClearedTables = ImmutableSet.copyOf(clearedTables); clearedTables.clear();
// Add the occurrences of this particular loss reason to the running total,
// then reset the reason counters for the next interval.
// NOTE(review): clear() drops counts for ALL reasons, not just lostTaskReason —
// confirm that discarding the other reasons' counts here is intended.
numLostTasks += taskLostReasons.count(lostTaskReason); taskLostReasons.clear();
/** Discards all stored training vectors. */
@Override
public void clear() {
  trainingVectors.clear();
}
}
/**
 * Empties the structure: detaches both ends of the linked chain and wipes
 * the per-key count multiset and the head/tail bookkeeping maps.
 */
public void clear() {
  head = null;
  tail = null;
  keyCount.clear();
  keyToKeyHead.clear();
  keyToKeyTail.clear();
}
/**
 * Removes everything: resets the chain endpoints and clears all per-key
 * bookkeeping (counts plus head/tail node maps).
 */
@Override
public void clear() {
  head = null;
  tail = null;
  keyCount.clear();
  keyToKeyHead.clear();
  keyToKeyTail.clear();
}
/**
 * Resets the container to its empty state by nulling the chain endpoints
 * and clearing every auxiliary per-key map and the count multiset.
 */
public void clear() {
  head = null;
  tail = null;
  keyCount.clear();
  keyToKeyHead.clear();
  keyToKeyTail.clear();
}
/**
 * Drops all entries: forgets the first/last nodes of the chain and empties
 * the key-count multiset and both per-key endpoint maps.
 */
public void clear() {
  head = null;
  tail = null;
  keyCount.clear();
  keyToKeyHead.clear();
  keyToKeyTail.clear();
}
/**
 * Recomputes the active labelling languages from scratch by scanning every
 * annotation-assertion axiom in the root ontology's imports closure, then
 * refreshes the sorted language list and logs the result with timing info.
 */
private void rebuild() {
  activeLangs.clear();
  Stopwatch timer = Stopwatch.createStarted();
  rootOntology.getImportsClosure().stream()
      .flatMap(ontology -> ontology.getAxioms(ANNOTATION_ASSERTION).stream())
      .filter(ActiveLanguagesManager::isLabellingAnnotation)
      .forEach(this::addAxiom);
  timer.stop();
  logger.info("{} Extracted {} languages in {} ms",
              projectId,
              activeLangs.elementSet().size(),
              timer.elapsed(MILLISECONDS));
  rebuildSortedLanguages();
  logSortedLanguages();
}
/** A clear() on the keys() view is expected to empty the backing multimap. */
@MapFeature.Require(SUPPORTS_REMOVE)
public void testClearThroughKeys() {
  multimap().keys().clear();
  assertCleared();
}
/** * Adds all of the tokens that we counted up to a vector. */ public void flush(double weight, Vector data) { for (String word : counts.elementSet()) { // weight words by log_2(tf) times whatever other weight we are given wordEncoder.addToVector(word, weight * Math.log1p(counts.count(word)) / LOG_2, data); } counts.clear(); }
/**
 * Entries handed out by {@code entrySet()} must be live views: once the
 * multiset is cleared, an entry captured beforehand reports count zero.
 */
@CollectionSize.Require(SEVERAL)
@CollectionFeature.Require(SUPPORTS_REMOVE)
@MultisetFeature.Require(ENTRIES_ARE_VIEWS)
public void testEntryReflectsClear() {
  initThreeCopies();
  assertEquals(3, getMultiset().count(e0()));
  Multiset.Entry<E> viewEntry = Iterables.getOnlyElement(getMultiset().entrySet());
  assertEquals(3, viewEntry.getCount());
  getMultiset().clear();
  assertEquals(0, viewEntry.getCount());
}