/**
 * Method for removing all field properties from this ObjectNode
 * <b>except</b> for the ones specified in the argument.
 *
 * @param fieldNames Fields to <b>retain</b> in this ObjectNode
 *
 * @return This node (to allow call chaining)
 */
public ObjectNode retain(Collection<String> fieldNames) {
    _children.keySet().retainAll(fieldNames);
    return this;
}
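A minimal usage sketch of retain, assuming Jackson databind's ObjectMapper and ObjectNode; the field names are hypothetical:

import java.util.Arrays;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class RetainDemo {
    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode node = mapper.createObjectNode();
        node.put("id", 1).put("name", "a").put("debug", true);
        node.retain(Arrays.asList("id", "name")); // "debug" is removed
        System.out.println(node); // {"id":1,"name":"a"}
    }
}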
@Override
public boolean retainAll(Collection<?> c) {
    try {
        return super.retainAll(checkNotNull(c));
    } catch (UnsupportedOperationException e) {
        // if the iterators don't support remove, rebuild the set of keys
        // whose entries should survive and retain those keys instead
        Set<K> toRetain = Sets.newHashSet();
        for (Entry<K, V> entry : map().entrySet()) {
            if (c.contains(entry.getValue())) {
                toRetain.add(entry.getKey());
            }
        }
        return map().keySet().retainAll(toRetain);
    }
}
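The fallback above retains exactly the entries whose values appear in c, which matches the standard semantics of retainAll on a map's values view; a standalone sketch with plain java.util and hypothetical data:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class ValuesRetainAllDemo {
    public static void main(String[] args) {
        Map<String, Integer> m = new HashMap<>();
        m.put("a", 1);
        m.put("b", 2);
        m.put("c", 1);
        // retainAll on the values view removes every entry whose value
        // is not in the given collection
        m.values().retainAll(Collections.singleton(1));
        System.out.println(m); // {a=1, c=1} (iteration order may vary)
    }
}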
private void checkCrossDexMovingClasses() {
    // Check whether any class that was deleted in one dex was added to
    // another dex, e.g. classA is deleted in dex0 and added in dex1.
    // Since DexClassesComparator guarantees that a class is either 'added'
    // or 'deleted' between the two files it compares, we can perform this
    // check by computing the intersection of deletedClassDescs and
    // addedClassDescs.
    Set<String> deletedClassDescs = new HashSet<>(deletedClassDescToDexNameMap.keySet());
    Set<String> addedClassDescs = new HashSet<>(addedClassDescToDexNameMap.keySet());
    deletedClassDescs.retainAll(addedClassDescs);

    // Now deletedClassDescs contains only the elements common to the
    // deleted and added descriptor sets.
    Set<String> movedCrossFilesClassDescs = deletedClassDescs;
    if (!movedCrossFilesClassDescs.isEmpty()) {
        Logger.e("Warning: Class moved. Some classes were just moved from one dex to another. "
                + "This may lead to unnecessary enlargement of the patch file. You should check them:");
        for (String classDesc : movedCrossFilesClassDescs) {
            StringBuilder sb = new StringBuilder();
            sb.append('{');
            sb.append("classDesc:").append(classDesc).append(',');
            sb.append("from:").append(deletedClassDescToDexNameMap.get(classDesc)).append(',');
            sb.append("to:").append(addedClassDescToDexNameMap.get(classDesc));
            sb.append('}');
            Logger.e(sb.toString());
        }
    }
}
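The copy-then-retainAll idiom used here is a non-destructive set intersection; a minimal standalone sketch with hypothetical class descriptors:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class IntersectionDemo {
    public static void main(String[] args) {
        Set<String> deleted = new HashSet<>(Arrays.asList("La;", "Lb;", "Lc;"));
        Set<String> added = new HashSet<>(Arrays.asList("Lb;", "Lc;", "Ld;"));
        // copy first so the original 'deleted' set is left untouched
        Set<String> moved = new HashSet<>(deleted);
        moved.retainAll(added); // set intersection
        System.out.println(moved); // [Lb;, Lc;] (iteration order may vary)
    }
}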
public void compute() {
    Set<String> allClasses = classRequires.keySet();
    Set<String> emptyClasses = new TreeSet<>();
    for (String c : allClasses) {
        Set<String> needs = classRequires.get(c);
        // keep only requirements that are themselves part of this analysis
        needs.retainAll(allClasses);
        // pull in one level of indirect requirements
        Set<String> extra = new TreeSet<>();
        for (String need : needs) {
            extra.addAll(classRequires.get(need));
        }
        needs.addAll(extra);
        needs.retainAll(allClasses);
        classRequires.put(c, needs);
        if (needs.isEmpty()) {
            emptyClasses.add(c);
        }
    }
    // drop classes that end up with no requirements at all
    for (String c : emptyClasses) {
        classRequires.remove(c);
    }
}
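A condensed, hypothetical run of the same steps, assuming classRequires maps each class name to the set of classes it requires:

import java.util.*;

public class ComputeDemo {
    public static void main(String[] args) {
        Map<String, Set<String>> classRequires = new TreeMap<>();
        classRequires.put("A", new TreeSet<>(Arrays.asList("B", "java.lang.String")));
        classRequires.put("B", new TreeSet<>(Arrays.asList("C")));
        classRequires.put("C", new TreeSet<>());

        Set<String> allClasses = classRequires.keySet();
        Set<String> emptyClasses = new TreeSet<>();
        for (String c : allClasses) {
            Set<String> needs = classRequires.get(c);
            needs.retainAll(allClasses); // "java.lang.String" is dropped here
            Set<String> extra = new TreeSet<>();
            for (String need : needs) {
                extra.addAll(classRequires.get(need));
            }
            needs.addAll(extra);
            needs.retainAll(allClasses);
            if (needs.isEmpty()) {
                emptyClasses.add(c);
            }
        }
        for (String c : emptyClasses) {
            classRequires.remove(c);
        }
        System.out.println(classRequires); // {A=[B, C], B=[C]}
    }
}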
Iterator<Entry<String, String>> iter = crtTbl.getDefaultSerdeProps().entrySet().iterator();
while (iter.hasNext()) {
    Entry<String, String> m = iter.next();
    tbl.setSerdeParam(m.getKey(), m.getValue());
}

retainer.add(META_TABLE_STORAGE);
if (spec != null && spec.schemaProps() != null) {
    retainer.addAll(Arrays.asList(spec.schemaProps()));
}
retainer.addAll(Arrays.asList(paramsStr.split(",")));
if (!retainer.isEmpty()) {
    params.keySet().retainAll(retainer);
} else {
    params.clear();
}
protected void handleViewChange(List<Address> mbrs) {
    super.handleViewChange(mbrs);
    if (mbrs == null)
        return;
    // add new members to the sent map (with full credits)
    mbrs.stream().filter(addr -> !sent.containsKey(addr))
        .forEach(addr -> sent.put(addr, createCredit((int) max_credits)));
    // remove members that left
    sent.keySet().retainAll(mbrs);
}
down.removeAll(tmp.keySet());
routes.keySet().retainAll(tmp.keySet()); // remove all sites which are not in the view
for (Map.Entry<String, List<Address>> entry : tmp.entrySet()) {
    String key = entry.getKey();
    List<Address> val = entry.getValue();
    if (!routes.containsKey(key)) {
        routes.put(key, new ArrayList<>());
        if (up != null)
            up.add(key);
    }
    if (val.isEmpty()) {
        // the site has no members left, so drop its route and mark it down
        routes.remove(key);
        down.add(key);
        up.remove(key);
    } else {
        routes.get(key).addAll(val);
    }
}
if (listener != null) {
    if (!down.isEmpty())
        listener.sitesDown(down.toArray(new String[down.size()]));
    if (!up.isEmpty())
        listener.sitesUp(up.toArray(new String[up.size()]));
}
/**
 * Removes from @commonValuesMap any key-value pairs that do not agree with the
 * corresponding key-value pairs of @additionalValuesMap.
 *
 * @param commonValuesMap Common values map, whose key set will be reduced.
 * @param additionalValuesMap Incoming values map, against which @commonValuesMap is compared.
 */
static void findCommon(Map<String, String> commonValuesMap,
        Map<String, String> additionalValuesMap) {
    Set<String> sharedKeySet = commonValuesMap.keySet();
    // retainAll on the key-set view drops entries whose keys are absent from the incoming map
    sharedKeySet.retainAll(additionalValuesMap.keySet());
    // iterate over a copy so entries can be removed from the map while iterating
    for (String sharedKey : new HashSet<>(sharedKeySet)) {
        String commonNdValue = commonValuesMap.get(sharedKey);
        String incomingNdValue = additionalValuesMap.get(sharedKey);
        if ((commonNdValue != null && !commonNdValue.equals(incomingNdValue))
                || (commonNdValue == null && incomingNdValue != null)) {
            commonValuesMap.remove(sharedKey);
        }
    }
}
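A usage sketch, assuming findCommon from the snippet above is in scope; the keys and values are hypothetical:

import java.util.HashMap;
import java.util.Map;

public class FindCommonDemo {
    public static void main(String[] args) {
        Map<String, String> common = new HashMap<>();
        common.put("lang", "en");
        common.put("tz", "UTC");
        common.put("theme", "dark");

        Map<String, String> incoming = new HashMap<>();
        incoming.put("lang", "en");
        incoming.put("tz", "CET");

        findCommon(common, incoming);
        // "tz" disagrees and "theme" is absent from the incoming map,
        // so only the agreeing pair survives:
        System.out.println(common); // {lang=en}
    }
}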
for (Entry<AccumuloColumnConstraint, Collection<Range>> constraintEntry : constraintRanges.asMap().entrySet()) {
    tasks.add(executor.submit(() -> {
        // 'scan' is the BatchScanner for the index table (its creation is elided in this excerpt);
        // scan the ranges for this constraint, fetching only this column's index family
        scan.setRanges(constraintEntry.getValue());
        scan.fetchColumnFamily(new Text(Indexer.getIndexColumnFamily(
                constraintEntry.getKey().getFamily().getBytes(),
                constraintEntry.getKey().getQualifier().getBytes()).array()));

        // collect one Range per distinct column qualifier found in the index
        Set<Range> columnRanges = new HashSet<>();
        for (Entry<Key, Value> entry : scan) {
            entry.getKey().getColumnQualifier(tmpQualifier);
            columnRanges.add(new Range(tmpQualifier));
        }

        LOG.debug("Retrieved %d ranges for index column %s", columnRanges.size(),
                constraintEntry.getKey().getName());
        scan.close();
        return columnRanges;
    }));
}
for (Future<Set<Range>> future : tasks) {
    if (finalRanges.isEmpty()) {
        finalRanges.addAll(future.get());
    } else {
        // keep only the ranges that matched every previous constraint (set intersection)
        finalRanges.retainAll(future.get());
    }
}
for (TaggedValue tv : returnValues) {
    if (keys == null) {
        keys = new HashSet<String>(tv.getContainerMapping().keySet());
        returnType = tv.getType();
    } else {
        // keep only keys present in every return value's container mapping
        keys.retainAll(tv.getContainerMapping().keySet());
    }
}
final HashMap<String, TaggedValue> resultMapping = new HashMap<String, TaggedValue>(keys.size());
final List<String> filteredMappings = new ArrayList<String>(keys.size());
for (TaggedValue tv : returnValues) {
    final Map<String, TaggedValue> cm = tv.getContainerMapping();
    for (String key : keys) {
        if (cm.containsKey(key)) {
            // an undefined mapping filters the key out
            if (!filteredMappings.contains(key) && cm.get(key) == null) {
                filteredMappings.add(key);
            }
            // first occurrence of the key: record its mapping
            else if (!filteredMappings.contains(key) && !resultMapping.containsKey(key)) {
                resultMapping.put(key, cm.get(key));
            }
            // conflicting mappings for the same key filter it out as well
            else if (!filteredMappings.contains(key)
                    && !cm.get(key).equals(resultMapping.get(key))) {
                filteredMappings.add(key);
                resultMapping.remove(key);
            }
        }
    }
}
// merge the values of the surviving keys; drop keys whose merge yields nothing
Iterator<Map.Entry<String, TaggedValue>> it = resultMapping.entrySet().iterator();
while (it.hasNext()) {
    Map.Entry<String, TaggedValue> entry = it.next();
    TaggedValue value = mergeReturnValues(Collections.singletonList(entry.getValue()));
    if (value == null) {
        it.remove();
    }
}
protected void writeDesign(Element trElement, DesignContext designContext) {
    Set<String> visited = new HashSet<>();
    for (Entry<String, CELL> entry : cells.entrySet()) {
        if (visited.contains(entry.getKey())) {
            continue;
        }
        visited.add(entry.getKey());

        Element cellElement = trElement.appendElement("td");

        // find the merged cell group, if any, that this column belongs to
        Optional<Entry<CELL, Set<String>>> groupCell = getCellGroups()
                .entrySet().stream().filter(groupEntry -> groupEntry
                        .getValue().contains(entry.getKey()))
                .findFirst();
        Stream<String> columnIds = Stream.of(entry.getKey());

        if (groupCell.isPresent()) {
            // order the group's column ids by the row's cell order
            Set<String> orderedSet = new LinkedHashSet<>(cells.keySet());
            orderedSet.retainAll(groupCell.get().getValue());
            columnIds = orderedSet.stream();
            visited.addAll(orderedSet);
            cellElement.attr("colspan", "" + orderedSet.size());
            writeCellState(cellElement, designContext, groupCell.get().getKey());
        } else {
            writeCellState(cellElement, designContext, entry.getValue().getCellState());
        }
    }
}
/**
 * Slow implementation.
 */
public Arc getArc(Object source, Object target) {
    Set arcsFromSource = arcsBySource.get(source);
    Set arcsToTarget = arcsByTarget.get(target);
    Set result = Generics.newHashSet();
    result.addAll(arcsFromSource);
    result.retainAll(arcsToTarget); // intersection
    if (result.size() < 1) {
        return null;
    }
    if (result.size() > 1) {
        throw new RuntimeException("Problem in TransducerGraph data structures.");
    }
    // get the only member
    Iterator iterator = result.iterator();
    return (Arc) iterator.next();
}
private Map<String, JsonNode> buildDigests(final JsonNode schema) {
    final ImmutableMap.Builder<String, JsonNode> builder = ImmutableMap.builder();
    final Map<String, Digester> map = Maps.newHashMap(digesterMap);

    // keep only the digesters for keywords actually present in the schema
    map.keySet().retainAll(Sets.newHashSet(schema.fieldNames()));

    for (final Map.Entry<String, Digester> entry : map.entrySet())
        builder.put(entry.getKey(), entry.getValue().digest(schema));

    return builder.build();
}
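The retainAll call above works because keySet() is a live view that writes through to the map; a standalone sketch with plain java.util and hypothetical keyword names:

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

public class KeyTrimDemo {
    public static void main(String[] args) {
        Map<String, String> digesters = new HashMap<>();
        digesters.put("minimum", "minimumDigester");
        digesters.put("maximum", "maximumDigester");
        // removals on the key-set view remove the whole entry from the map
        digesters.keySet().retainAll(new HashSet<>(Arrays.asList("minimum", "type")));
        System.out.println(digesters); // {minimum=minimumDigester}
    }
}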
@Override
public boolean retainAll(Collection<?> c) {
    try {
        return super.retainAll(checkNotNull(c));
    } catch (UnsupportedOperationException e) {
        // if the iterators don't support remove
        Set<Object> keys = Sets.newHashSetWithExpectedSize(c.size());
        for (Object o : c) {
            if (contains(o)) {
                Entry<?, ?> entry = (Entry<?, ?>) o;
                keys.add(entry.getKey());
            }
        }
        return map().keySet().retainAll(keys);
    }
}
Set<Integer> numbers = new TreeSet<Integer>();
numbers.add(2);
numbers.add(5);
System.out.println(numbers);              // "[2, 5]"
System.out.println(numbers.contains(7));  // "false"
System.out.println(numbers.add(5));       // "false"
System.out.println(numbers.size());       // "2"

int sum = 0;
for (int n : numbers) {
    sum += n;
}
System.out.println("Sum = " + sum);       // "Sum = 7"

numbers.addAll(Arrays.asList(1, 2, 3, 4, 5));
System.out.println(numbers);              // "[1, 2, 3, 4, 5]"
numbers.removeAll(Arrays.asList(4, 5, 6, 7));
System.out.println(numbers);              // "[1, 2, 3]"
numbers.retainAll(Arrays.asList(2, 3, 4, 5));
System.out.println(numbers);              // "[2, 3]"
public void testEntrySetRetainAll() {
    final Map<K, V> map;
    try {
        map = makePopulatedMap();
    } catch (UnsupportedOperationException e) {
        return;
    }

    Set<Entry<K, V>> entrySet = map.entrySet();
    Set<Entry<K, V>> entriesToRetain = singleton(entrySet.iterator().next());
    if (supportsRemove) {
        boolean shouldRemove = (entrySet.size() > entriesToRetain.size());
        boolean didRemove = entrySet.retainAll(entriesToRetain);
        assertEquals(shouldRemove, didRemove);
        assertEquals(entriesToRetain.size(), map.size());
        for (Entry<K, V> entry : entriesToRetain) {
            assertTrue(entrySet.contains(entry));
        }
    } else {
        try {
            entrySet.retainAll(entriesToRetain);
            fail("Expected UnsupportedOperationException.");
        } catch (UnsupportedOperationException expected) {
        }
    }
    assertInvariants(map);
}
private ListOffsetResult fetchOffsetsByTimes(Map<TopicPartition, Long> timestampsToSearch,
                                             Timer timer,
                                             boolean requireTimestamps) {
    ListOffsetResult result = new ListOffsetResult();
    if (timestampsToSearch.isEmpty())
        return result;

    Map<TopicPartition, Long> remainingToSearch = new HashMap<>(timestampsToSearch);
    do {
        RequestFuture<ListOffsetResult> future = sendListOffsetsRequests(remainingToSearch, requireTimestamps);

        client.poll(future, timer);
        if (!future.isDone())
            break;

        if (future.succeeded()) {
            ListOffsetResult value = future.value();
            result.fetchedOffsets.putAll(value.fetchedOffsets);
            if (value.partitionsToRetry.isEmpty())
                return result;

            // keep searching only for the partitions that need to be retried
            remainingToSearch.keySet().retainAll(value.partitionsToRetry);
        } else if (!future.isRetriable()) {
            throw future.exception();
        }

        if (metadata.updateRequested())
            client.awaitMetadataUpdate(timer);
        else
            timer.sleep(retryBackoffMs);
    } while (timer.notExpired());

    throw new TimeoutException("Failed to get offsets by times in " + timer.elapsedMs() + "ms");
}
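The retainAll on remainingToSearch.keySet() shrinks the request map in place between retries; a standalone sketch of the same idiom with hypothetical partition names:

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class RetryShrinkDemo {
    public static void main(String[] args) {
        Map<String, Long> remaining = new HashMap<>();
        remaining.put("topic-0", 100L);
        remaining.put("topic-1", 100L);
        remaining.put("topic-2", 100L);

        // pretend only topic-1 needs to be retried
        Set<String> partitionsToRetry = new HashSet<>(Arrays.asList("topic-1"));

        // removing keys via the key-set view drops each whole entry from the map
        remaining.keySet().retainAll(partitionsToRetry);
        System.out.println(remaining); // {topic-1=100}
    }
}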