/** * Retrieve "known" attributes from the session, i.e. attributes listed * by name in {@code @SessionAttributes} or attributes previously stored * in the model that matched by type. * @param session the current session * @return a map with handler session attributes, possibly empty */ public Map<String, Object> retrieveAttributes(WebSession session) { Map<String, Object> attributes = new HashMap<>(); this.knownAttributeNames.forEach(name -> { Object value = session.getAttribute(name); if (value != null) { attributes.put(name, value); } }); return attributes; }
// NOTE(review): this fragment appears truncated and garbled — braces are unbalanced
// and what looks like two unrelated method bodies are fused mid-line: (1) building
// newTaskToNodePort/neededConnections from the assignment's executor->node:port map,
// and (2) rebuilding a NodeInfo->IConnection map (next = copy of prev, connect new,
// remove stale). Recover the original source before editing; do not apply as-is.
// Visible logic: for each task in the assignment that is an outbound task, record its
// NodeInfo; if the task is not local, its NodeInfo requires a remote connection.
if (null != assignment) { Map<Integer, NodeInfo> taskToNodePort = StormCommon.taskToNodeport(assignment.get_executor_node_port()); for (Map.Entry<Integer, NodeInfo> taskToNodePortEntry : taskToNodePort.entrySet()) { Integer task = taskToNodePortEntry.getKey(); if (outboundTasks.contains(task)) { newTaskToNodePort.put(task, taskToNodePortEntry.getValue()); if (!localTaskIds.contains(task)) { neededConnections.add(taskToNodePortEntry.getValue()); Map<NodeInfo, IConnection> next = new HashMap<>(prev); for (NodeInfo nodeInfo : newConnections) { next.put(nodeInfo, mqContext.connect( topologyId, removeConnections.forEach(next::remove); return next; });
/**
 * Creates {@code clientsPerNode} load-balance clients for the given node,
 * records them in {@code clientMap} and {@code allClients}, and starts them
 * immediately if this manager is already running.
 *
 * @param nodeId the node the clients will communicate with
 * @return the newly created clients
 */
private Set<AsyncLoadBalanceClient> registerClients(final NodeIdentifier nodeId) {
    final Set<AsyncLoadBalanceClient> created = new HashSet<>();
    for (int count = 0; count < clientsPerNode; count++) {
        final AsyncLoadBalanceClient newClient = clientFactory.createClient(nodeId);
        created.add(newClient);
        logger.debug("Added client {} for communicating with Node {}", newClient, nodeId);
    }

    clientMap.put(nodeId, created);
    allClients.addAll(created);

    // If already running, newly registered clients must be started here;
    // otherwise start() happens later when the manager itself starts.
    if (running) {
        for (final AsyncLoadBalanceClient client : created) {
            client.start();
        }
    }

    return created;
}
/**
 * Create the map of predicate functions that specify which columns are to be included.
 * <p>
 * Qualified column names are comma-separated strings that are each {@link #parse(String) parsed} into {@link ColumnId} objects.
 *
 * @param columnBlacklist the comma-separated string listing the qualified names of the columns to be explicitly disallowed;
 *            may be null
 * @return the predicate function; never null
 */
public static Map<TableId, Predicate<Column>> filter(String columnBlacklist) {
    Map<TableId, Predicate<Column>> exclusionFilterByTable = new HashMap<>();
    // BUGFIX: the javadoc allows a null blacklist, but the previous code invoked
    // forEach on the (null) exclusion set and threw a NullPointerException.
    // A null/absent blacklist simply means no columns are excluded.
    if (columnBlacklist == null) {
        return exclusionFilterByTable;
    }
    Set<ColumnId> columnExclusions = Strings.setOf(columnBlacklist, ColumnId::parse);
    // Group the excluded column names (lower-cased for case-insensitive matching) by table.
    Map<TableId, Set<String>> excludedColumnNamesByTable = new HashMap<>();
    columnExclusions.forEach(columnId -> {
        excludedColumnNamesByTable
                .computeIfAbsent(columnId.tableId(), tableId -> new HashSet<>())
                .add(columnId.columnName().toLowerCase());
    });
    // For each table, a column passes the filter iff its name is not in the excluded set.
    excludedColumnNamesByTable.forEach((tableId, excludedColumnNames) -> {
        exclusionFilterByTable.put(tableId,
                col -> !excludedColumnNames.contains(col.name().toLowerCase()));
    });
    return exclusionFilterByTable;
}
/** * Get the coreference chain for just this sentence. * Note that this method is actually fairly computationally expensive to call, as it constructs and prunes * the coreference data structure for the entire document. * * @return A coreference chain, but only for this sentence */ public Map<Integer, CorefChain> coref() { // Get the raw coref structure Map<Integer, CorefChain> allCorefs = document.coref(); // Delete coreference chains not in this sentence Set<Integer> toDeleteEntirely = new HashSet<>(); for (Map.Entry<Integer, CorefChain> integerCorefChainEntry : allCorefs.entrySet()) { CorefChain chain = integerCorefChainEntry.getValue(); List<CorefChain.CorefMention> mentions = new ArrayList<>(chain.getMentionsInTextualOrder()); mentions.stream().filter(m -> m.sentNum != this.sentenceIndex() + 1).forEach(chain::deleteMention); if (chain.getMentionsInTextualOrder().isEmpty()) { toDeleteEntirely.add(integerCorefChainEntry.getKey()); } } // Clean up dangling empty chains toDeleteEntirely.forEach(allCorefs::remove); // Return return allCorefs; }
// NOTE(review): this fragment appears truncated — braces are unbalanced, the statement
// `ignore.add(entry.getKey());` is duplicated, and the trailing
// `ignore.forEach(newMap::remove); return newMap;` has no closing braces for the
// enclosing loop/conditionals. Recover the original source before editing.
// Visible logic: seed `ignore` with defaults, convert simple-typed values, and either
// mask ("cover") or mark-for-removal properties listed in `ignoreProperty`, then strip
// every ignored key from newMap before returning it.
ignore.addAll(defaultIgnoreProperties); for (Map.Entry<String, Object> entry : newMap.entrySet()) { Object value = entry.getValue(); if (simpleTypePredicate.test(type)) { value = simpleConvertBuilder.apply(type).apply(value, null); if (ignoreProperty.contains(entry.getKey())) { if (cover) { value = coverStringConvert.apply(value); } else { ignore.add(entry.getKey()); ignore.add(entry.getKey()); ignore.forEach(newMap::remove); return newMap;
@Override public Set<Type> getTypes() { Set<Type> contracts = new HashSet<>(); contracts.addAll(binding.getContracts()); // Merge aliases with the main bean if (!binding.getAliases().isEmpty()) { binding.getAliases().forEach(alias -> contracts.add(alias.getContract())); } contracts.add(Object.class); return contracts; }
/**
 * Reconciles the tracked partition resources with the given assignment:
 * registers a {@code PartitionResource} for each newly assigned partition and
 * removes (and stops) resources for partitions no longer assigned.
 *
 * @param collection the complete set of partitions now assigned
 */
@Override
public void assign(Set<StreamPartition<String>> collection)
{
  checkIfClosed();
  // computeIfAbsent only constructs a PartitionResource when the key is missing;
  // the previous putIfAbsent allocated one unconditionally and discarded it when
  // the partition was already registered.
  collection.forEach(
      streamPartition -> partitionResources.computeIfAbsent(
          streamPartition,
          PartitionResource::new
      )
  );

  // Explicit Iterator so we can remove entries while iterating, and stop the
  // background fetch of each partition that has been unassigned.
  for (Iterator<Map.Entry<StreamPartition<String>, PartitionResource>> i = partitionResources.entrySet()
                                                                                             .iterator(); i.hasNext(); ) {
    Map.Entry<StreamPartition<String>, PartitionResource> entry = i.next();
    if (!collection.contains(entry.getKey())) {
      i.remove();
      entry.getValue().stopBackgroundFetch();
    }
  }
}
/**
 * Parses a {@code .properties}-formatted string into a key/value map.
 * An empty or null payload yields an empty map (with a warning), since a
 * configured config centre is expected to contain at least one item.
 *
 * @param content the raw properties text, possibly null or empty
 * @return a mutable map of the parsed properties; never null
 * @throws IOException if the properties text cannot be read
 */
public static Map<String, String> parseProperties(String content) throws IOException {
    Map<String, String> result = new HashMap<>();
    if (StringUtils.isEmpty(content)) {
        logger.warn("You specified the config centre, but there's not even one single config item in it.");
        return result;
    }
    Properties properties = new Properties();
    properties.load(new StringReader(content));
    for (String key : properties.stringPropertyNames()) {
        result.put(key, properties.getProperty(key));
    }
    return result;
}
// NOTE(review): this fragment appears truncated — braces are unbalanced, and the
// `fetchedRecords.entrySet().forEach(entry -> ...)` lambda parameter `entry` shadows
// the `entry` loop variable of the enclosing for-loop, which would not compile in Java.
// Recover the original test source before editing.
// Visible logic: seek all partitions to offset 0, build a FetchResponse returning two
// records per partition, then assert each poll yields exactly 2 records at the
// expected consecutive offsets, advancing nextFetchOffsets by 2.
Set<TopicPartition> topicPartitions = new HashSet<>(); for (int i = 0; i < numPartitions; i++) topicPartitions.add(new TopicPartition(topicName, i)); topicPartitions.forEach(tp -> subscriptions.seek(tp, 0L)); FetchRequest fetchRequest = (FetchRequest) request.requestBuilder().build(); LinkedHashMap<TopicPartition, FetchResponse.PartitionData<MemoryRecords>> responseMap = new LinkedHashMap<>(); for (Map.Entry<TopicPartition, FetchRequest.PartitionData> entry : fetchRequest.fetchData().entrySet()) { TopicPartition tp = entry.getKey(); long offset = entry.getValue().fetchOffset; responseMap.put(tp, new FetchResponse.PartitionData<>(Errors.NONE, offset + 2L, offset + 2, 0L, null, buildRecords(offset, 2, offset))); if (!fetchedRecords.isEmpty()) { fetchesRemaining.decrementAndGet(); fetchedRecords.entrySet().forEach(entry -> { TopicPartition tp = entry.getKey(); List<ConsumerRecord<byte[], byte[]>> records = entry.getValue(); assertEquals(2, records.size()); long nextOffset = nextFetchOffsets.get(tp); assertEquals(nextOffset, records.get(0).offset()); assertEquals(nextOffset + 1, records.get(1).offset()); nextFetchOffsets.put(tp, nextOffset + 2); });
// NOTE(review): this fragment appears truncated — braces are unbalanced (several `if`
// and `for` bodies are never closed before `realIgnore.forEach(map::remove);`).
// Recover the original source before editing.
// Visible logic: walk the map's entries; for null values, presumably apply the
// nullPropertyToEmpty feature via the property's descriptor; for entries with a
// registered converter, replace the value, and collect keys whose converted value is
// null into realIgnore, which is finally removed from the map.
Set<String> realIgnore = new HashSet<>(); for (Map.Entry<String, Object> entry : map.entrySet()) { Object value = entry.getValue(); if (value == null) { if (ToString.Feature.hasFeature(features, ToString.Feature.nullPropertyToEmpty)) { boolean isSimpleType = false; PropertyDescriptor propertyDescriptor = descriptorMap.get(entry.getKey()); Class propertyType = null; if (propertyDescriptor != null) { BiFunction<Object, ConvertConfig, Object> converter = converts.get(entry.getKey()); if (null != converter) { entry.setValue(converter.apply(value, convertConfig)); if (entry.getValue() == null) { realIgnore.add(entry.getKey()); realIgnore.forEach(map::remove);
/**
 * Parses properties-file content into a {@code Map<String, String>}.
 * A null or empty payload produces an empty map and logs a warning, because a
 * configured config centre should carry at least one config item.
 *
 * @param content raw properties text; may be null or empty
 * @return mutable map of property names to values, never null
 * @throws IOException if reading the properties content fails
 */
public static Map<String, String> parseProperties(String content) throws IOException {
    Map<String, String> parsed = new HashMap<>();
    if (!StringUtils.isEmpty(content)) {
        Properties props = new Properties();
        props.load(new StringReader(content));
        props.stringPropertyNames().forEach(name -> parsed.put(name, props.getProperty(name)));
    } else {
        logger.warn("You specified the config centre, but there's not even one single config item in it.");
    }
    return parsed;
}