private Nature(String name) {
    if (idMap == null) idMap = new TreeMap<String, Integer>();
    assert !idMap.containsKey(name);
    this.name = name;
    ordinal = idMap.size();                 // ordinals follow registration order
    idMap.put(name, ordinal);
    // grow the shared values array by one slot and append this instance
    Nature[] extended = new Nature[idMap.size()];
    if (values != null) System.arraycopy(values, 0, extended, 0, values.length);
    extended[ordinal] = this;
    values = extended;
}
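// A minimal, self-contained sketch of the same "dynamic enum" technique
// (hypothetical class, not from the source): each instance registers its name
// in a shared TreeMap and appends itself to an array that grows by one.
import java.util.TreeMap;

class DynEnum {
    private static TreeMap<String, Integer> idMap = new TreeMap<String, Integer>();
    private static DynEnum[] values = new DynEnum[0];
    final String name;
    final int ordinal;

    DynEnum(String name) {
        this.name = name;
        this.ordinal = idMap.size();              // next free ordinal
        idMap.put(name, ordinal);
        DynEnum[] extended = new DynEnum[idMap.size()];
        System.arraycopy(values, 0, extended, 0, values.length);
        extended[ordinal] = this;                 // append this instance
        values = extended;
    }

    public static void main(String[] args) {
        DynEnum n = new DynEnum("n");
        DynEnum v = new DynEnum("v");
        System.out.println(v.ordinal);                    // 1
        System.out.println(values[idMap.get("n")].name);  // n
    }
}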
private static URL[] getURLs(String path) throws IOException {
    // sort the jar files by name so the resulting class path is deterministic
    TreeMap<String, File> jars = new TreeMap<String, File>();
    File[] files = new File(path).listFiles();
    for (int i = 0; files != null && i < files.length; i++) {
        if (files[i].getName().startsWith(".")) continue;   // skip hidden files
        jars.put(files[i].getName(), files[i]);
    }
    URL[] urls = new URL[jars.size()];
    ArrayList<File> v = new ArrayList<File>(jars.values());
    for (int i = 0; i < urls.length; i++) {
        urls[i] = v.get(i).toURI().toURL();
    }
    return urls;
}
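// A short usage sketch (hypothetical caller, not from the source): feed the
// sorted jar URLs into a URLClassLoader. Because the TreeMap keys on file
// name, the class path order is alphabetical regardless of listFiles() order.
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;

class PluginLoader {
    static ClassLoader forDirectory(String dir) throws IOException {
        URL[] urls = getURLs(dir);   // the helper above; assumed accessible here
        return new URLClassLoader(urls, PluginLoader.class.getClassLoader());
    }
}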
// Excerpt, reconstructed ordering; the enclosing parse loop and the final
// loop body are missing from the fragment.
// A key ending in '@' carries the total count for that tag.
if (param[0].endsWith("@")) {
    tagMap.put(param[0], Integer.parseInt(param[2]));
    continue;
}
// Otherwise read the attribute's per-tag frequencies from the paired columns.
for (int i = 0; i < attribute.p.length; ++i) {
    attribute.p[i] = Integer.parseInt(param[2 + 2 * i]);
}
map.put(param[0], attribute);
// ...
if (map.size() == 0) return false;
// Second pass: relate every attribute back to its tag's recorded total.
for (Map.Entry<String, Attribute> entry : map.entrySet()) {
    String key = entry.getKey();
    String[] param = key.split("@", 2);
    Attribute attribute = entry.getValue();
    int total = tagMap.get(param[0] + "@");
    for (int i = 0; i < attribute.p.length; ++i) {
        // body elided in the fragment (presumably scales p[i] by total)
    }
}
// Excerpt, reconstructed ordering; ellipses mark code missing from the fragment.
// Group candidate files into stripes keyed by their end row.
byte[] endRow = entry.getKey();
if (stripe == null) {
    stripe = new ArrayList<>();
    candidateStripes.put(endRow, stripe);
}
// ...
// Validate: the first stripe must start open-ended and the last must end
// open-ended, or the stripes do not cover the whole key space.
if (!files.isEmpty()) {
    HStoreFile firstFile = candidateStripes.firstEntry().getValue().get(0);
    boolean isOpen = isOpen(startOf(firstFile)) && isOpen(candidateStripes.lastKey());
    if (!isOpen) {
        LOG.warn(/* message start truncated in the fragment */
            + Bytes.toString(candidateStripes.lastKey()) + "]");
        if (!hasOverlaps) {
            ensureEdgeStripeMetadata(candidateStripes.firstEntry().getValue(), true);
            // ...
        } else {
            // Fall back: demote every candidate file to level 0.
            LOG.warn("Inconsistent files, everything goes to L0.");
            for (ArrayList<HStoreFile> files0 : candidateStripes.values()) {
                for (HStoreFile sf : files0) {
                    insertFileIntoStripe(level0Files, sf);
                }
            }
            // ...
        }
    }
}
// Copy the surviving stripes into the new state; the last stripe's end row is
// implicit (open-ended), so one fewer end row than stripes is stored.
state.stripeFiles = new ArrayList<>(candidateStripes.size());
state.stripeEndRows = new byte[Math.max(0, candidateStripes.size() - 1)][];
ArrayList<HStoreFile> newAllFiles = new ArrayList<>(level0Files);
int i = candidateStripes.size() - 1;
for (Map.Entry<byte[], ArrayList<HStoreFile>> entry2 : candidateStripes.entrySet()) {
    // ...
    if (i > 0) {
        state.stripeEndRows[state.stripeFiles.size() - 1] = entry2.getKey();
    }
    // ...
}
/**
 * The method normalizes the weights and bone indexes data.
 * First it truncates each group to 'maximumSize' entries because that is how
 * many weights per vertex JME can handle (MAXIMUM_WEIGHTS_PER_VERTEX).
 * Next it rescales the weights so that they sum to 1.
 * @param maximumSize
 *            the maximum size that the data will be truncated to (usually: MAXIMUM_WEIGHTS_PER_VERTEX)
 */
private void normalizeBoneBuffers(int maximumSize) {
    for (TreeMap<Float, Integer> group : boneWeightAndIndexes) {
        if (group.size() > maximumSize) {
            // descendingMap() is a live view, so polling its last entries
            // removes the smallest weights from the backing map
            NavigableMap<Float, Integer> descendingWeights = group.descendingMap();
            while (descendingWeights.size() > maximumSize) {
                descendingWeights.pollLastEntry();
            }
        }

        // normalizing the weights so that the sum of the values is equal to '1'
        float sum = 0;
        for (Entry<Float, Integer> entry : group.entrySet()) {
            sum += entry.getKey();
        }
        if (sum != 0 && sum != 1) {
            TreeMap<Float, Integer> normalizedGroup = new TreeMap<Float, Integer>();
            for (Entry<Float, Integer> entry : group.entrySet()) {
                normalizedGroup.put(entry.getKey() / sum, entry.getValue());
            }
            group.clear();
            group.putAll(normalizedGroup);
        }
    }
}
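// A self-contained sketch of the truncation trick used above (hypothetical
// data): because descendingMap() is a live view of the TreeMap, polling its
// last entries drops the *smallest* keys from the backing map.
import java.util.NavigableMap;
import java.util.TreeMap;

class TopWeights {
    public static void main(String[] args) {
        TreeMap<Float, Integer> weights = new TreeMap<Float, Integer>();
        weights.put(0.1f, 3); weights.put(0.5f, 1); weights.put(0.3f, 2); weights.put(0.05f, 4);
        NavigableMap<Float, Integer> desc = weights.descendingMap();
        while (desc.size() > 2) {
            desc.pollLastEntry();     // removes 0.05, then 0.1
        }
        System.out.println(weights);  // {0.3=2, 0.5=1}
    }
}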
// Excerpt, reconstructed ordering; the enclosing loops, the classification
// condition, and the split that produces eB/eC are missing from the fragment.
// Classify each edge by geometry length against the threshold.
if (/* edge shorter than this.threshold */) {
    this.shortLengths.put(i, edge);
} else {
    this.lengths.put(i, edge);
}
// ...
int si = this.lengths.size();
// Scan for an edge longer than the threshold and remember it.
int ind = entry.getKey();
if (entry.getValue().getGeometry().getLength() > this.threshold) {
    index = ind;
    e = entry.getValue();
}
// ...
// After splitting the long edge, re-classify the first half; the second half
// stays in the long-edge map.
if (/* eB shorter than this.threshold */) {
    this.shortLengths.put(eB.getId(), eB);
} else {
    this.lengths.put(eB.getId(), eB);
}
this.lengths.put(eC.getId(), eC);
// ...
// Materialize the retained edges as line strings.
for (Edge e : this.lengths.values()) {
    LineString l = e.getGeometry().toGeometry(this.geomFactory);
    edges.add(l);
}
@Override
public int build(TreeMap<String, V> keyValueMap) {
    int size = keyValueMap.size();
    int[] indexArray = new int[size];
    valueArray = (V[]) keyValueMap.values().toArray(); // unchecked cast; rewritten below
    List<String> keyList = new ArrayList<String>(size);
    int i = 0;
    // TreeMap iterates in key order, so keyList comes out sorted,
    // with valueArray lined up index-for-index
    for (Entry<String, V> entry : keyValueMap.entrySet()) {
        indexArray[i] = i;
        valueArray[i] = entry.getValue();
        keyList.add(entry.getKey());
        ++i;
    }
    build(keyList, indexArray);
    return 0;
}
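// A self-contained sketch of the extraction step above (hypothetical values):
// a TreeMap yields keys in sorted order, so the key list is already sorted
// and the parallel value array lines up with it by index.
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

class SortedExtract {
    public static void main(String[] args) {
        TreeMap<String, Integer> map = new TreeMap<String, Integer>();
        map.put("banana", 2); map.put("apple", 1);
        List<String> keys = new ArrayList<String>(map.size());
        Integer[] vals = new Integer[map.size()];
        int i = 0;
        for (Map.Entry<String, Integer> e : map.entrySet()) {
            vals[i] = e.getValue();
            keys.add(e.getKey());
            ++i;
        }
        System.out.println(keys);                    // [apple, banana]
        System.out.println(vals[0] + "," + vals[1]); // 1,2
    }
}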
// Excerpt, reconstructed braces; the buffer setup between the two blocks is
// missing from the fragment.
// First pass: measure the total payload size.
int propertiesSizeInBytes = 0;
for (Map.Entry<String, String> entry : sortedProperties.entrySet()) {
    propertiesSizeInBytes += entry.getKey().getBytes().length;
    propertiesSizeInBytes += entry.getValue().getBytes().length;
}
int pointersSizeInBytes = 2 * sortedProperties.size() * RECORD_SIZE_IN_BYTES;
byte[] bytes = new byte[META_DATA_SIZE_IN_BYTES + pointersSizeInBytes + propertiesSizeInBytes];
// ...
// Second pass: write the entry count, then pack each value back-to-back and
// record its end offset so a reader can delimit the values.
byteBuffer.putInt(sortedProperties.size());
for (String value : sortedProperties.values()) {
    propertiesBuffer.put(value.getBytes());
    byteBuffer.putInt(propertiesBuffer.position());
}
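// A self-contained sketch of that layout (simplified, hypothetical data; the
// real record stores keys too, and the original uses the platform default
// charset where this sketch pins UTF-8): values are packed back-to-back and
// each value's *end* position is recorded, so value i spans [end[i-1], end[i]).
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.TreeMap;

class PackedValues {
    public static void main(String[] args) {
        TreeMap<String, String> props = new TreeMap<String, String>();
        props.put("a", "foo"); props.put("b", "quux");
        ByteBuffer data = ByteBuffer.allocate(64);
        int[] end = new int[props.size()];
        int i = 0;
        for (String v : props.values()) {
            data.put(v.getBytes(StandardCharsets.UTF_8));
            end[i++] = data.position();              // end offset of value i
        }
        // read back value 1 ("quux") using the recorded offsets
        int from = end[0], to = end[1];
        byte[] raw = new byte[to - from];
        data.position(from);
        data.get(raw);
        System.out.println(new String(raw, StandardCharsets.UTF_8)); // quux
    }
}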
// Excerpt, reconstructed; the allMatch lambda's closing return and several
// intervening blocks are missing from the fragment.
if (taskCheckpoints.entrySet().stream().anyMatch(
        sequenceCheckpoint -> sequenceCheckpoint.getValue().entrySet().stream().allMatch(
            partitionOffset -> {
                OrderedSequenceNumber<SequenceOffsetType> sequence =
                        makeSequenceNumber(partitionOffset.getValue());
                OrderedSequenceNumber<SequenceOffsetType> latestOffset = makeSequenceNumber(
                        latestOffsetsFromDb == null
                            ? partitionOffset.getValue()
                            : latestOffsetsFromDb.getOrDefault(
                                partitionOffset.getKey(),
                                partitionOffset.getValue()));
                // return comparing 'sequence' to 'latestOffset' elided in the fragment
            })
            && earliestConsistentSequenceId.compareAndSet(-1, sequenceCheckpoint.getKey()))
        || (pendingCompletionTaskGroups.getOrDefault(groupId, new CopyOnWriteArrayList<>()).size() > 0
            && earliestConsistentSequenceId.compareAndSet(-1, taskCheckpoints.firstKey()))) {
    // ...
}
// ...
// Later: a task is added to the kill list when its checkpoints disagree with
// the task group's (the leading operand of the first comparison is elided).
if (/* ... */.equals(taskGroup.checkpointSequences.firstEntry().getValue())
        || taskCheckpoints.tailMap(taskGroup.checkpointSequences.firstKey()).size()
            != taskGroup.checkpointSequences.size()) {
    log.debug(
        "Adding task [%s] to kill list, checkpoints[%s], taskgroup checkpoints [%s]",
        // ...
/**
 * Increment the version info associated with the given node
 *
 * @param node The node
 * @param time The timestamp to record on the clock
 */
public void incrementVersion(int node, long time) {
    if (node < 0 || node > Short.MAX_VALUE)
        throw new IllegalArgumentException(node + " is outside the acceptable range of node ids.");

    this.timestamp = time;

    Long version = versionMap.get((short) node);
    if (version == null) {
        version = 1L;
    } else {
        version = version + 1L;
    }

    versionMap.put((short) node, version);
    // note: the size check runs after the put, so the clock fails once it
    // holds MAX_NUMBER_OF_VERSIONS entries
    if (versionMap.size() >= MAX_NUMBER_OF_VERSIONS) {
        throw new IllegalStateException("Vector clock is full!");
    }
}
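// A self-contained sketch of the per-node counter idiom above (standalone
// hypothetical map; the real class also tracks a timestamp and a size cap):
import java.util.TreeMap;

class ClockDemo {
    public static void main(String[] args) {
        TreeMap<Short, Long> versionMap = new TreeMap<Short, Long>();
        short node = 3;
        Long version = versionMap.get(node);
        versionMap.put(node, version == null ? 1L : version + 1L); // first bump -> 1
        System.out.println(versionMap); // {3=1}
    }
}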
// Excerpt, reconstructed braces; the per-transition comparison body is
// missing from the fragment.
TreeMap<Character, MDAGNode> outgoingTransitionTreeMap2 = node2.outgoingTransitionTreeMap;
if (outgoingTransitionTreeMap1.size() == outgoingTransitionTreeMap2.size()) {
    for (Entry<Character, MDAGNode> transitionKeyValuePair : outgoingTransitionTreeMap1.entrySet()) {
        Character currentCharKey = transitionKeyValuePair.getKey();
        MDAGNode currentTargetNode = transitionKeyValuePair.getValue();
        // ... compare node2's transition on currentCharKey against currentTargetNode
    }
}
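// A self-contained sketch of that comparison shape (hypothetical maps): two
// equally sized TreeMaps are walked entry by entry; for plain value equality,
// TreeMap.equals would do the same keys-and-values check in one call.
import java.util.Map;
import java.util.TreeMap;

class TransitionCompare {
    static boolean sameTransitions(TreeMap<Character, String> m1, TreeMap<Character, String> m2) {
        if (m1.size() != m2.size()) return false;
        for (Map.Entry<Character, String> e : m1.entrySet()) {
            String other = m2.get(e.getKey());
            if (other == null || !other.equals(e.getValue())) return false;
        }
        return true; // equivalent here to m1.equals(m2)
    }
}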
// Excerpt, reconstructed ordering; ellipses mark code missing from the fragment.
// Flush pass: collect every buffered event that is real and unfiltered.
entries = new ArrayList<>(pendingEvts.size());
for (CacheContinuousQueryEntry evt : pendingEvts.values()) {
    if (evt != HOLE && !evt.isFiltered())
        entries.add(new CacheContinuousQueryEvent<K, V>(cache, cctx, evt));
}
// ...
else {
    // Buffer the out-of-order event under its update counter.
    pendingEvts.put(entry.updateCounter(), entry);

    if (pendingEvts.size() >= MAX_BUFF_SIZE) {
        if (log.isDebugEnabled())
            log.debug("Pending events reached max of buffer size [lastFiredEvt=" + lastFiredEvt + /* ... */);
        // ...
    }

    // Drain the buffer in counter order while the sequence stays contiguous.
    Iterator<Map.Entry<Long, CacheContinuousQueryEntry>> iter = pendingEvts.entrySet().iterator();
    while (iter.hasNext()) {
        Map.Entry<Long, CacheContinuousQueryEntry> e = iter.next();
        boolean fire = e.getKey() == lastFiredEvt + 1;
        if (!fire)
            // filtered (skipped) updates may close the gap in the counter sequence
            fire = e.getKey() - filtered <= lastFiredEvt + 1;
        if (fire) {
            if (e.getValue() != HOLE && !e.getValue().isFiltered())
                entries.add(new CacheContinuousQueryEvent<K, V>(cache, cctx, e.getValue()));
            lastFiredEvt = e.getKey();
        }
        // ...
    }
}
/**
 * Save the .dat file to disk
 * @param map the value map to persist
 * @return true on success
 */
static boolean saveDat(TreeMap<String, Character> map) {
    try {
        DataOutputStream out = new DataOutputStream(
                new BufferedOutputStream(IOUtil.newOutputStream(path + Predefine.VALUE_EXT)));
        out.writeInt(map.size());
        // values only; the keys are recoverable from the trie saved below
        for (Character character : map.values()) {
            out.writeChar(character);
        }
        out.close();
    } catch (Exception e) {
        logger.warning("Failed to save values to " + path + Predefine.VALUE_EXT + ": " + e);
        return false;
    }
    return trie.save(path + Predefine.TRIE_EXT);
}
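// A counterpart loader sketch (assumption: the writer above emits an int
// count followed by that many chars; a plain FileInputStream stands in for
// the project's IOUtil helper):
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

class ValueLoader {
    static Character[] loadDat(String datPath) throws IOException {
        DataInputStream in = new DataInputStream(
                new BufferedInputStream(new FileInputStream(datPath)));
        int size = in.readInt();
        Character[] values = new Character[size];
        for (int i = 0; i < size; i++) {
            values[i] = in.readChar();
        }
        in.close();
        return values;
    }
}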
// Excerpt, reconstructed loop structure ('zimap' as the name of the
// id-to-zone map is assumed). This builds a string pool: each zone id is
// stored once, then referenced by a 2-byte index.
for (Entry<String, DateTimeZone> entry : zimap.entrySet()) {
    // pool the key id
    String id = entry.getKey();
    if (!idToIndex.containsKey(id)) {
        Short index = Short.valueOf(count);
        idToIndex.put(id, index);
        indexToId.put(index, id);
        if (++count == 0) {
            throw new InternalError("Too many time zone ids");
        }
    }
    // pool the id of the zone the key maps to (may differ for aliases)
    id = entry.getValue().getID();
    if (!idToIndex.containsKey(id)) {
        Short index = Short.valueOf(count);
        idToIndex.put(id, index);
        indexToId.put(index, id);
        if (++count == 0) {
            throw new InternalError("Too many time zone ids");
        }
    }
}
// Write the pool, ordered by index.
dout.writeShort(indexToId.size());
for (String id : indexToId.values()) {
    dout.writeUTF(id);
}
// Write each mapping as a pair of pool indexes.
for (Entry<String, DateTimeZone> entry : zimap.entrySet()) {
    String id = entry.getKey();
    dout.writeShort(idToIndex.get(id).shortValue());
    id = entry.getValue().getID();
    dout.writeShort(idToIndex.get(id).shortValue());
}
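// A self-contained sketch of the string-pool technique above (hypothetical
// data): each distinct id is written once via writeUTF, and every later
// reference costs only a 2-byte index.
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

class StringPool {
    public static void main(String[] args) throws IOException {
        String[] ids = { "UTC", "Europe/London", "UTC" };
        Map<String, Short> idToIndex = new HashMap<String, Short>();
        TreeMap<Short, String> indexToId = new TreeMap<Short, String>();
        short count = 0;
        for (String id : ids) {
            if (!idToIndex.containsKey(id)) {
                idToIndex.put(id, count);
                indexToId.put(count, id);
                count++;
            }
        }
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream dout = new DataOutputStream(bytes);
        dout.writeShort(indexToId.size());           // pool size
        for (String id : indexToId.values()) {
            dout.writeUTF(id);                       // pool entries, in index order
        }
        for (String id : ids) {
            dout.writeShort(idToIndex.get(id).shortValue()); // references by index
        }
        System.out.println(bytes.size() + " bytes written");
    }
}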