Refine search
/**
 * Builds the position index used for lenient explicit ordering: each element
 * of {@code order} maps to its list position. If an element appears more than
 * once, the later position wins.
 *
 * @param order elements in their desired order
 */
public LenientExplicitOrdering(List<T> order) {
    this.idxMap = Maps.newHashMapWithExpectedSize(order.size());
    int position = 0;
    for (T element : order) {
        idxMap.put(element, position++);
    }
}
/**
 * Returns an unmodifiable snapshot of the current counter values: each
 * {@link AtomicLong} in {@code values} is read once and its long value
 * copied out under the same key.
 */
public Map<Integer, Long> asMap() {
    Map<Integer, Long> snapshot = Maps.newHashMapWithExpectedSize(values.size());
    for (Map.Entry<Integer, AtomicLong> counter : values.entrySet()) {
        snapshot.put(counter.getKey(), counter.getValue().get());
    }
    return Collections.unmodifiableMap(snapshot);
}
/**
 * Converts a map of gauge snapshots to their Thrift representation.
 * Every value is expected to be an {@link AsmGaugeSnapshot}; a different
 * runtime type fails with a {@link ClassCastException}, as in the original.
 *
 * @param snapshots gauge snapshots keyed by metric id
 * @return Thrift snapshots under the same keys
 */
public static Map<Integer, MetricSnapshot> toThriftGaugeSnapshots(Map<Integer, AsmSnapshot> snapshots) {
    Map<Integer, MetricSnapshot> converted = Maps.newHashMapWithExpectedSize(snapshots.size());
    for (Map.Entry<Integer, AsmSnapshot> snapshot : snapshots.entrySet()) {
        AsmGaugeSnapshot gauge = (AsmGaugeSnapshot) snapshot.getValue();
        converted.put(snapshot.getKey(), convert(gauge));
    }
    return converted;
}
/**
 * Fetches multiple keys in one round trip, serializing the keys and
 * transforms to bytes, delegating to the underlying byte store, and
 * deserializing every returned version back to the value type.
 *
 * @param keys keys to fetch (validated up front)
 * @param transforms optional per-key transforms, serialized alongside the keys
 * @return deserialized versioned values keyed by the original key objects
 * @throws VoldemortException propagated from the underlying store
 */
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys, Map<K, T> transforms) throws VoldemortException {
    StoreUtils.assertValidKeys(keys);
    Map<ByteArray, K> byteKeyToKey = keysToBytes(keys);
    Map<ByteArray, List<Versioned<byte[]>>> rawResult =
            store.getAll(byteKeyToKey.keySet(), transformsToBytes(transforms));
    Map<K, List<Versioned<V>>> deserialized = Maps.newHashMapWithExpectedSize(rawResult.size());
    for (Map.Entry<ByteArray, List<Versioned<byte[]>>> entry : rawResult.entrySet()) {
        List<Versioned<byte[]>> rawValues = entry.getValue();
        List<Versioned<V>> converted = Lists.newArrayListWithExpectedSize(rawValues.size());
        for (Versioned<byte[]> raw : rawValues) {
            // Version metadata is carried over unchanged; only the payload is decoded.
            converted.add(new Versioned<V>(valueSerializer.toObject(raw.getValue()), raw.getVersion()));
        }
        deserialized.put(byteKeyToKey.get(entry.getKey()), converted);
    }
    return deserialized;
}
// Pre-size every per-partition metrics map to the partition count so the maps
// never rehash while partitions are registered. All ten maps are keyed the
// same way (presumably by partition — TODO confirm against the put sites,
// which are not visible in this fragment).
this.avgMillisPerRecord = Maps.newHashMapWithExpectedSize(this.partitions.size());
this.avgRecordSizes = Maps.newHashMapWithExpectedSize(this.partitions.size());
this.elapsedTime = Maps.newHashMapWithExpectedSize(this.partitions.size());
this.processedRecordCount = Maps.newHashMapWithExpectedSize(this.partitions.size());
this.partitionTotalSize = Maps.newHashMapWithExpectedSize(this.partitions.size());
this.decodeRecordTime = Maps.newHashMapWithExpectedSize(this.partitions.size());
this.fetchMessageBufferTime = Maps.newHashMapWithExpectedSize(this.partitions.size());
this.readRecordTime = Maps.newHashMapWithExpectedSize(this.partitions.size());
this.startFetchEpochTime = Maps.newHashMapWithExpectedSize(this.partitions.size());
this.stopFetchEpochTime = Maps.newHashMapWithExpectedSize(this.partitions.size());
/**
 * Converts histogram snapshots to their Thrift representation, tagging each
 * conversion with the given meta type. Entries whose conversion yields
 * {@code null} are dropped from the result.
 *
 * @param metaType meta type passed through to the converter
 * @param snapshots histogram snapshots keyed by metric id
 * @return Thrift snapshots for every successfully converted entry
 */
public static Map<Integer, MetricSnapshot> toThriftHistoSnapshots(MetaType metaType, Map<Integer, AsmSnapshot> snapshots) {
    Map<Integer, MetricSnapshot> converted = Maps.newHashMapWithExpectedSize(snapshots.size());
    for (Map.Entry<Integer, AsmSnapshot> snapshot : snapshots.entrySet()) {
        MetricSnapshot thriftSnapshot = convert(metaType, (AsmHistogramSnapshot) snapshot.getValue());
        if (thriftSnapshot == null) {
            continue; // converter declined this entry; skip it
        }
        converted.put(snapshot.getKey(), thriftSnapshot);
    }
    return converted;
}
/**
 * Indexes the rebalancing tasks by donor node id. If two tasks in the list
 * share a donor id, the later one silently replaces the earlier — same as
 * the original behavior.
 *
 * @param stealInfoList tasks to index
 */
public RebalancerState(List<RebalanceTaskInfo> stealInfoList) {
    stealInfoMap = Maps.newHashMapWithExpectedSize(stealInfoList.size());
    for (RebalanceTaskInfo task : stealInfoList) {
        stealInfoMap.put(task.getDonorId(), task);
    }
}
private static List<Entity> compileEntities(Key[] keys, Iterator<Entity> entities) { Map<Key, Entity> map = Maps.newHashMapWithExpectedSize(keys.length); while (entities.hasNext()) { Entity entity = entities.next(); map.put(entity.getKey(), entity); } List<Entity> list = new ArrayList<>(keys.length); for (Key key : keys) { // this will include nulls for nonexistent keys list.add(map.get(key)); } return list; } }
// Build a replacement map from row index to (field, replacement expression)
// for every rewritten field. NOTE(review): this is a fragment — `inputField`
// and `newField` are defined outside the visible span, and both appear
// loop-invariant here even though the loop iterates rewriteFields; confirm
// against the full method that they are (re)assigned per iteration.
List<RexNode> newExpList = Lists.newLinkedList();
Map<Integer, Pair<RelDataTypeField, RexNode>> replaceFieldMap = Maps
        .newHashMapWithExpectedSize(this.context.dynamicFields.size());
// paramIndex starts just past the existing fields — presumably where new
// dynamic fields get appended; verify against the surrounding code.
int paramIndex = this.rowType.getFieldList().size();
for (Map.Entry<String, RelDataType> rewriteField : this.context.rewriteFields.entrySet()) {
    String rewriteFieldName = rewriteField.getKey();
    // Resolve the rewritten column's position in the current row type.
    int rowIndex = this.columnRowType.getIndexByName(rewriteFieldName);
    RexInputRef newFieldRef = new RexInputRef(inputField.getIndex(), inputField.getType());
    replaceFieldMap.put(rowIndex, new Pair<RelDataTypeField, RexNode>(newField, newFieldRef));
/**
 * Converts counter snapshots to their Thrift representation.
 * Every value is expected to be an {@link AsmCounterSnapshot}; a different
 * runtime type fails with a {@link ClassCastException}, as in the original.
 *
 * @param snapshots counter snapshots keyed by metric id
 * @return Thrift snapshots under the same keys
 */
public static Map<Integer, MetricSnapshot> toThriftCounterSnapshots(Map<Integer, AsmSnapshot> snapshots) {
    Map<Integer, MetricSnapshot> converted = Maps.newHashMapWithExpectedSize(snapshots.size());
    for (Map.Entry<Integer, AsmSnapshot> snapshot : snapshots.entrySet()) {
        AsmCounterSnapshot counter = (AsmCounterSnapshot) snapshot.getValue();
        converted.put(snapshot.getKey(), convert(counter));
    }
    return converted;
}
/**
 * Builds the plural-to-singular exception dictionary.
 *
 * Each entry in {@code exceptions} is either a single word (a word that must
 * never be depluralized, mapped to itself) or {@code "singular:plural"},
 * which maps the plural form back to its singular. Entries with more than
 * one ':' are silently ignored, matching the original behavior.
 *
 * @param exceptions dictionary entries; matching is case-insensitive
 */
DictionaryAidedDepluralizer(String[] exceptions) {
    Map<String, String> map = Maps.newHashMapWithExpectedSize(exceptions.length);
    Splitter splitter = Splitter.on(':');
    for (String s : exceptions) {
        // Locale.ROOT keeps lower-casing stable regardless of the JVM's
        // default locale (avoids the Turkish dotless-i problem, where
        // "I".toLowerCase() is not "i").
        List<String> parts = splitter.splitToList(s.toLowerCase(java.util.Locale.ROOT));
        if (parts.size() == 1) {
            // simple no-depluratization exception
            map.put(parts.get(0), parts.get(0));
        } else if (parts.size() == 2) {
            // singular, then plural, so mapping plural->singular
            map.put(parts.get(1), parts.get(0));
        }
    }
    this.dictionary = ImmutableMap.copyOf(map);
}
/**
 * Lazily builds and caches the query-parameter map for this URL.
 * Multi-valued parameters are flattened into one comma-separated string.
 *
 * NOTE(review): the lazy init is unsynchronized — assumes single-threaded
 * access or that a benign duplicate build is acceptable; confirm with callers.
 */
public Map<String, String> getQueryParams() {
    if (queryMap == null) {
        final Map<String, String> params = Maps.newHashMapWithExpectedSize(url.querySize());
        for (String name : url.queryParameterNames()) {
            final List<String> values = url.queryParameterValues(name);
            params.put(name, Joiner.on(',').join(values));
        }
        this.queryMap = params;
    }
    return queryMap;
}
/**
 * Converts meter snapshots to their Thrift representation.
 * Every value is expected to be an {@link AsmMeterSnapshot}; a different
 * runtime type fails with a {@link ClassCastException}, as in the original.
 *
 * @param snapshots meter snapshots keyed by metric id
 * @return Thrift snapshots under the same keys
 */
public static Map<Integer, MetricSnapshot> toThriftMeterSnapshots(Map<Integer, AsmSnapshot> snapshots) {
    Map<Integer, MetricSnapshot> converted = Maps.newHashMapWithExpectedSize(snapshots.size());
    for (Map.Entry<Integer, AsmSnapshot> snapshot : snapshots.entrySet()) {
        AsmMeterSnapshot meter = (AsmMeterSnapshot) snapshot.getValue();
        converted.put(snapshot.getKey(), convert(meter));
    }
    return converted;
}
/**
 * Stores the broker groups and indexes them by group name for O(1) lookup.
 * Duplicate group names (if any) are resolved last-one-wins, as before.
 *
 * @param groupList broker groups in this cluster
 */
public BrokerClusterInfo(List<BrokerGroupInfo> groupList) {
    this.groupList = groupList;
    this.groupMap = Maps.newHashMapWithExpectedSize(groupList.size());
    for (BrokerGroupInfo brokerGroup : groupList) {
        groupMap.put(brokerGroup.getGroupName(), brokerGroup);
    }
}
/**
 * Adjusts cuboid row counts so a parent cuboid never reports fewer rows
 * than a descendant: cuboids are visited in ascending id order, and each
 * cuboid's count is raised to the largest count among its already-processed
 * descendants.
 *
 * @param statistics raw row counts keyed by cuboid id
 * @return adjusted row counts, same key set as the input
 */
public static Map<Long, Long> adjustCuboidStats(Map<Long, Long> statistics) {
    Map<Long, Long> adjusted = Maps.newHashMapWithExpectedSize(statistics.size());
    List<Long> cuboidIds = Lists.newArrayList(statistics.keySet());
    Collections.sort(cuboidIds);
    for (Long cuboid : cuboidIds) {
        Long rowCount = statistics.get(cuboid);
        // Scan everything adjusted so far; raise this count to any larger
        // descendant count.
        for (Map.Entry<Long, Long> child : adjusted.entrySet()) {
            if (isDescendant(child.getKey(), cuboid) && rowCount < child.getValue()) {
                rowCount = child.getValue();
            }
        }
        adjusted.put(cuboid, rowCount);
    }
    return adjusted;
}
/**
 * Converts histogram snapshots to their Thrift representation.
 * Entries whose conversion yields {@code null} are dropped from the result.
 *
 * @param snapshots histogram snapshots keyed by metric id
 * @return Thrift snapshots for every successfully converted entry
 */
public static Map<Integer, MetricSnapshot> toThriftHistoSnapshots(Map<Integer, AsmSnapshot> snapshots) {
    Map<Integer, MetricSnapshot> converted = Maps.newHashMapWithExpectedSize(snapshots.size());
    for (Map.Entry<Integer, AsmSnapshot> snapshot : snapshots.entrySet()) {
        MetricSnapshot thriftSnapshot = convert((AsmHistogramSnapshot) snapshot.getValue());
        if (thriftSnapshot == null) {
            continue; // converter declined this entry; skip it
        }
        converted.put(snapshot.getKey(), thriftSnapshot);
    }
    return converted;
}
/**
 * Returns the 'current' version of RO store
 *
 * @param nodeId The id of the node on which the store is present
 * @param storeNames List of all the stores
 * @return Returns a map of store name to the respective max version
 *         number
 */
public Map<String, Long> getROCurrentVersion(int nodeId, List<String> storeNames) {
    Map<String, Long> returnMap = Maps.newHashMapWithExpectedSize(storeNames.size());
    Map<String, String> versionDirs = getROCurrentVersionDir(nodeId, storeNames);
    // Iterate entries directly instead of keySet() + get() — avoids a second
    // hash lookup per store.
    for (Map.Entry<String, String> versionDir : versionDirs.entrySet()) {
        returnMap.put(versionDir.getKey(),
                      ReadOnlyUtils.getVersionId(new File(versionDir.getValue())));
    }
    return returnMap;
}
/**
 * Builds a JDK-map-backed immutable multiset from the given entries.
 * Each element is mapped to its count (duplicate elements: last count wins
 * in the map, but every count still contributes to {@code size}), and any
 * entry that is not already an immutable entry is replaced by a canonical
 * immutable copy in the backing array.
 *
 * @throws NullPointerException if any entry's element is null
 */
static <E> ImmutableMultiset<E> create(Collection<? extends Entry<? extends E>> entries) {
    @SuppressWarnings("unchecked")
    Entry<E>[] entryArray = entries.toArray(new Entry[0]);
    Map<E, Integer> delegateMap = Maps.newHashMapWithExpectedSize(entryArray.length);
    // size is a long: the sum of counts may exceed Integer.MAX_VALUE.
    long size = 0;
    for (int i = 0; i < entryArray.length; i++) {
        Entry<E> entry = entryArray[i];
        int count = entry.getCount();
        size += count;
        E element = checkNotNull(entry.getElement());
        delegateMap.put(element, count);
        if (!(entry instanceof Multisets.ImmutableEntry)) {
            // Canonicalize foreign Entry implementations so the stored array
            // holds only immutable entries.
            entryArray[i] = Multisets.immutableEntry(element, count);
        }
    }
    return new JdkBackedImmutableMultiset<>(
            delegateMap, ImmutableList.asImmutableList(entryArray), size);
}
/**
 * Batch get that collapses each key's version list to a single versioned
 * value via {@code getItemOrThrow}. The supplied {@code transforms} are
 * ignored: {@code null} is forwarded to the underlying store, matching the
 * original behavior.
 *
 * @param keys keys to fetch
 * @param transforms ignored (passed through as null)
 * @return one resolved versioned value per returned key
 */
@Override
public Map<K, Versioned<V>> getAll(Iterable<K> keys, Map<K, Object> transforms) {
    Map<K, List<Versioned<V>>> items = this.clientStore.getAll(keys, null);
    Map<K, Versioned<V>> resolved = Maps.newHashMapWithExpectedSize(items.size());
    for (Entry<K, List<Versioned<V>>> item : items.entrySet()) {
        resolved.put(item.getKey(), getItemOrThrow(item.getKey(), null, item.getValue()));
    }
    return resolved;
}
/**
 * Returns the max version of push currently being used by read-only
 * store. Important to remember that this may not be the 'current'
 * version since multiple pushes (with greater version numbers) may be
 * in progress currently
 *
 * @param nodeId The id of the node on which the store is present
 * @param storeNames List of all the stores
 * @return Returns a map of store name to the respective max version
 *         number
 */
public Map<String, Long> getROMaxVersion(int nodeId, List<String> storeNames) {
    Map<String, Long> returnMap = Maps.newHashMapWithExpectedSize(storeNames.size());
    Map<String, String> versionDirs = getROMaxVersionDir(nodeId, storeNames);
    // Iterate entries directly instead of keySet() + get() — avoids a second
    // hash lookup per store.
    for (Map.Entry<String, String> versionDir : versionDirs.entrySet()) {
        returnMap.put(versionDir.getKey(),
                      ReadOnlyUtils.getVersionId(new File(versionDir.getValue())));
    }
    return returnMap;
}