Refine search
Multimap<K, V> multimap, final K sampleKey, final V sampleValue) { List<Entry<K, V>> originalEntries = Collections.unmodifiableList(Lists.newArrayList(multimap.entries())); multimap.clear(); fail("clear succeeded on unmodifiable multimap"); } catch (UnsupportedOperationException expected) { multimap.asMap().entrySet(), Maps.immutableEntry(sampleKey, sampleValueAsCollection)); assertCollectionIsUnmodifiable(multimap.entries(), Maps.immutableEntry(sampleKey, sampleValue)); assertMultimapRemainsUnmodified(multimap, originalEntries); Multimap<K, V> multimap2 = ArrayListMultimap.create(); multimap2.put(sampleKey, sampleValue); try {
/** Verifies that trimToSize() compacts internal storage without changing contents or order. */
public void testTrimToSize() {
  ArrayListMultimap<String, Integer> multimap = ArrayListMultimap.create();
  multimap.put("foo", 1);
  multimap.put("foo", 2);
  multimap.put("bar", 3);

  multimap.trimToSize();

  // Trimming is a capacity-only operation: size and per-key value order must survive.
  assertEquals(3, multimap.size());
  assertThat(multimap.get("foo")).containsExactly(1, 2).inOrder();
  assertThat(multimap.get("bar")).contains(3);
}
}
/**
 * Validates the configured path: it must exist and be readable.
 *
 * @return an empty Optional when valid, otherwise a multimap of field name to error messages
 */
@Override
public Optional<Multimap<String, String>> validate() {
  final ArrayListMultimap<String, String> errors = ArrayListMultimap.create();
  final Path file = Paths.get(path());
  // Check existence first so the two error messages are mutually exclusive.
  if (!Files.exists(file)) {
    errors.put("path", "The file does not exist.");
  } else if (!Files.isReadable(file)) {
    errors.put("path", "The file cannot be read.");
  }
  if (errors.isEmpty()) {
    return Optional.empty();
  }
  return Optional.of(errors);
}
/**
 * Deserializes a JSON object into a Guava {@link ArrayListMultimap}, mapping each
 * key to its value, or to each element of its value when the value is a collection.
 *
 * @param parser    the parser positioned at the JSON object to read
 * @param type      the requested target type; only {@code ArrayListMultimap.class} is handled
 * @param fieldName the name of the field being deserialized (unused here)
 * @return the populated multimap, or {@code null} when {@code type} is not supported
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public <T> T deserialze(DefaultJSONParser parser, Type type, Object fieldName) {
    if (type == ArrayListMultimap.class) {
        ArrayListMultimap multimap = ArrayListMultimap.create();
        JSONObject object = parser.parseObject();
        for (Map.Entry entry : object.entrySet()) {
            Object value = entry.getValue();
            if (value instanceof Collection) {
                // Fix: the guard accepts any Collection, so cast to Collection rather than
                // List — putAll(K, Iterable) does not require a List, and a Set value would
                // have thrown ClassCastException under the old (List) cast.
                multimap.putAll(entry.getKey(), (Collection) value);
            } else {
                multimap.put(entry.getKey(), value);
            }
        }
        return (T) multimap;
    }
    return null;
}
Set<TableReference> tables = Sets.newHashSet(); for (Multimap<TableReference, Cell> v : scrubTimestampToTableNameToCell.values()) { tables.addAll(v.keySet()); numCells += v.size(); List<Future<Void>> scrubFutures = Lists.newArrayList(); Map<TableReference, Multimap<Cell, Long>> failedWrites = Maps.newHashMap(); final Multimap<TableReference, Cell> tableNameToCell = entry.getValue(); numCellsReadFromScrubTable += tableNameToCell.size(); Multimap<Cell, Long> failedCells = failedWrites.get(cells.getKey()); if (failedCells == null) { failedCells = ArrayListMultimap.create(cells.getValue().size(), 2); failedWrites.put(cells.getKey(), failedCells);
public void testForMap() { Map<String, Integer> map = Maps.newHashMap(); map.put("foo", 1); map.put("bar", 2); Multimap<String, Integer> multimap = HashMultimap.create(); multimap.put("foo", 1); multimap.put("bar", 2); Multimap<String, Integer> multimapView = Multimaps.forMap(map); new EqualsTester().addEqualityGroup(multimap, multimapView).addEqualityGroup(map).testEquals(); Multimap<String, Integer> multimap2 = HashMultimap.create(); multimap2.put("foo", 1); assertFalse(multimapView.equals(multimap2)); multimap2.put("bar", 1); assertThat(multimapView.keys()).contains("foo"); assertThat(multimapView.values()).contains(1); assertThat(multimapView.entries()).contains(Maps.immutableEntry("foo", 1)); assertThat(multimapView.asMap().entrySet()) .contains(Maps.immutableEntry("foo", (Collection<Integer>) Collections.singleton(1))); multimapView.clear(); assertFalse(multimapView.containsKey("foo")); assertEquals(multimap.hashCode(), multimapView.hashCode()); assertEquals(multimap.size(), multimapView.size()); assertEquals(multimapView, ArrayListMultimap.create());
Multimap<Operator<?>, ReduceSinkOperator> parentToRsOps = ArrayListMultimap.create(); Set<Operator<?>> visited = new HashSet<>(); for (Entry<String, TableScanOperator> e : pctx.getTopOps().entrySet()) { while (!parentToRsOps.isEmpty()) { rankOpsByAccumulatedSize(parentToRsOps.keySet()); LOG.debug("Sorted operators by size: {}", sortedRSGroups); Multimap<Operator<?>, ReduceSinkOperator> existingRsOps = ArrayListMultimap.create(); for (Entry<Operator<?>, Long> rsGroupInfo : sortedRSGroups) { Operator<?> rsParent = rsGroupInfo.getKey(); for (ReduceSinkOperator discardableRsOp : parentToRsOps.get(rsParent)) { if (removedOps.contains(discardableRsOp)) { LOG.debug("Skip {} as it has already been removed", discardableRsOp); if (lastDiscardableOp.getNumChild() != 0) { List<Operator<? extends OperatorDesc>> allChildren = Lists.newArrayList(lastDiscardableOp.getChildOperators()); for (Operator<? extends OperatorDesc> op : allChildren) { lastDiscardableOp.getChildOperators().remove(op); parentToRsOps = ArrayListMultimap.create(); visited = new HashSet<>(); for (Entry<Operator<?>, ReduceSinkOperator> e : existingRsOps.entries()) {
pkColumns = Lists.newArrayListWithExpectedSize(this.columns.size()+1); ++numPKColumns; } else { allColumns = new PColumn[this.columns.size()]; pkColumns = Lists.newArrayListWithExpectedSize(this.columns.size()); List<PColumn> sortedColumns = Lists.newArrayList(this.columns); Collections.sort(sortedColumns, new Comparator<PColumn>() { @Override ArrayListMultimap.create(this.columns.size(), 1); Map<KVColumnFamilyQualifier, PColumn> populateKvColumnsByQualifiers = Maps.newHashMapWithExpectedSize(this.columns.size()); for (PColumn column : sortedColumns) { allColumns[position] = column; if (populateColumnsByName.put(columnName, column)) { int count = 0; for (PColumn dupColumn : populateColumnsByName.get(columnName)) { if (Objects.equal(familyName, dupColumn.getFamilyName())) { count++; Map<PName, List<PColumn>> familyMap = Maps.newLinkedHashMap(); PColumn rowTimestampCol = null; boolean hasColsRequiringUpgrade = false;
/**
 * Creates a resolver that can track column references across multiple tables.
 * Delegates common setup to the superclass, then starts with empty table state.
 */
private MultiTableColumnResolver(PhoenixConnection connection, int tsAddition,
        Map<String, UDFParseNode> udfParseNodes, TableName mutatingTableName)
        throws SQLException {
    super(connection, tsAddition, false, udfParseNodes, mutatingTableName);
    this.tableMap = ArrayListMultimap.create();
    this.tables = Lists.newArrayList();
}
int numberOfSweptCells = 0; Multimap<Cell, Long> currentBatch = ArrayListMultimap.create(); List<Cell> currentBatchSentinels = Lists.newArrayList(); List<Long> currentCellTimestamps = ImmutableList.copyOf(cell.sortedTimestamps()); if (currentBatch.size() + currentCellTimestamps.size() < deleteBatchSize) { currentBatch.putAll(cell.cell(), currentCellTimestamps); } else { while (currentBatch.size() + currentCellTimestamps.size() >= deleteBatchSize) { int numberOfTimestampsForThisBatch = deleteBatchSize - currentBatch.size();
ListMultimap<Long, InternalRelation> mutations = ArrayListMultimap.create(); ListMultimap<InternalVertex, InternalRelation> mutatedProperties = ArrayListMultimap.create(); List<IndexSerializer.IndexUpdate> indexUpdates = Lists.newArrayList(); InternalVertex vertex = del.getVertex(pos); if (pos == 0 || !del.isLoop()) { if (del.isProperty()) mutatedProperties.put(vertex,del); mutations.put(vertex.longId(), del); InternalVertex vertex = add.getVertex(pos); if (pos == 0 || !add.isLoop()) { if (add.isProperty()) mutatedProperties.put(vertex,add); mutations.put(vertex.longId(), add); IndexSerializer.IndexUpdate<StaticBuffer,Entry> update = indexUpdate; if (update.isAddition()) mutator.mutateIndex(update.getKey(), Lists.newArrayList(update.getEntry()), KCVSCache.NO_DELETIONS); else mutator.mutateIndex(update.getKey(), KeyColumnValueStore.NO_ADDITIONS, Lists.newArrayList(update.getEntry())); } else { IndexSerializer.IndexUpdate<String,IndexEntry> update = indexUpdate;
Multimap<String, TableScanOperator> existingOps = ArrayListMultimap.create(); Set<Operator<?>> removedOps = new HashSet<>(); for (Entry<String, Long> tablePair : sortedTables) { String tableName = tablePair.getKey(); for (TableScanOperator discardableTsOp : tableNameToOps.get(tableName)) { if (removedOps.contains(discardableTsOp)) { LOG.debug("Skip {} as it has already been removed", discardableTsOp); continue; Collection<TableScanOperator> prevTsOps = existingOps.get(tableName); for (TableScanOperator retainableTsOp : prevTsOps) { if (removedOps.contains(retainableTsOp)) { Lists.newArrayList(discardableTsOp.getChildOperators()); for (Operator<? extends OperatorDesc> op : allChildren) { discardableTsOp.getChildOperators().remove(op); GenTezUtils.removeSemiJoinOperator( pctx, (ReduceSinkOperator) op, sjbi.getTsOp()); optimizerCache.tableScanToDPPSource.remove(sjbi.getTsOp(), op); GenTezUtils.removeSemiJoinOperator( pctx, (AppMasterEventOperator) op, dped.getTableScan()); optimizerCache.tableScanToDPPSource.remove(dped.getTableScan(), op);
/**
 * Creates a collector that starts in the SCANNING state with no results,
 * listeners, or recorded failures, and subscribes itself to queue events.
 */
public ResultCollector() {
    this.status = SCANNING;
    this.resultMap = newHashMap();
    this.changeListeners = newArrayList();
    this.statusChangeListeners = newArrayList();
    this.failuresByPointOfFailure = ArrayListMultimap.create();
    // Register last, once all state above is initialized, so callbacks see a
    // fully-constructed collector.
    this.queueAggregator = new QueueAggregator();
    this.queueAggregator.addListener(this);
}
/**
 * Applies each of the given aggregate functions to the named column —
 * the apply and combine steps of a split-apply-combine.
 *
 * @param colName1  the name of the column to aggregate
 * @param functions the aggregate functions to apply to that column
 * @return the table produced by applying and combining the aggregations
 */
public Table aggregate(String colName1, AggregateFunction<?, ?>... functions) {
    ArrayListMultimap<String, AggregateFunction<?, ?>> functionsByColumn =
            ArrayListMultimap.create();
    for (AggregateFunction<?, ?> function : functions) {
        functionsByColumn.put(colName1, function);
    }
    return aggregate(functionsByColumn);
}
List<Option> transitOptions = Lists.newArrayList(); ListMultimap<TraverseMode, Option> transitOptionsByAccessMode = ArrayListMultimap.create(); for (Option option : transitOptions) { for (StreetSegment segment : option.access) { transitOptionsByAccessMode.put(segment.mode.mode, option); for (Collection<Option> singleModeOptions : transitOptionsByAccessMode.asMap().values()) { int n = 0; for (Option option : singleModeOptions) {
/**
 * Finds all already-loaded, modifiable classes that are (transitive) subclasses or
 * implementors of a class matched by one of the given pointcuts.
 *
 * @param pointcutClassNames the pointcut name patterns to match against class names
 * @param classes            the currently loaded classes to consider
 * @param instrumentation    used to exclude classes the JVM cannot retransform
 * @return the matching classes together with their modifiable subtypes
 */
private static Set<Class<?>> getExistingModifiableSubClasses(
        Set<PointcutClassName> pointcutClassNames, Class<?>[] classes,
        Instrumentation instrumentation) {
    List<Class<?>> pointcutMatches = Lists.newArrayList();
    // Index: supertype (superclass or interface) -> directly derived modifiable classes.
    Multimap<Class<?>, Class<?>> subtypeIndex = ArrayListMultimap.create();
    for (Class<?> candidate : classes) {
        if (!instrumentation.isModifiableClass(candidate)) {
            continue;
        }
        Class<?> parent = candidate.getSuperclass();
        if (parent != null) {
            subtypeIndex.put(parent, candidate);
        }
        for (Class<?> implemented : candidate.getInterfaces()) {
            subtypeIndex.put(implemented, candidate);
        }
        for (PointcutClassName pointcutClassName : pointcutClassNames) {
            if (pointcutClassName.appliesTo(candidate.getName())) {
                pointcutMatches.add(candidate);
                break; // one matching pointcut is enough
            }
        }
    }
    // Expand each directly-matching class into its transitive set of modifiable subtypes.
    Set<Class<?>> matchingSubClasses = Sets.newHashSet();
    for (Class<?> match : pointcutMatches) {
        addToMatchingSubClasses(match, matchingSubClasses, subtypeIndex);
    }
    return matchingSubClasses;
}
storageType, partitionMap); ListMultimap<Integer, Pair<String, String>> nodeToEntries = ArrayListMultimap.create(); for(Map.Entry<String, String> entry: testData.getData().entrySet()) { for(Node node: testData.routeRequest(entry.getKey())) { nodeToEntries.put(node.getId(), Pair.create(entry.getKey(), entry.getValue())); for(Map.Entry<Integer, ReadOnlyStorageEngine> storeEntry: testData.getReadOnlyStores() .entrySet()) { List<Pair<String, String>> entries = Lists.newArrayList(nodeToEntries.get(storeEntry.getKey())); ClosableIterator<ByteArray> keyIterator = null; ClosableIterator<Pair<ByteArray, Versioned<byte[]>>> entryIterator = null; List<String> keys = Lists.newArrayList(); Iterator<Pair<String, String>> pairIterator = entries.iterator(); while(pairIterator.hasNext()) {
markForPartialFormat(); Multimap<Integer, Op> tokOps = ArrayListMultimap.create(); int opsN = ops.size(); for (int i = 0; i < opsN; i++) { tokOps.put( tokOps.putAll(j, makeComment(tokBefore)); tokOps.put(j, Doc.Break.makeForced()); ImmutableList.Builder<Op> newOps = ImmutableList.builder(); boolean afterForcedBreak = false; // Was the last Op a forced break? If so, suppress spaces. for (int i = 0; i < opsN; i++) {
/**
 * Resolves {@code name} by walking the scope chain outward.
 *
 * @param name the symbol name to resolve
 * @return all symbols bound to {@code name} in the nearest enclosing scope that
 *     defines it, or an empty list when no scope does
 */
public List<JavaSymbol> lookup(String name) {
    for (Scope scope = this; scope != null; scope = scope.next) {
        if (scope.symbols.containsKey(name)) {
            return scope.symbols.get(name);
        }
    }
    return ImmutableList.of();
}