private Map<String, JsonNode> buildDigests(final JsonNode schema) {
    /*
     * Produce a digest for every registered keyword actually present in the
     * schema: copy the digester map, retain only entries whose key matches one
     * of the schema's field names, then run each surviving digester against
     * the schema and collect the results into an immutable map.
     */
    final Map<String, Digester> relevant = Maps.newHashMap(digesterMap);
    relevant.keySet().retainAll(Sets.newHashSet(schema.fieldNames()));

    final ImmutableMap.Builder<String, JsonNode> digests = ImmutableMap.builder();
    for (final Map.Entry<String, Digester> entry: relevant.entrySet()) {
        digests.put(entry.getKey(), entry.getValue().digest(schema));
    }
    return digests.build();
}
@Override
public Optional<ConnectorNewTableLayout> getNewTableLayout(ConnectorSession connectorSession, ConnectorTableMetadata tableMetadata)
{
    // A layout only exists when the table declares distribution columns.
    List<String> distributeColumns = (List<String>) tableMetadata.getProperties().get(DISTRIBUTED_ON);
    if (distributeColumns.isEmpty()) {
        return Optional.empty();
    }

    // Every distribution column must be an actual column of the table.
    Set<String> tableColumnNames = tableMetadata.getColumns().stream()
            .map(ColumnMetadata::getName)
            .collect(toSet());
    Set<String> undefinedColumns = Sets.difference(ImmutableSet.copyOf(distributeColumns), tableColumnNames);
    if (!undefinedColumns.isEmpty()) {
        throw new PrestoException(INVALID_TABLE_PROPERTY, "Distribute columns not defined on table: " + undefinedColumns);
    }

    return Optional.of(new ConnectorNewTableLayout(BlackHolePartitioningHandle.INSTANCE, distributeColumns));
}
private static Ordering<String> createExplicitComparator(String[] elements) { // Collapse equal elements, which Ordering.explicit() doesn't support, while // maintaining the ordering by first occurrence. Set<String> elementsPlus = Sets.newLinkedHashSet(); elementsPlus.add(BEFORE_FIRST); elementsPlus.add(BEFORE_FIRST_2); elementsPlus.addAll(Arrays.asList(elements)); elementsPlus.add(AFTER_LAST); elementsPlus.add(AFTER_LAST_2); return Ordering.explicit(Lists.newArrayList(elementsPlus)); }
// Test fixture: builds an empty set as the difference of two identical sets,
// i.e. an empty Sets.SetView. NOTE: the supplied elements are intentionally
// ignored — this factory always yields an empty difference view.
@Override protected Set<String> create(String[] elements) { return Sets.difference(Sets.newHashSet("a"), Sets.newHashSet("a")); } })
// Test fixture: builds an empty set as the intersection of an empty set with a
// set containing only null — exercises Sets.intersection() views in the
// presence of a null element. The supplied elements are intentionally ignored.
@Override protected Set<String> create(String[] elements) { return Sets.intersection( Sets.<String>newHashSet(), Sets.newHashSet((String) null)); } })
private List<String> removedHosts(final List<String> currentHosts, final List<String> newHosts, final List<String> previouslyRemovedHosts) { final Set<String> ch = ImmutableSet.copyOf(currentHosts); final Set<String> nh = ImmutableSet.copyOf(newHosts); final Set<String> prh = ImmutableSet.copyOf(previouslyRemovedHosts); // Calculate the freshly removed hosts (current - new) and add in any previously removed hosts // that haven't been undeployed yet. final Set<String> rh = Sets.union(Sets.difference(ch, nh), prh); // Finally, only include hosts that are still registered return ImmutableList.copyOf(Sets.intersection(rh, ImmutableSet.copyOf(listHosts()))); }
/**
 * Derives the conjuncts that can be pulled up through a multi-source node:
 * for each source, combines the source's own pulled-up predicate with the
 * source-to-output symbol equalities supplied by {@code mapping}, pulls the
 * result through the node's output symbols, and splits it into conjuncts;
 * the per-source conjunct sets are then intersected and recombined.
 * NOTE(review): iterator.next() is called unconditionally, so this assumes
 * the node has at least one source — confirm callers guarantee that.
 */
private Expression deriveCommonPredicates(PlanNode node, Function<Integer, Collection<Map.Entry<Symbol, SymbolReference>>> mapping)
{
    // Find the predicates that can be pulled up from each source
    List<Set<Expression>> sourceOutputConjuncts = new ArrayList<>();
    for (int i = 0; i < node.getSources().size(); i++) {
        Expression underlyingPredicate = node.getSources().get(i).accept(this, null);

        // Equalities between source symbols and output symbols, excluding
        // identity mappings (symbol equal to its own expression).
        List<Expression> equalities = mapping.apply(i).stream()
                .filter(SYMBOL_MATCHES_EXPRESSION.negate())
                .map(ENTRY_TO_EQUALITY)
                .collect(toImmutableList());

        sourceOutputConjuncts.add(ImmutableSet.copyOf(extractConjuncts(pullExpressionThroughSymbols(combineConjuncts(
                ImmutableList.<Expression>builder()
                        .addAll(equalities)
                        .add(underlyingPredicate)
                        .build()),
                node.getOutputSymbols()))));
    }

    // Find the intersection of predicates across all sources
    // TODO: use a more precise way to determine overlapping conjuncts (e.g. commutative predicates)
    Iterator<Set<Expression>> iterator = sourceOutputConjuncts.iterator();
    Set<Expression> potentialOutputConjuncts = iterator.next();
    while (iterator.hasNext()) {
        potentialOutputConjuncts = Sets.intersection(potentialOutputConjuncts, iterator.next());
    }

    return combineConjuncts(potentialOutputConjuncts);
}
/**
 * Returns a local, non-thread-safe copy of the database.
 *
 * <p>Known container values (Set / List / Map) are copied one level deep so
 * callers may mutate the copies freely; any other value is shared as-is
 * (shallow). Wildcard casts replace the previous raw-type casts, avoiding
 * unchecked-warning noise without changing behavior.
 *
 * @return a map from key to a one-level-deep copy of the stored value
 */
private Map<String, Object> localCopy() {
    return db.getAll().entrySet().stream().map(entry -> {
        Object struct = entry.getValue();
        if (struct instanceof Set)
            return Pair.of(entry.getKey(), newHashSet((Set<?>) struct));
        else if (struct instanceof List)
            return Pair.of(entry.getKey(), newArrayList((List<?>) struct));
        else if (struct instanceof Map)
            return Pair.of(entry.getKey(), newHashMap((Map<?, ?>) struct));
        else
            return Pair.of(entry.getKey(), struct);
    }).collect(toMap(pair -> (String) pair.a(), Pair::b));
}
/**
 * Reconciles the tracked per-node memory state with current cluster
 * membership: drops entries for nodes no longer ACTIVE or SHUTTING_DOWN,
 * registers a {@code RemoteNodeMemory} for each newly seen node, removes the
 * coordinator's own entry when work is not scheduled on it, and finally kicks
 * off an async refresh of every remaining node with the given assignments.
 * Synchronized because {@code nodes} is shared mutable state.
 */
private synchronized void updateNodes(MemoryPoolAssignmentsRequest assignments)
{
    // "Alive" means ACTIVE or SHUTTING_DOWN.
    ImmutableSet.Builder<Node> builder = ImmutableSet.builder();
    Set<Node> aliveNodes = builder
            .addAll(nodeManager.getNodes(ACTIVE))
            .addAll(nodeManager.getNodes(SHUTTING_DOWN))
            .build();

    ImmutableSet<String> aliveNodeIds = aliveNodes.stream()
            .map(Node::getNodeIdentifier)
            .collect(toImmutableSet());

    // Remove nodes that don't exist anymore
    // Make a copy to materialize the set difference
    Set<String> deadNodes = ImmutableSet.copyOf(difference(nodes.keySet(), aliveNodeIds));
    nodes.keySet().removeAll(deadNodes);

    // Add new nodes
    for (Node node : aliveNodes) {
        if (!nodes.containsKey(node.getNodeIdentifier())) {
            nodes.put(node.getNodeIdentifier(), new RemoteNodeMemory(node, httpClient, memoryInfoCodec, assignmentsRequestJsonCodec, locationFactory.createMemoryInfoLocation(node)));
        }
    }

    // If work isn't scheduled on the coordinator (the current node) there is no point
    // in polling or updating (when moving queries to the reserved pool) its memory pools
    if (!isWorkScheduledOnCoordinator) {
        nodes.remove(nodeManager.getCurrentNode().getNodeIdentifier());
    }

    // Schedule refresh
    for (RemoteNodeMemory node : nodes.values()) {
        node.asyncRefresh(assignments);
    }
}
/**
 * Prunes a lateral join: drops the subquery side entirely when it is a scalar
 * whose outputs are unused by the parent context, prunes correlation symbols
 * that the rewritten subquery no longer references, and drops the input side
 * (returning just the subquery) when the input is a scalar whose outputs are
 * unused. The input is rewritten against the parent's required symbols plus
 * the surviving correlation symbols.
 */
@Override
public PlanNode visitLateralJoin(LateralJoinNode node, RewriteContext<Set<Symbol>> context)
{
    PlanNode subquery = context.rewrite(node.getSubquery(), context.get());

    // remove unused lateral nodes
    if (intersection(ImmutableSet.copyOf(subquery.getOutputSymbols()), context.get()).isEmpty() && isScalar(subquery)) {
        return context.rewrite(node.getInput(), context.get());
    }

    // prune not used correlation symbols
    Set<Symbol> subquerySymbols = SymbolsExtractor.extractUnique(subquery);
    List<Symbol> newCorrelation = node.getCorrelation().stream()
            .filter(subquerySymbols::contains)
            .collect(toImmutableList());

    // Input must still produce everything the parent needs plus the symbols
    // the (pruned) correlation references.
    Set<Symbol> inputContext = ImmutableSet.<Symbol>builder()
            .addAll(context.get())
            .addAll(newCorrelation)
            .build();
    PlanNode input = context.rewrite(node.getInput(), inputContext);

    // remove unused lateral nodes
    if (intersection(ImmutableSet.copyOf(input.getOutputSymbols()), inputContext).isEmpty() && isScalar(input)) {
        return subquery;
    }

    return new LateralJoinNode(node.getId(), input, subquery, newCorrelation, node.getType(), node.getOriginSubquery());
}
}
.map(CaseTree::getExpression) .filter(IdentifierTree.class::isInstance) .map(p -> ((IdentifierTree) p).getName().toString()) .collect(toImmutableSet()); boolean unrecognized = false; SetView<String> setDifference = Sets.difference(ASTHelpers.enumValues(switchType), handledCases); if (!setDifference.isEmpty()) { if (setDifference.contains("UNRECOGNIZED") && setDifference.size() == 1) { .subSequence( ((JCTree) defaultStatements.get(0)).getStartPosition(), state.getEndPosition(getLast(defaultStatements))) .toString(); String initialComments = comments(state, defaultCase, defaultStatements);
private ImmutableList<ImmutableList<ModuleSplit>> groupSplitsToShardsForApex(
    ImmutableList<ModuleSplit> splits) {
  // Partition the splits: those carrying multi-ABI targeting vs. the rest.
  Set<ModuleSplit> abiSplits = subsetWithTargeting(splits, ApkTargeting::hasMultiAbiTargeting);
  Set<ModuleSplit> nonAbiSplits = Sets.difference(ImmutableSet.copyOf(splits), abiSplits);

  // Exactly one non-ABI split may exist, and it must carry default targeting.
  ModuleSplit masterSplit = Iterables.getOnlyElement(nonAbiSplits);
  checkState(
      masterSplit.getApkTargeting().equals(ApkTargeting.getDefaultInstance()),
      "Master splits are expected to have default targeting.");

  // Each shard pairs the master split with one multi-ABI-targeted split.
  ImmutableList.Builder<ImmutableList<ModuleSplit>> shards = ImmutableList.builder();
  for (ModuleSplit abiSplit : abiSplits) {
    shards.add(ImmutableList.of(masterSplit, abiSplit));
  }
  return shards.build();
}
/**
 * Read-modify-write update of a single partition's statistics: fetches the
 * current statistics, applies {@code update}, writes the new basic statistics
 * onto the partition itself, writes the new column statistics, and finally
 * deletes column statistics that the update dropped. Synchronized so the
 * read-modify-write sequence cannot interleave with a concurrent update.
 * Throws PrestoException(HIVE_METASTORE_ERROR) if the metastore does not
 * return exactly one partition for the given name.
 */
@Override
public synchronized void updatePartitionStatistics(String databaseName, String tableName, String partitionName, Function<PartitionStatistics, PartitionStatistics> update)
{
    PartitionStatistics currentStatistics = requireNonNull(
            getPartitionStatistics(databaseName, tableName, ImmutableSet.of(partitionName)).get(partitionName), "getPartitionStatistics() returned null");
    PartitionStatistics updatedStatistics = update.apply(currentStatistics);

    List<Partition> partitions = getPartitionsByNames(databaseName, tableName, ImmutableList.of(partitionName));
    if (partitions.size() != 1) {
        throw new PrestoException(HIVE_METASTORE_ERROR, "Metastore returned multiple partitions for name: " + partitionName);
    }

    // Mutate a deep copy so the original partition object stays untouched.
    Partition originalPartition = getOnlyElement(partitions);
    Partition modifiedPartition = originalPartition.deepCopy();
    HiveBasicStatistics basicStatistics = updatedStatistics.getBasicStatistics();
    modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), basicStatistics));
    alterPartitionWithoutStatistics(databaseName, tableName, modifiedPartition);

    Map<String, HiveType> columns = modifiedPartition.getSd().getCols().stream()
            .collect(toImmutableMap(FieldSchema::getName, schema -> HiveType.valueOf(schema.getType())));
    setPartitionColumnStatistics(databaseName, tableName, partitionName, columns, updatedStatistics.getColumnStatistics(), basicStatistics.getRowCount());

    // Columns present before but absent after the update lose their statistics.
    Set<String> removedStatistics = difference(currentStatistics.getColumnStatistics().keySet(), updatedStatistics.getColumnStatistics().keySet());
    removedStatistics.forEach(column -> deletePartitionColumnStatistics(databaseName, tableName, partitionName, column));
}
/**
 * Verifies that nested collection types (ArrayList, LinkedList, HashSet,
 * LinkedHashSet, HashMap) placed inside another collection survive a binary
 * serialize/deserialize round trip with equality preserved.
 */
@Test
public void testCollectionsInCollection() {
    GridBinaryTestClasses.TestObjectContainer obj = new GridBinaryTestClasses.TestObjectContainer();

    obj.foo = Lists.newArrayList(
        Lists.newArrayList(1, 2),
        Lists.newLinkedList(Arrays.asList(1, 2)),
        Sets.newHashSet("a", "b"),
        Sets.newLinkedHashSet(Arrays.asList("a", "b")),
        Maps.newHashMap(ImmutableMap.of(1, "a", 2, "b")));

    GridBinaryTestClasses.TestObjectContainer deserialized = wrap(obj).build().deserialize();

    assertEquals(obj.foo, deserialized.foo);
}
// Expected ordering: natural order with duplicates collapsed — a TreeSet
// performs both the deduplication and the sorting in one step.
@Override public List<String> order(List<String> insertionOrder) {
    return Lists.newArrayList(Sets.newTreeSet(insertionOrder).iterator());
} })
@Override public PlanNode visitExchange(ExchangeNode node, RewriteContext<Set<Symbol>> context) Set<Symbol> expectedOutputSymbols = Sets.newHashSet(context.get()); node.getPartitioningScheme().getHashColumn().ifPresent(expectedOutputSymbols::add); node.getPartitioningScheme().getPartitioning().getColumns().stream() .forEach(expectedOutputSymbols::add); node.getOrderingScheme().ifPresent(orderingScheme -> expectedOutputSymbols.addAll(orderingScheme.getOrderBy())); node.getPartitioningScheme().getBucketToPartition()); ImmutableList.Builder<PlanNode> rewrittenSources = ImmutableList.builder(); for (int i = 0; i < node.getSources().size(); i++) { ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(inputsBySource.get(i)); expectedInputs.build()));
Sets.newHashSet("1200", "1400", "1600"), true ); new TopNResultValue( Arrays.<Map<String, Object>>asList( ImmutableMap.<String, Object>builder() .put("ql_alias", 1400L) .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) .put("minIndex", 91.270553D) .build(), ImmutableMap.<String, Object>builder() .put("ql_alias", 1600L) .put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) .put("minIndex", 99.284525D) .build(), ImmutableMap.<String, Object>builder() .put("ql_alias", 1200L) .put(QueryRunnerTestHelper.indexMetric, 12086.472791D)
Map<String, String> authorizationParameters = Maps.newHashMap(); OAuth2Request clientAuth = new OAuth2Request(authorizationParameters, client.getClientId(), Sets.newHashSet(new SimpleGrantedAuthority("ROLE_CLIENT")), true, scope, null, null, null, null); OAuth2Authentication authentication = new OAuth2Authentication(clientAuth, null); .audience(Lists.newArrayList(client.getClientId())) .issuer(configBean.getIssuer()) .issueTime(new Date())
private void storeTest(String... stores) throws Exception { Multimap<String, Object> doc1 = getDocument("Hello world", 1001, 5.2, Geoshape.point(48.0, 0.0), Arrays.asList("1", "2", "3"), Sets.newHashSet("1", "2"), Instant.ofEpochSecond(1)); Multimap<String, Object> doc2 = getDocument("Tomorrow is the world", 1010, 8.5, Geoshape.point(49.0, 1.0), Arrays.asList("4", "5", "6"), Sets.newHashSet("4", "5"), Instant.ofEpochSecond(2)); Multimap<String, Object> doc3 = getDocument("Hello Bob, are you there?", -500, 10.1, Geoshape.point(47.0, 10.0), Arrays.asList("7", "8", "9"), Sets.newHashSet("7", "8"), Instant.ofEpochSecond(3)); ImmutableList<IndexQuery.OrderEntry> orderTimeAsc = ImmutableList.of(new IndexQuery.OrderEntry(TIME, Order.ASC, Integer.class)); ImmutableList<IndexQuery.OrderEntry> orderWeightAsc = ImmutableList.of(new IndexQuery.OrderEntry(WEIGHT, Order.ASC, Double.class)); ImmutableList<IndexQuery.OrderEntry> orderTimeDesc = ImmutableList.of(new IndexQuery.OrderEntry(TIME, Order.DESC, Integer.class)); ImmutableList<IndexQuery.OrderEntry> orderWeightDesc = ImmutableList.of(new IndexQuery.OrderEntry(WEIGHT, Order.DESC, Double.class)); ImmutableList<IndexQuery.OrderEntry> jointOrder = ImmutableList.of(new IndexQuery.OrderEntry(WEIGHT, Order.DESC, Double.class), new IndexQuery.OrderEntry(TIME, Order.DESC, Integer.class)); assertEquals(ImmutableSet.of("doc1", "doc2"), ImmutableSet.copyOf(result)); assertEquals(ImmutableSet.copyOf(result), ImmutableSet.copyOf(tx.query(new IndexQuery(store, PredicateCondition.of(TEXT, Text.CONTAINS, "wOrLD"))))); assertEquals(1, tx.query(new IndexQuery(store, PredicateCondition.of(TEXT, Text.CONTAINS, "bob"))).size()); assertEquals(0, tx.query(new IndexQuery(store, PredicateCondition.of(TEXT, Text.CONTAINS, "worl"))).size()); assertEquals(1, Iterables.size(tx.query(new RawQuery(store,"text:\"Hello Bob\"",NO_PARAS)))); assertEquals(0, Iterables.size(tx.query(new RawQuery(store,"text:\"Hello Bob\"",NO_PARAS).setOffset(1)))); 
assertEquals(1, Iterables.size(tx.query(new RawQuery(store,"text:(world AND tomorrow)",NO_PARAS)))); assertEquals(2, Iterables.size(tx.query(new RawQuery(store,"text:(you there Hello Bob)",NO_PARAS)))); add(store, "doc4", getDocument("I'ts all a big Bob", -100, 11.2, Geoshape.point(48.0, 8.0), Arrays.asList("10", "11", "12"), Sets.newHashSet("10", "11"), Instant.ofEpochSecond(4)), true); remove(store, "doc2", doc2, true); remove(store, "doc3", ImmutableMultimap.of(WEIGHT, (Object) 10.1), false);
ImmutableSet.Builder<VarSymbol> definedVariables = ImmutableSet.builder(); ImmutableSet.Builder<Symbol> usedSymbols = ImmutableSet.builder(); Tree prev = state.getPath().getLeaf(); for (Tree curr : state.getPath().getParentPath()) { case FOR_LOOP: ForLoopTree forLoop = (ForLoopTree) curr; forLoop.getInitializer().stream().forEach(t -> addIfVariable(t, definedVariables)); break; case ENHANCED_FOR_LOOP: return ImmutableSet.copyOf(Sets.difference(definedVariables.build(), usedSymbols.build()));