@Override
public void addFilterLists(List<Filter> filters) {
  if (checkAndGetReversed(filters, isReversed()) != isReversed()) {
    throw new IllegalArgumentException("Filters in the list must have the same reversed flag");
  }
  this.filters.addAll(filters);
  this.subFiltersIncludedCell.addAll(Collections.nCopies(filters.size(), false));
  this.prevFilterRCList.addAll(Collections.nCopies(filters.size(), null));
  this.prevCellList.addAll(Collections.nCopies(filters.size(), null));
}
private FieldReference[][] packFields(List<Set<FieldReference>> fields) {
  List<Set<FieldReference>> joinedFields = new ArrayList<>(Collections.nCopies(fields.size(), null));
  for (int i = 0; i < fields.size(); ++i) {
    if (fields.get(i) == null) {
      continue;
    }
    int j = definitionClasses[i];
    Set<FieldReference> fieldSet = joinedFields.get(j);
    if (fieldSet == null) {
      fieldSet = new LinkedHashSet<>();
      joinedFields.set(j, fieldSet);
    }
    fieldSet.addAll(fields.get(i));
  }
  FieldReference[][] packedFields = new FieldReference[fields.size()][];
  for (int i = 0; i < packedFields.length; ++i) {
    if (joinedFields.get(i) != null) {
      packedFields[i] = joinedFields.get(i).toArray(new FieldReference[0]);
    }
  }
  return packedFields;
}
singleValues.add(range.getLow().getValue());
switch (range.getLow().getBound()) {
  case ABOVE:
    rangeConjuncts.add(toPredicate(columnName, ">", range.getLow().getValue(), type, accumulator));
    break;
  case EXACTLY:
    rangeConjuncts.add(toPredicate(columnName, ">=", range.getLow().getValue(), type, accumulator));
    break;
  case BELOW:
    // ... (snippet truncated here)
}
// ...
if (singleValues.size() == 1) {
  disjuncts.add(toPredicate(columnName, "=", getOnlyElement(singleValues), type, accumulator));
}
else if (singleValues.size() > 1) {
  for (Object value : singleValues) {
    bindValue(value, type, accumulator);
  }
  String values = Joiner.on(",").join(nCopies(singleValues.size(), "?"));
  disjuncts.add(quote(columnName) + " IN (" + values + ")");
}
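The nCopies/Joiner combination in the last fragment is a common idiom for building a JDBC IN clause with one "?" placeholder per bound value. A minimal standalone sketch of the same idiom (my illustration, using java.lang.String.join in place of Guava's Joiner):

import java.util.Collections;

public class InClauseDemo {
  public static void main(String[] args) {
    int count = 3;
    // One "?" per value to bind; nCopies avoids a hand-written loop.
    String placeholders = String.join(",", Collections.nCopies(count, "?"));
    System.out.println("col IN (" + placeholders + ")"); // prints: col IN (?,?,?)
  }
}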
protected void setup(int numSortChannels, int totalChannels, int numMergeSources, int pagesCount) {
  types = nCopies(totalChannels, BIGINT);
  sortChannels = new ArrayList<>();
  for (int i = 0; i < numSortChannels; i++) {
    sortChannels.add(i);
  }
  sortTypes = nCopies(numSortChannels, BIGINT);
  sortOrders = nCopies(numSortChannels, ASC_NULLS_FIRST);
  outputChannels = new ArrayList<>();
  for (int i = 0; i < totalChannels; i++) {
    outputChannels.add(i);
  }
  createPages(totalChannels, pagesCount);
  createPageProducers(numMergeSources);
}
private void prepare(MethodNode method) {
  InsnList instructions = method.instructions;
  minLocal = 0;
  if ((method.access & Opcodes.ACC_STATIC) != 0) {
    minLocal = 1;
  }
  labelIndexes = new HashMap<>();
  lineNumbers = new HashMap<>();
  for (int i = 0; i < instructions.size(); ++i) {
    AbstractInsnNode node = instructions.get(i);
    if (node instanceof LabelNode) {
      labelIndexes.put(((LabelNode) node).getLabel(), i);
    }
    if (node instanceof LineNumberNode) {
      LineNumberNode lineNumberNode = (LineNumberNode) node;
      lineNumbers.put(lineNumberNode.start.getLabel(), lineNumberNode.line);
    }
  }
  for (LocalVariableNode localVar : method.localVariables) {
    int location = labelIndexes.get(localVar.start.getLabel());
    localVariableMap.computeIfAbsent(location, k -> new ArrayList<>()).add(localVar);
  }
  targetInstructions = new ArrayList<>(instructions.size());
  targetInstructions.addAll(Collections.nCopies(instructions.size(), null));
  basicBlocks.addAll(Collections.nCopies(instructions.size(), null));
  stackBefore = new StackFrame[instructions.size()];
  stackAfter = new StackFrame[instructions.size()];
}
private void dfs(Graph graph, Program program) {
  startLocations = new ArrayList<>(Collections.nCopies(graph.size(), null));
  additionalConnections = new ArrayList<>();
  Deque<Step> stack = new ArrayDeque<>();
  // ... (snippet truncated; the traversal loop that defines `step` and `visited` is elided)
  if (visited[step.block]) {
    if (step.location != null) {
      additionalConnections.add(new AdditionalConnection(step.location, startLocations.get(step.block)));
    }
    // ...
  }
  // ...
}
private List<Set<Phi>> getDestinationPhis(Program program) {
  List<Set<Phi>> destinationPhis = new ArrayList<>();
  destinationPhis.addAll(Collections.nCopies(program.variableCount(), null));
  for (int i = 0; i < program.basicBlockCount(); ++i) {
    BasicBlock block = program.basicBlockAt(i);
    for (Phi phi : block.getPhis()) {
      for (Incoming incoming : phi.getIncomings()) {
        Set<Phi> phis = destinationPhis.get(incoming.getValue().getIndex());
        if (phis == null) {
          phis = new LinkedHashSet<>();
          destinationPhis.set(incoming.getValue().getIndex(), phis);
        }
        phis.add(phi);
      }
    }
  }
  return destinationPhis;
}
public void testDirectExecutorServiceInvokeAll() throws Exception {
  final ExecutorService executor = newDirectExecutorService();
  final ThreadLocal<Integer> threadLocalCount = new ThreadLocal<Integer>() {
    @Override
    protected Integer initialValue() {
      return 0;
    }
  };
  final Callable<Integer> incrementTask = new Callable<Integer>() {
    @Override
    public Integer call() {
      int i = threadLocalCount.get();
      threadLocalCount.set(i + 1);
      return i;
    }
  };
  List<Future<Integer>> futures = executor.invokeAll(Collections.nCopies(10, incrementTask));
  for (int i = 0; i < 10; i++) {
    Future<Integer> future = futures.get(i);
    assertTrue("Task should have been run before being returned", future.isDone());
    assertEquals(i, future.get().intValue());
  }
  assertEquals(10, threadLocalCount.get().intValue());
}
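A note on why the assertions above hold (this reasoning is mine, not stated in the snippet): Collections.nCopies(10, incrementTask) yields ten references to the same Callable rather than ten copies, and a direct executor service runs each task on the calling thread, so a single ThreadLocal counter is incremented deterministically from 0 to 9. A tiny sketch of the shared-reference behaviour:

import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;

public class SharedTaskDemo {
  public static void main(String[] args) {
    Callable<Integer> task = () -> 42;
    List<Callable<Integer>> tasks = Collections.nCopies(10, task);
    // Every element is the same object, not an independent copy.
    System.out.println(tasks.get(0) == tasks.get(9)); // prints: true
  }
}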
public void addEdge(int from, int to) {
  if (to < 0 || from < 0) {
    throw new IllegalArgumentException();
  }
  sz = Math.max(sz, Math.max(from, to) + 1);
  builtGraph = null;
  if (addedEdges.size() == from) {
    addedEdges.add(IntHashSet.from(to));
  } else if (addedEdges.size() <= from) {
    addedEdges.addAll(Collections.nCopies(from - addedEdges.size(), null));
    addedEdges.add(IntHashSet.from(to));
  } else {
    IntSet set = addedEdges.get(from);
    if (set == null) {
      addedEdges.set(from, IntHashSet.from(to));
    } else {
      set.add(to);
    }
  }
}
public SimpleKafkaSpecConsumer(Config config, Optional<Logger> log) {
  // Consumer
  String kafkaConsumerClientClass = ConfigUtils.getString(config, CONSUMER_CLIENT_FACTORY_CLASS_KEY,
      DEFAULT_CONSUMER_CLIENT_FACTORY_CLASS);
  try {
    Class<?> clientFactoryClass = (Class<?>) Class.forName(kafkaConsumerClientClass);
    final GobblinKafkaConsumerClient.GobblinKafkaConsumerClientFactory factory =
        (GobblinKafkaConsumerClient.GobblinKafkaConsumerClientFactory) ConstructorUtils.invokeConstructor(clientFactoryClass);
    _kafkaConsumer = factory.create(config);
  } catch (ClassNotFoundException | NoSuchMethodException | IllegalAccessException | InstantiationException
      | InvocationTargetException e) {
    if (log.isPresent()) {
      log.get().error("Failed to instantiate Kafka consumer from class " + kafkaConsumerClientClass, e);
    }
    throw new RuntimeException("Failed to instantiate Kafka consumer", e);
  }
  List<KafkaTopic> kafkaTopics = _kafkaConsumer.getFilteredTopics(Collections.EMPTY_LIST,
      Lists.newArrayList(Pattern.compile(config.getString(SimpleKafkaSpecExecutor.SPEC_KAFKA_TOPICS_KEY))));
  _partitions = kafkaTopics.get(0).getPartitions();
  _lowWatermark = Lists.newArrayList(Collections.nCopies(_partitions.size(), 0L));
  _nextWatermark = Lists.newArrayList(Collections.nCopies(_partitions.size(), 0L));
  _highWatermark = Lists.newArrayList(Collections.nCopies(_partitions.size(), 0L));
  InputStream dummyInputStream = new ByteArrayInputStream(new byte[0]);
  _decoder = DecoderFactory.get().binaryDecoder(dummyInputStream, null);
  _reader = new SpecificDatumReader<AvroJobSpec>(AvroJobSpec.SCHEMA$);
  _versionWriter = new FixedSchemaVersionWriter();
}
partitionNames = new ArrayList<>(partitions.size());
for (Partition partition : partitions) {
  partitionNames.add(Warehouse.makePartName(tbl.getPartitionKeys(), partition.getValues()));
}
response.setPartitionValues(new ArrayList<>(partitionNames.size()));
LOG.info("Converting responses to Partition values for items: {}", partitionNames.size());
for (String partName : partitionNames) {
  ArrayList<String> vals = new ArrayList<String>(Collections.nCopies(tbl.getPartitionKeys().size(), null));
  PartitionValuesRow row = new PartitionValuesRow();
  Warehouse.makeValsFromName(partName, vals);
  // ... (snippet truncated)
}
public List<List<String>> getFieldOrigins(SqlNode sqlQuery) {
  if (sqlQuery instanceof SqlExplain) {
    return Collections.emptyList();
  }
  final RelDataType rowType = getValidatedNodeType(sqlQuery);
  final int fieldCount = rowType.getFieldCount();
  if (!sqlQuery.isA(SqlKind.QUERY)) {
    return Collections.nCopies(fieldCount, null);
  }
  final List<List<String>> list = new ArrayList<>();
  for (int i = 0; i < fieldCount; i++) {
    list.add(getFieldOrigin(sqlQuery, i));
  }
  return ImmutableNullableList.copyOf(list);
}
/**
 * Decorate the set method to perform the growth behaviour.
 * <p>
 * If the requested index is greater than the current size, the list will
 * grow to the new size. Indices between the old size and the requested
 * size will be filled with <code>null</code>.
 * <p>
 * If the index is less than the current size, the value will be set onto
 * the underlying list directly.
 * If the index is less than zero, the underlying list is called, which
 * will probably throw an IndexOutOfBoundsException.
 *
 * @param index the index to set
 * @param element the object to set at the specified index
 * @return the object previously at that index
 * @throws UnsupportedOperationException if the underlying list doesn't implement set
 * @throws ClassCastException if the underlying list rejects the element
 * @throws IllegalArgumentException if the underlying list rejects the element
 */
public Object set(int index, Object element) {
  int size = getList().size();
  if (index >= size) {
    getList().addAll(Collections.nCopies((index - size) + 1, null));
  }
  return getList().set(index, element);
}
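A runnable sketch of the growth-on-set behaviour described in the Javadoc above, applied to a plain ArrayList rather than the decorator (the helper name is my own, for illustration):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class GrowthDemo {
  // Pad with nulls so set(index, ...) cannot throw IndexOutOfBoundsException
  // for an index at or beyond the current size.
  static <T> void setGrowing(List<T> list, int index, T element) {
    int size = list.size();
    if (index >= size) {
      list.addAll(Collections.nCopies(index - size + 1, null));
    }
    list.set(index, element);
  }

  public static void main(String[] args) {
    List<String> list = new ArrayList<>();
    setGrowing(list, 3, "d");
    System.out.println(list); // prints: [null, null, null, d]
  }
}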
public FilterListWithAND(List<Filter> filters) {
  super(filters);
  // For FilterList with AND, when FL's transformCell() is called, we should transform the cell for
  // all sub-filters (because all sub-filters return INCLUDE*), so fill this array with true. We
  // keep this in FilterListWithAND to abstract the transformCell() in FilterListBase.
  subFiltersIncludedCell = new ArrayList<>(Collections.nCopies(filters.size(), true));
}
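One detail the constructor above relies on, and which recurs throughout these snippets (my observation, not a comment from the source): Collections.nCopies returns an immutable list, so code that needs to mutate the entries afterwards, as the AND filter list does with subFiltersIncludedCell, must first copy it into a mutable list. For example:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class NCopiesImmutabilityDemo {
  public static void main(String[] args) {
    List<Boolean> shared = Collections.nCopies(3, true);
    // shared.set(0, false);   // would throw UnsupportedOperationException
    List<Boolean> copy = new ArrayList<>(shared);
    copy.set(0, false);        // fine: the copy is an ordinary ArrayList
    System.out.println(copy);  // prints: [false, true, true]
  }
}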
// ... (snippet truncated)
List<DatasetTrainer<? extends IgniteModel, L>> trainers = Collections.nCopies(ensembleSize, tr);
// ...
    .mapToObj(mdlIdx -> {
      AdaptableDatasetTrainer<Vector, Double, Vector, Double, ? extends IgniteModel, L> tr =
          AdaptableDatasetTrainer.of(trainers.get(mdlIdx));
      if (mappings != null) {
        tr = tr.afterFeatureExtractor(featureValues -> {
          int[] mapping = mappings.get(mdlIdx);
          double[] newFeaturesValues = new double[mapping.length];
          for (int j = 0; j < mapping.length; j++) {
            // ... (loop body truncated)
          }
        }).beforeTrainedModel(VectorUtils.getProjector(mappings.get(mdlIdx)));
      }
      // ...
    })