Refine search
/**
 * Returns the dictionary ids matching this predicate, computing and caching
 * the result on first call by scanning every dictionary entry.
 */
@Override
public int[] getMatchingDictIds() {
    if (_matchingDictIds == null) {
        IntList matches = new IntArrayList();
        int numEntries = _dictionary.length();
        for (int id = 0; id < numEntries; id++) {
            if (applySV(id)) {
                matches.add(id);
            }
        }
        _matchingDictIds = matches.toIntArray();
    }
    return _matchingDictIds;
}
} // end of enclosing class (brace present in original span)
/**
 * Returns the values of {@code source} that are contained in {@code intSet}.
 * When every value passes, the original array is returned as-is to avoid an
 * unnecessary copy.
 */
private static int[] filterInts(IntSet intSet, int[] source) {
    IntList kept = new IntArrayList();
    for (int candidate : source) {
        if (intSet.contains(candidate)) {
            kept.add(candidate);
        }
    }
    return (kept.size() == source.length) ? source : kept.toIntArray();
}
/**
 * Reads exactly {@code nextBatchSize} primitive values for {@code field} into a
 * ColumnChunk, collecting definition and repetition levels alongside the
 * decoded values. Resets readOffset and nextBatchSize afterwards so the next
 * batch starts fresh.
 *
 * NOTE(review): the loop assumes readNextPage() leaves
 * remainingValueCountInPage > 0; otherwise valuesToRead would be 0 and the
 * loop would not make progress — TODO confirm against readNextPage().
 */
public ColumnChunk readPrimitive(Field field) throws IOException {
    IntList definitionLevels = new IntArrayList();
    IntList repetitionLevels = new IntArrayList();
    // presumably skips readOffset values left over from a previous seek request — verify against seek()
    seek();
    BlockBuilder blockBuilder = field.getType().createBlockBuilder(null, nextBatchSize);
    int valueCount = 0;
    while (valueCount < nextBatchSize) {
        if (page == null) {
            readNextPage();
        }
        // never read past the current page nor past the requested batch size
        int valuesToRead = Math.min(remainingValueCountInPage, nextBatchSize - valueCount);
        readValues(blockBuilder, valuesToRead, field.getType(), definitionLevels, repetitionLevels);
        valueCount += valuesToRead;
    }
    checkArgument(valueCount == nextBatchSize, "valueCount %s not equals to batchSize %s", valueCount, nextBatchSize);
    // reset the read window for the next batch
    readOffset = 0;
    nextBatchSize = 0;
    return new ColumnChunk(blockBuilder.build(), definitionLevels.toIntArray(), repetitionLevels.toIntArray());
}
/**
 * Drains the given iterator into a new list and returns it wrapped as an
 * unmodifiable IntList.
 */
public static IntList toIntList(IntIterator iterator) {
    final IntList collected = new IntArrayList();
    for (; iterator.hasNext(); ) {
        collected.add(iterator.nextInt());
    }
    return IntLists.unmodifiable(collected);
}
/**
 * Splits the incoming page by partition and forwards one sub-page per
 * non-empty partition to the matching buffer, charging each emitted page's
 * retained size to the memory manager until its PageReference is released.
 *
 * Synchronized: partitionAssignments is shared state reused across calls.
 */
@Override public synchronized void accept(Page page) {
    // reset the assignment lists
    for (IntList partitionAssignment : partitionAssignments) {
        partitionAssignment.clear();
    }
    // assign each row to a partition
    for (int position = 0; position < page.getPositionCount(); position++) {
        int partition = partitionGenerator.getPartition(page, position);
        partitionAssignments[partition].add(position);
    }
    // build a page for each partition
    // NOTE(review): outputBlocks is reused across partition iterations; this is
    // safe only if Page does not retain the array (or it is fully consumed before
    // the next iteration overwrites it) — TODO confirm Page's constructor contract.
    Block[] outputBlocks = new Block[page.getChannelCount()];
    for (int partition = 0; partition < buffers.size(); partition++) {
        IntArrayList positions = partitionAssignments[partition];
        if (!positions.isEmpty()) {
            for (int i = 0; i < page.getChannelCount(); i++) {
                outputBlocks[i] = page.getBlock(i).copyPositions(positions.elements(), 0, positions.size());
            }
            Page pageSplit = new Page(positions.size(), outputBlocks);
            // account for the split page until the downstream consumer releases it
            memoryManager.updateMemoryUsage(pageSplit.getRetainedSizeInBytes());
            buffers.get(partition).accept(new PageReference(pageSplit, 1, () -> memoryManager.updateMemoryUsage(-pageSplit.getRetainedSizeInBytes())));
        }
    }
}
/**
 * Creates a scheduler with a fixed bucket-to-node assignment: every bucket
 * (driver group) is mapped up front to the node chosen by {@code bucketNodeMap}.
 *
 * @param bucketNodeMap complete bucket-to-node assignment; every bucket must have an assigned node
 * @param partitionHandles one handle per bucket; must not be the NOT_PARTITIONED singleton list
 * @param concurrentLifespansPerTask optional cap (must be >= 1 when present) on
 *        the number of lifespans driven concurrently per task
 */
public FixedLifespanScheduler(BucketNodeMap bucketNodeMap, List<ConnectorPartitionHandle> partitionHandles, OptionalInt concurrentLifespansPerTask) {
    checkArgument(!partitionHandles.equals(ImmutableList.of(NOT_PARTITIONED)));
    checkArgument(partitionHandles.size() == bucketNodeMap.getBucketCount());
    Map<Node, IntList> nodeToDriverGroupMap = new HashMap<>();
    Int2ObjectMap<Node> driverGroupToNodeMap = new Int2ObjectOpenHashMap<>();
    for (int bucket = 0; bucket < bucketNodeMap.getBucketCount(); bucket++) {
        Node node = bucketNodeMap.getAssignedNode(bucket).get();
        nodeToDriverGroupMap.computeIfAbsent(node, key -> new IntArrayList()).add(bucket);
        driverGroupToNodeMap.put(bucket, node);
    }
    this.driverGroupToNodeMap = driverGroupToNodeMap;
    this.nodeToDriverGroupsMap = nodeToDriverGroupMap.entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().iterator()));
    this.partitionHandles = requireNonNull(partitionHandles, "partitionHandles is null");
    if (concurrentLifespansPerTask.isPresent()) {
        // Fixed grammar in the validation message ("great or equal" -> "greater than or equal").
        checkArgument(concurrentLifespansPerTask.getAsInt() >= 1, "concurrentLifespansPerTask must be greater than or equal to 1 if present");
    }
    this.concurrentLifespansPerTask = requireNonNull(concurrentLifespansPerTask, "concurrentLifespansPerTask is null");
}
this.palette = new IntArrayList(); for (char type : types) { if (type != 0) { if (!palette.contains(type)) { palette.add(type); int bitsPerBlock = VariableValueArray.calculateNeededBits(palette.size()); if (bitsPerBlock < 4) { bitsPerBlock = 4; for (int i = 0; i < ARRAY_SIZE; i++) { if (palette != null) { data.set(i, palette.indexOf(types[i])); } else { data.set(i, types[i]);
/**
 * Builds the mapping by applying {@code fun} to each element of {@code list},
 * preserving order.
 */
public ListMapping(IntList list, Int2IntFunction fun) {
    mapping = new IntArrayList(list.size());
    for (int index = 0; index < list.size(); index++) {
        mapping.add(fun.applyAsInt(list.getInt(index)));
    }
}
@Test
public void testRemove() {
    final int limit = 100;
    IntList expected = new IntArrayList(new int[] { 1, 20, 1000, 2, 3, 30, 40, 10, 11, 12, 13, 300, 400, 500, 600 });
    ByteBuffer backing = ByteBuffer.allocate(1000000);
    ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(backing, limit, Ordering.natural(), null);
    // Insert every offset, verifying the heap invariant after each insertion.
    for (int i = 0; i < expected.size(); i++) {
        heap.addOffset(expected.getInt(i));
        Assert.assertTrue(heap.isIntact());
    }
    heap.removeOffset(12);
    Assert.assertTrue(heap.isIntact());
    // Mirror the removal in the expected values, then drain mins in order.
    Collections.sort(expected);
    expected.rem(12);
    List<Integer> drained = new ArrayList<>();
    int remaining = expected.size();
    for (int i = 0; i < remaining; i++) {
        drained.add(heap.removeMin());
    }
    Assert.assertEquals(expected, drained);
}
/**
 * Returns the sorted packed-item-id cache, rebuilding it whenever it is
 * missing or its size no longer matches the orb list.
 */
@Override
@Nonnull
@SideOnly(Side.CLIENT)
public IntList getValidItemStacksPacked() {
    if (this.itemIds == null || itemIds.size() != orbs.size()) {
        IntArrayList packed = new IntArrayList(orbs.size());
        for (ItemStack stack : orbs) {
            packed.add(RecipeItemHelper.pack(stack));
        }
        packed.sort(IntComparators.NATURAL_COMPARATOR);
        this.itemIds = packed;
    }
    return this.itemIds;
}
/**
 * Lazily materializes and returns the partition id list [0, partitions.size()).
 */
@Override
public Iterable<Integer> getPartitionIds() {
    if (partitionIds == null) {
        int numPartitions = partitions.size();
        IntArrayList ids = new IntArrayList(numPartitions);
        for (int id = 0; id < numPartitions; id++) {
            ids.add(id);
        }
        partitionIds = ids;
    }
    // The cached list must stay in sync with the partition count.
    Preconditions.checkState(partitionIds.size() == partitions.size());
    return partitionIds;
}
final List<List<Batch>> eventBatchesPerThread = new ArrayList<>(nThreads); for (int i = 0; i < nThreads; i++) { eventsPerThread.add(new IntArrayList()); eventBatchesPerThread.add(new ArrayList<Batch>()); eventsPerThread.get(ThreadLocalRandom.current().nextInt(nThreads)).add(i); for (int threadIndex = 0; threadIndex < eventsPerThread.size(); threadIndex++) { IntList threadEvents = eventsPerThread.get(threadIndex); int indexOfEvent = threadEvents.indexOf(eventIndex); if (indexOfEvent >= 0) { Batch batch = eventBatchesPerThread.get(threadIndex).get(indexOfEvent);
@Test
public void testSimpleListAccess() {
    IntList list = new IntIntervalList(1);
    // A one-element interval list: non-empty, size 1, single value 0.
    assertFalse(list.isEmpty());
    assertEquals(1, list.size());
    assertEquals(0, list.getInt(0));
    // Out-of-range access must throw.
    boolean threw = false;
    try {
        list.getInt(1);
    } catch (IndexOutOfBoundsException expected) {
        threw = true;
    }
    if (!threw) {
        fail("getInt(1) should throw");
    }
    // The iterator walks forward over the single element and back again.
    IntListIterator cursor = list.iterator();
    assertTrue(cursor.hasNext());
    assertFalse(cursor.hasPrevious());
    assertEquals(0, cursor.nextInt());
    assertFalse(cursor.hasNext());
    assertTrue(cursor.hasPrevious());
    assertEquals(0, cursor.previousInt());
}
/**
 * Recount the amount of non-air blocks in the chunk section.
 */
public void recount() {
    count = 0;
    for (int index = 0; index < ARRAY_SIZE; index++) {
        int stored = data.get(index);
        // When a palette is present, stored values are palette indices.
        int type = (palette != null) ? palette.getInt(stored) : stored;
        if (type != 0) {
            count++;
        }
    }
}
list.size(), chunkFactor, numBytes,