/**
 * Allocates one empty index list per entry so positions can be appended lazily.
 * Every slot is populated eagerly; callers assume no null entries.
 */
private void initializeEntryIndicesList(int entryCount)
{
    entryIndicesList = new IntList[entryCount];
    for (int slot = 0; slot < entryCount; slot++) {
        entryIndicesList[slot] = new IntArrayList();
    }
}
}
/**
 * Schedules driver-group lifespans dynamically across nodes.
 *
 * @param bucketNodeMap mapping from buckets to nodes
 * @param allNodes all candidate nodes for scheduling
 * @param partitionHandles one connector partition handle per bucket; must be non-empty
 * @param concurrentLifespansPerTask if present, must be >= 1
 */
public DynamicLifespanScheduler(BucketNodeMap bucketNodeMap, List<Node> allNodes, List<ConnectorPartitionHandle> partitionHandles, OptionalInt concurrentLifespansPerTask)
{
    this.bucketNodeMap = requireNonNull(bucketNodeMap, "bucketNodeMap is null");
    this.allNodes = requireNonNull(allNodes, "allNodes is null");
    // Defensive copy: callers may pass a mutable list.
    this.partitionHandles = unmodifiableList(new ArrayList<>(
            requireNonNull(partitionHandles, "partitionHandles is null")));

    this.concurrentLifespansPerTask = requireNonNull(concurrentLifespansPerTask, "concurrentLifespansPerTask is null");
    // Fixed message typo: "great or equal" -> "greater than or equal".
    concurrentLifespansPerTask.ifPresent(lifespansPerTask ->
            checkArgument(lifespansPerTask >= 1, "concurrentLifespansPerTask must be greater than or equal to 1 if present"));

    int bucketCount = partitionHandles.size();
    verify(bucketCount > 0, "partitionHandles is empty");
    // One driver group per bucket, scheduled in bucket order.
    this.driverGroups = new IntArrayList(IntStream.range(0, bucketCount).toArray()).iterator();
}
/**
 * Creates a date column of the given name, pre-filled with {@code initialSize}
 * missing values so the column reports that many rows immediately.
 */
public static DateColumn create(final String name, final int initialSize)
{
    // Pre-size the backing int list to avoid growth during the fill below.
    final DateColumn column = new DateColumn(name, new IntArrayList(initialSize));
    int remaining = initialSize;
    while (remaining-- > 0) {
        column.appendMissing();
    }
    return column;
}
/**
 * Schedules driver-group lifespans with a fixed bucket-to-node assignment.
 *
 * @param bucketNodeMap mapping from buckets to nodes; every bucket must already be assigned
 * @param partitionHandles one handle per bucket; must not be NOT_PARTITIONED
 * @param concurrentLifespansPerTask if present, must be >= 1
 */
public FixedLifespanScheduler(BucketNodeMap bucketNodeMap, List<ConnectorPartitionHandle> partitionHandles, OptionalInt concurrentLifespansPerTask)
{
    // Added messages to previously bare checkArguments for diagnosability.
    checkArgument(!partitionHandles.equals(ImmutableList.of(NOT_PARTITIONED)), "partitionHandles must not be NOT_PARTITIONED");
    checkArgument(partitionHandles.size() == bucketNodeMap.getBucketCount(), "partitionHandles size %s does not match bucket count %s", partitionHandles.size(), bucketNodeMap.getBucketCount());

    Map<Node, IntList> nodeToDriverGroupMap = new HashMap<>();
    Int2ObjectMap<Node> driverGroupToNodeMap = new Int2ObjectOpenHashMap<>();
    for (int bucket = 0; bucket < bucketNodeMap.getBucketCount(); bucket++) {
        // NOTE(review): .get() assumes every bucket has an assigned node here —
        // consistent with the fixed-assignment contract, but confirm upstream.
        Node node = bucketNodeMap.getAssignedNode(bucket).get();
        nodeToDriverGroupMap.computeIfAbsent(node, key -> new IntArrayList()).add(bucket);
        driverGroupToNodeMap.put(bucket, node);
    }

    this.driverGroupToNodeMap = driverGroupToNodeMap;
    this.nodeToDriverGroupsMap = nodeToDriverGroupMap.entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().iterator()));
    this.partitionHandles = requireNonNull(partitionHandles, "partitionHandles is null");

    this.concurrentLifespansPerTask = requireNonNull(concurrentLifespansPerTask, "concurrentLifespansPerTask is null");
    // Same validation style as DynamicLifespanScheduler; fixed message typo
    // "great or equal" -> "greater than or equal".
    concurrentLifespansPerTask.ifPresent(lifespansPerTask ->
            checkArgument(lifespansPerTask >= 1, "concurrentLifespansPerTask must be greater than or equal to 1 if present"));
}
/**
 * Routes pages to local partitions. Uses a precomputed hash column when one is
 * supplied; otherwise hashes the partitioning channels on the fly.
 */
public PartitioningExchanger(
        List<Consumer<PageReference>> partitions,
        LocalExchangeMemoryManager memoryManager,
        List<? extends Type> types,
        List<Integer> partitionChannels,
        Optional<Integer> hashChannel)
{
    this.buffers = ImmutableList.copyOf(requireNonNull(partitions, "partitions is null"));
    this.memoryManager = requireNonNull(memoryManager, "memoryManager is null");

    HashGenerator hashGenerator = hashChannel
            .<HashGenerator>map(PrecomputedHashGenerator::new)
            .orElseGet(() -> {
                List<Type> partitionChannelTypes = partitionChannels.stream()
                        .map(types::get)
                        .collect(toImmutableList());
                return new InterpretedHashGenerator(partitionChannelTypes, Ints.toArray(partitionChannels));
            });
    partitionGenerator = new LocalPartitionGenerator(hashGenerator, buffers.size());

    // One reusable position buffer per output partition.
    partitionAssignments = new IntArrayList[partitions.size()];
    for (int partition = 0; partition < partitionAssignments.length; partition++) {
        partitionAssignments[partition] = new IntArrayList();
    }
}
// Builds an empty bidirectional mapping between int values and dense ids:
// _valueToIdMap answers value -> id (returning INVALID_KEY when the value is
// absent, via the default return value), and _idToValueMap answers id -> value
// with the id used as the list index.
public IntToIdMap() { _valueToIdMap = new Int2IntOpenHashMap(); _valueToIdMap.defaultReturnValue(INVALID_KEY); _idToValueMap = new IntArrayList(); }
/**
 * Generates {@code numPositions} deterministic pseudo-random positions in
 * {@code [0, numRows)}, returned in ascending order (duplicates possible).
 */
private IntArrayList generatePositionList(int numRows, int numPositions)
{
    final IntArrayList positions = new IntArrayList(numPositions);
    for (int n = 0; n < numPositions; n++) {
        // Stride-7 walk with offset 3, wrapped into the valid row range.
        positions.add((7 * n + 3) % numRows);
    }
    Collections.sort(positions);
    return positions;
}
}
/** Sorts this column's values in ascending order, replacing the backing list. */
@Override
public void sortAscending()
{
    // Sort a copy of the backing array in parallel, then swap it in.
    final int[] ascending = data.toIntArray();
    Arrays.parallelSort(ascending);
    this.data = new IntArrayList(ascending);
}
/**
 * A set of typed values backed by a shared {@link BlockBuilder} and an
 * open-addressed hash table of block positions.
 *
 * @param expectedSize expected number of distinct elements; must be >= 0
 * @param functionName caller name, used for error reporting
 */
public TypedSet(Type elementType, BlockBuilder blockBuilder, int expectedSize, String functionName)
{
    checkArgument(expectedSize >= 0, "expectedSize must not be negative");
    this.elementType = requireNonNull(elementType, "elementType must not be null");
    this.elementBlock = requireNonNull(blockBuilder, "blockBuilder must not be null");
    this.functionName = functionName;

    // Record where this set starts writing so its footprint can be measured
    // relative to the shared block builder.
    initialElementBlockOffset = elementBlock.getPositionCount();
    initialElementBlockSizeInBytes = elementBlock.getSizeInBytes();

    this.size = 0;
    this.hashCapacity = arraySize(expectedSize, FILL_RATIO);
    this.maxFill = calculateMaxFill(hashCapacity);
    // Capacity is a power of two, so masking replaces modulo.
    this.hashMask = hashCapacity - 1;

    // Every hash slot starts empty.
    blockPositionByHash = new IntArrayList(hashCapacity);
    for (int slot = 0; slot < hashCapacity; slot++) {
        blockPositionByHash.add(EMPTY_SLOT);
    }

    this.containsNullElement = false;
}
/**
 * Drains the iterator into an {@link IntList} without boxing and returns an
 * unmodifiable view of the result.
 */
public static IntList toIntList(IntIterator iterator)
{
    final IntList values = new IntArrayList();
    for (IntIterator it = iterator; it.hasNext(); ) {
        values.add(it.nextInt());
    }
    return IntLists.unmodifiable(values);
}
/**
 * Returns the positions of rows whose computed Hive bucket matches the bucket
 * this reader keeps. Rows in a different-but-compatible bucket (same residue
 * modulo the partition bucket count) are skipped; any other bucket indicates
 * corrupt bucket files and raises an error.
 */
public IntArrayList computeEligibleRowIds(Page page)
{
    IntArrayList eligiblePositions = new IntArrayList(page.getPositionCount());
    Page bucketColumnsPage = extractColumns(page, bucketColumns);
    for (int position = 0; position < page.getPositionCount(); position++) {
        int bucket = getHiveBucket(tableBucketCount, typeInfoList, bucketColumnsPage, position);
        if (bucket == bucketToKeep) {
            eligiblePositions.add(position);
        }
        else if ((bucket - bucketToKeep) % partitionBucketCount != 0) {
            throw new PrestoException(HIVE_INVALID_BUCKET_FILES, format(
                    "A row that is supposed to be in bucket %s is encountered. Only rows in bucket %s (modulo %s) are expected",
                    bucket, bucketToKeep % partitionBucketCount, partitionBucketCount));
        }
    }
    return eligiblePositions;
}
}
/**
 * Reads a Parquet map column: decodes the key and value children, then builds
 * a MapType block using offsets derived from the key child's levels.
 *
 * @throws IOException if reading either child column fails
 */
private ColumnChunk readMap(GroupField field)
        throws IOException
{
    List<Type> parameters = field.getType().getTypeParameters();
    // Fixed: Guava's checkArgument only supports %s placeholders; the original
    // "%d" would have been emitted literally instead of the actual count.
    checkArgument(parameters.size() == 2, "Maps must have two type parameters, found %s", parameters.size());
    Block[] blocks = new Block[parameters.size()];

    // Children are read in declared order (keys then values); the key chunk's
    // definition/repetition levels drive the offset calculation below.
    ColumnChunk columnChunk = readColumnChunk(field.getChildren().get(0).get());
    blocks[0] = columnChunk.getBlock();
    blocks[1] = readColumnChunk(field.getChildren().get(1).get()).getBlock();

    IntList offsets = new IntArrayList();
    BooleanList valueIsNull = new BooleanArrayList();
    calculateCollectionOffsets(field, offsets, valueIsNull, columnChunk.getDefinitionLevels(), columnChunk.getRepetitionLevels());
    Block mapBlock = ((MapType) field.getType()).createBlockFromKeyValue(Optional.of(valueIsNull.toBooleanArray()), offsets.toIntArray(), blocks[0], blocks[1]);
    return new ColumnChunk(mapBlock, columnChunk.getDefinitionLevels(), columnChunk.getRepetitionLevels());
}
/**
 * Splits a page's rows by partition: rows whose partition matches the spill
 * mask are appended to that partition's page builder; the rest are returned as
 * in-memory ("unspilled") positions.
 */
private synchronized IntArrayList partitionPage(Page page, IntPredicate spillPartitionMask)
{
    IntArrayList unspilledPositions = new IntArrayList();
    for (int position = 0; position < page.getPositionCount(); position++) {
        int partition = partitionFunction.getPartition(page, position);
        if (!spillPartitionMask.test(partition)) {
            // Row stays in memory.
            unspilledPositions.add(position);
        }
        else {
            // Row is routed to the spill buffer for its partition.
            spilledPartitions.add(partition);
            PageBuilder pageBuilder = pageBuilders[partition];
            pageBuilder.declarePosition();
            for (int channel = 0; channel < types.size(); channel++) {
                Type type = types.get(channel);
                type.appendTo(page.getBlock(channel), position, pageBuilder.getBlockBuilder(channel));
            }
        }
    }
    return unspilledPositions;
}
// Reads exactly nextBatchSize primitive values for the field into a new
// ColumnChunk, collecting definition and repetition levels alongside values.
public ColumnChunk readPrimitive(Field field)
        throws IOException
{
    IntList definitionLevels = new IntArrayList();
    IntList repetitionLevels = new IntArrayList();
    // Advance past any values skipped via readOffset before decoding.
    seek();
    BlockBuilder blockBuilder = field.getType().createBlockBuilder(null, nextBatchSize);
    int valueCount = 0;
    while (valueCount < nextBatchSize) {
        if (page == null) {
            // Load the next data page; presumably this (re)sets page and
            // remainingValueCountInPage — confirm in readNextPage().
            readNextPage();
        }
        // Read at most what remains in the current page and in the batch.
        int valuesToRead = Math.min(remainingValueCountInPage, nextBatchSize - valueCount);
        readValues(blockBuilder, valuesToRead, field.getType(), definitionLevels, repetitionLevels);
        valueCount += valuesToRead;
    }
    checkArgument(valueCount == nextBatchSize, "valueCount %s not equals to batchSize %s", valueCount, nextBatchSize);

    // Batch fully consumed: reset cursor state for the next batch.
    readOffset = 0;
    nextBatchSize = 0;
    return new ColumnChunk(blockBuilder.build(), definitionLevels.toIntArray(), repetitionLevels.toIntArray());
}
/**
 * Reads a Parquet array column: decodes the single element child, then builds
 * an ArrayBlock using offsets derived from the child's levels.
 *
 * @throws IOException if reading the element column fails
 */
private ColumnChunk readArray(GroupField field)
        throws IOException
{
    List<Type> parameters = field.getType().getTypeParameters();
    // Fixed: Guava's checkArgument only supports %s placeholders; the original
    // "%d" would have been emitted literally instead of the actual count.
    checkArgument(parameters.size() == 1, "Arrays must have a single type parameter, found %s", parameters.size());
    Field elementField = field.getChildren().get(0).get();

    ColumnChunk columnChunk = readColumnChunk(elementField);

    IntList offsets = new IntArrayList();
    BooleanList valueIsNull = new BooleanArrayList();
    calculateCollectionOffsets(field, offsets, valueIsNull, columnChunk.getDefinitionLevels(), columnChunk.getRepetitionLevels());
    Block arrayBlock = ArrayBlock.fromElementBlock(valueIsNull.size(), Optional.of(valueIsNull.toBooleanArray()), offsets.toIntArray(), columnChunk.getBlock());
    return new ColumnChunk(arrayBlock, columnChunk.getDefinitionLevels(), columnChunk.getRepetitionLevels());
}
/**
 * Returns the elements of {@code source} contained in {@code intSet},
 * preserving order. When nothing is filtered out, the caller's array is
 * returned as-is to avoid a copy.
 */
private static int[] filterInts(IntSet intSet, int[] source)
{
    // Collect retained values without boxing.
    IntList retained = new IntArrayList();
    for (int candidate : source) {
        if (intSet.contains(candidate)) {
            retained.add(candidate);
        }
    }
    return retained.size() == source.length ? source : retained.toIntArray();
}
/**
 * Builds an index over the column identified by {@code index}: maps each
 * distinct value in that column to the list of row positions where it occurs.
 */
public void generateDataFrameIndex()
{
    for (int row = 0; row < df.getData().get(index).size(); row++) {
        int value = (int) df.get(row, index);
        if (!dataTable.containsKey(value)) {
            dataTable.put(value, new IntArrayList());
        }
        // BUG FIX: the original appended the constant column index ("index")
        // for every row, so the per-value lists never recorded where the value
        // occurs. Append the row position instead.
        dataTable.get(value).add(row);
    }
}
/**
 * Returns a new column holding the n largest values (fewer if the column is
 * shorter than n), in descending order.
 */
@Override
public IntColumn top(int n)
{
    // Sort a copy descending, then take the leading values.
    final int[] sorted = data.toIntArray();
    IntArrays.parallelQuickSort(sorted, descendingComparator);

    final int count = Math.min(n, sorted.length);
    final IntArrayList largest = new IntArrayList();
    for (int i = 0; i < count; i++) {
        largest.add(sorted[i]);
    }
    return new IntColumn(name() + "[Top " + n + "]", largest);
}
/**
 * Returns a new column holding the n smallest values (fewer if the column is
 * shorter than n), in ascending order.
 */
@Override
public IntColumn bottom(final int n)
{
    // Sort a copy ascending, then take the leading values.
    final int[] sorted = data.toIntArray();
    IntArrays.parallelQuickSort(sorted);

    final int count = Math.min(n, sorted.length);
    final IntArrayList smallest = new IntArrayList();
    for (int i = 0; i < count; i++) {
        smallest.add(sorted[i]);
    }
    return new IntColumn(name() + "[Bottoms " + n + "]", smallest);
}
/**
 * Randomized smoke test: values 0..49 are scattered across a random number of
 * lists; merging their iterators must always yield an ascending sequence,
 * regardless of list order (rotations and shuffles).
 */
@Test
public void smokeTest()
{
    ThreadLocalRandom random = ThreadLocalRandom.current();
    for (int round = 0; round < 1000; round++) {
        int numIterators = random.nextInt(1, 11);
        List<IntList> lists = new ArrayList<>(numIterators);
        for (int j = 0; j < numIterators; j++) {
            lists.add(new IntArrayList());
        }
        // Each value goes to one randomly chosen list; within a list the
        // values are therefore already ascending.
        for (int value = 0; value < 50; value++) {
            lists.get(random.nextInt(numIterators)).add(value);
        }

        // Every rotation of the same lists must still merge in order.
        for (int rotation = 0; rotation < lists.size() + 1; rotation++) {
            assertAscending(mergeAscending(iteratorsFromLists(lists)));
            Collections.rotate(lists, 1);
        }

        // So must arbitrary shuffles.
        for (int shuffle = 0; shuffle < 10; shuffle++) {
            Collections.shuffle(lists);
            assertAscending(mergeAscending(iteratorsFromLists(lists)));
        }
    }
}