/**
 * Reports the size by delegating to the configured size function.
 * The function ignores its argument, so {@code null} is passed deliberately.
 */
@Override
public int size() {
    final int count = sizeFn.applyAsInt(null);
    return count;
}
/**
 * Scans {@code seq} front to back, returning the index of the first element for
 * which {@code comparison} yields 0. If an element compares negative (the target
 * would sort before it) or the sequence is exhausted, returns the binary-search
 * style encoding {@code -(insertionPoint + 1)}.
 */
static <T> int linearSearch(LinearSeq<T> seq, ToIntFunction<T> comparison) {
    int position = 0;
    for (T element : seq) {
        final int signum = comparison.applyAsInt(element);
        if (signum < 0) {
            // Walked past where the target would be inserted.
            return -(position + 1);
        }
        if (signum == 0) {
            return position;
        }
        position++;
    }
    // Not found; insertion point is one past the end.
    return -(position + 1);
}
}
/**
 * Resolves the node responsible for the given split: split -> bucket ->
 * partition -> node. Fails fast if no node is mapped to the partition.
 */
public Node getNode(Split split)
{
    final int bucketId = splitToBucket.applyAsInt(split);
    final int partitionId = bucketToPartition[bucketId];
    return requireNonNull(partitionToNode.get(partitionId));
}
/**
 * Serializes {@code object} as the single int produced by the configured writer.
 *
 * @throws IOException if the underlying output fails
 */
@Override
public void writeObject(ObjectOutput output, T object) throws IOException {
    final int encoded = this.writer.applyAsInt(object);
    output.writeInt(encoded);
}
/**
 * Computes the [min, max) span covered by the tokens, where {@code toMin}
 * extracts each token's inclusive start and {@code toMax} its inclusive end
 * (the returned upper bound is exclusive, hence the +1).
 * NOTE(review): an empty token list yields (MAX_VALUE, MIN_VALUE) — confirm callers guard this.
 */
private static Pair<Integer, Integer> getSpan(List<CoreLabel> tokens, ToIntFunction<CoreLabel> toMin, ToIntFunction<CoreLabel> toMax) {
    int lo = Integer.MAX_VALUE;
    int hi = Integer.MIN_VALUE;
    for (CoreLabel token : tokens) {
        final int begin = toMin.applyAsInt(token);
        final int end = toMax.applyAsInt(token) + 1;  // exclusive upper bound
        if (begin < lo) {
            lo = begin;
        }
        if (end > hi) {
            hi = end;
        }
    }
    return Pair.makePair(lo, hi);
}
/**
 * Looks up the node assigned to the bucket that this split maps to, if any.
 */
public final Optional<Node> getAssignedNode(Split split)
{
    final int bucket = splitToBucket.applyAsInt(split);
    return getAssignedNode(bucket);
}
}
/**
 * Atomically replaces {@code oldValue} with {@code newValue}, which must map to
 * the same key under the indexer.
 *
 * @throws IllegalArgumentException if the two values have different keys
 * @return true if the replacement took effect
 */
public boolean replace(final V oldValue, final V newValue) {
    final int oldKey = indexer.applyAsInt(oldValue);
    final int newKey = indexer.applyAsInt(newValue);
    if (oldKey != newKey) {
        throw new IllegalArgumentException("Can only replace with value which has the same key");
    }
    return doReplace(oldValue, newValue, table);
}
/**
 * Scans the queue directory for queue files and returns the highest cycle
 * number derived from any of them, or {@code Integer.MIN_VALUE} when the
 * directory is unreadable or holds no matching files.
 */
@Override
public int getMaxCreatedCycle() {
    final File[] queueFiles = queueDir.listFiles((dir, name) -> name.endsWith(SingleChronicleQueue.SUFFIX));
    int highest = Integer.MIN_VALUE;
    if (queueFiles != null) {
        for (File queueFile : queueFiles) {
            final int cycle = fileToCycleFunction.applyAsInt(queueFile);
            if (cycle > highest) {
                highest = cycle;
            }
        }
    }
    return highest;
}
/**
 * Returns a {@code Collector} that gathers the stream into a multiset obtained
 * from {@code multisetSupplier}. Each input {@code t} contributes the element
 * {@code elementFunction.apply(t)} with count {@code countFunction.applyAsInt(t)},
 * added in encounter order; counts for equal elements (per {@link Object#equals})
 * accumulate.
 *
 * <p>{@code stream.collect(toMultiset(function, e -> 1, supplier))} behaves the
 * same as {@code stream.map(function).collect(Collectors.toCollection(supplier))}.
 *
 * @since 22.0
 */
public static <T, E, M extends Multiset<E>> Collector<T, ?, M> toMultiset(
    java.util.function.Function<? super T, E> elementFunction,
    java.util.function.ToIntFunction<? super T> countFunction,
    java.util.function.Supplier<M> multisetSupplier) {
  checkNotNull(elementFunction);
  checkNotNull(countFunction);
  checkNotNull(multisetSupplier);
  return Collector.of(
      multisetSupplier,
      (acc, input) -> acc.add(elementFunction.apply(input), countFunction.applyAsInt(input)),
      (left, right) -> {
        left.addAll(right);
        return left;
      });
}
/**
 * Resolves each token to its id via {@code getTokenId}, failing if any token is
 * unknown.
 *
 * @param tokenTypeName human-readable token kind, used in the error message
 * @throws NotFoundException when a token name resolves to {@code TokenRead.NO_TOKEN}
 * @return token ids in the same order as {@code tokens}
 */
private static <T> int[] resolveAndValidateTokens( String tokenTypeName, T[] tokens, Function<T,String> getTokenName, ToIntFunction<String> getTokenId )
{
    final int[] resolved = new int[tokens.length];
    for ( int i = 0; i < resolved.length; i++ )
    {
        final String name = getTokenName.apply( tokens[i] );
        final int id = getTokenId.applyAsInt( name );
        if ( id == TokenRead.NO_TOKEN )
        {
            throw new NotFoundException( tokenTypeName + " " + name + " not found." );
        }
        resolved[i] = id;
    }
    return resolved;
}
/**
 * Returns a {@code Collector} that gathers the stream into an
 * {@code ImmutableMultiset}. Each input {@code t} contributes the element
 * {@code elementFunction.apply(t)} with count {@code countFunction.applyAsInt(t)};
 * equal elements (per {@link Object#equals}) keep the position of their first
 * encounter and their counts are summed.
 *
 * @since 22.0
 */
public static <T, E> Collector<T, ?, ImmutableMultiset<E>> toImmutableMultiset(
    Function<? super T, ? extends E> elementFunction,
    ToIntFunction<? super T> countFunction) {
  checkNotNull(elementFunction);
  checkNotNull(countFunction);
  return Collector.of(
      LinkedHashMultiset::create,
      (acc, input) ->
          acc.add(checkNotNull(elementFunction.apply(input)), countFunction.applyAsInt(input)),
      (left, right) -> {
        left.addAll(right);
        return left;
      },
      (Multiset<E> acc) -> copyFromEntries(acc.entrySet()));
}
/**
 * Returns the five label entries whose stats rank highest under
 * {@code biggerCriteria}, in descending order.
 */
private static List<Map.Entry<String, MetricStatsInt>> fiveBiggest(DistributedMetricStatsInt distributedMetricStatsInt, ToIntFunction<MetricStatsInt> biggerCriteria) {
    Comparator<Map.Entry<String, MetricStatsInt>> bySize =
        Comparator.comparingInt(entry -> biggerCriteria.applyAsInt(entry.getValue()));
    return distributedMetricStatsInt.getForLabels().entrySet().stream()
        .sorted(bySize.reversed())
        .limit(5)
        .collect(MoreCollectors.toList(5));
}
/**
 * Extracts the field from the entity and maps it to an {@code Integer},
 * propagating null when the field value is absent.
 */
@Override
public Integer apply(ENTITY entity) {
    final V raw = field.get(entity);
    return (raw == null) ? null : mapper.applyAsInt(raw);
}
/**
 * Aggregates the current row under {@code key}, computing the key hash with this
 * grouper's own hash function. Some implementations are thread-safe and some
 * are not.
 *
 * @param key grouping key; must not be null
 * @return result that is ok if the row was aggregated, not ok if a resource limit was hit
 */
default AggregateResult aggregate(KeyType key)
{
    Preconditions.checkNotNull(key, "key");
    final int keyHash = hashFunction().applyAsInt(key);
    return aggregate(key, keyHash);
}
/**
 * Returns a {@code Collector} that gathers the stream into an
 * {@code ImmutableSortedMultiset} ordered by {@code comparator}. Each input
 * {@code t} contributes the element {@code elementFunction.apply(t)} with count
 * {@code countFunction.applyAsInt(t)}; elements equal under the comparator keep
 * the first occurrence in encounter order and have their counts summed.
 *
 * @since 22.0
 */
public static <T, E> Collector<T, ?, ImmutableSortedMultiset<E>> toImmutableSortedMultiset(
    Comparator<? super E> comparator,
    Function<? super T, ? extends E> elementFunction,
    ToIntFunction<? super T> countFunction) {
  checkNotNull(comparator);
  checkNotNull(elementFunction);
  checkNotNull(countFunction);
  return Collector.of(
      () -> TreeMultiset.create(comparator),
      (acc, input) ->
          acc.add(checkNotNull(elementFunction.apply(input)), countFunction.applyAsInt(input)),
      (left, right) -> {
        left.addAll(right);
        return left;
      },
      (Multiset<E> acc) -> copyOfSortedEntries(comparator, acc.entrySet()));
}
/**
 * Looks up the value whose indexer key equals {@code key} in the given table.
 * Returns {@code nonexistent()} when no such value is present.
 * NOTE(review): lock-free read path — if the bucket row carries the RESIZED
 * sentinel, the lookup recurses into the table's resize view; confirm the
 * resize chain is bounded by the writer protocol.
 */
private V doGet(final Table<V> table, final int key) {
    final AtomicReferenceArray<V[]> array = table.array;
    // array.length() is assumed to be a power of two — TODO confirm; the mask
    // then selects the bucket row for this key.
    final V[] row = array.get(key & (array.length() - 1));
    if(row == RESIZED) {
        // Bucket already migrated during an ongoing resize; retry in the new table.
        return doGet(table.resizeView, key);
    }
    if (row != null)
        // Linear scan of the bucket row for an exact key match.
        for (V item : row) {
            if (key == indexer.applyAsInt(item)) {
                return item;
            }
        }
    return nonexistent();
}
/**
 * Materializes all of the entity's property values and reports their total size
 * as computed by {@code valueSizeCalculator}; zero when the entity has no
 * properties.
 */
public static int calculatePropertySize( InputEntity entity, ToIntFunction<Value[]> valueSizeCalculator )
{
    final int propertyCount = entity.propertyCount();
    if ( propertyCount == 0 )
    {
        return 0;
    }
    final Value[] values = new Value[propertyCount];
    for ( int i = 0; i < propertyCount; i++ )
    {
        values[i] = ValueUtils.asValue( entity.propertyValue( i ) );
    }
    return valueSizeCalculator.applyAsInt( values );
}
}
/**
 * Serializes a temporal value either as a numeric timestamp or as text.
 *
 * <p>When the provider requests timestamps: writes seconds+nanos as a decimal
 * when nanosecond precision is enabled, otherwise epoch milliseconds. When not:
 * formats with {@code _formatter} if set, else {@code defaultFormat}, else
 * {@code toString()}.
 *
 * <p>Fix: removed two stray empty statements ({@code ;;}) left after the
 * formatter calls.
 *
 * @throws IOException if the generator fails to write
 */
@Override
public void serialize(T value, JsonGenerator generator, SerializerProvider provider) throws IOException {
    if (useTimestamp(provider)) {
        if (useNanoseconds(provider)) {
            generator.writeNumber(DecimalUtils.toBigDecimal(
                getEpochSeconds.applyAsLong(value), getNanoseconds.applyAsInt(value)
            ));
            return;
        }
        generator.writeNumber(getEpochMillis.applyAsLong(value));
        return;
    }
    final String str;
    if (_formatter != null) {
        str = _formatter.format(value);
    } else if (defaultFormat != null) {
        str = defaultFormat.format(value);
    } else {
        str = value.toString();
    }
    generator.writeString(str);
}
/**
 * Test helper: stubs the mocked to-client receiver so that each call to
 * {@code receive(...)} fills {@code buffer} via {@code filler} and forwards the
 * resulting message (type {@code msgTypeId}, offset 0, filler-reported length)
 * to the conductor's driver-listener adapter, then returns 1.
 * NOTE(review): the return value 1 presumably means one message processed —
 * confirm against the receiver contract.
 */
private void whenReceiveBroadcastOnMessage(
    final int msgTypeId,
    final MutableDirectBuffer buffer,
    final ToIntFunction<MutableDirectBuffer> filler)
{
    doAnswer(
        (invocation) -> {
            // filler writes the payload into buffer and reports the message length.
            final int length = filler.applyAsInt(buffer);
            conductor.driverListenerAdapter().onMessage(msgTypeId, buffer, 0, length);
            return 1;
        })
        .when(mockToClientReceiver).receive(any(MessageHandler.class));
}
/**
 * Builds a split-to-bucket function for the given partitioning by looking up
 * the connector's partitioning provider and its split-bucket function.
 * NOTE(review): {@code getConnectorId().get()} is an unchecked Optional.get()
 * — presumably the handle always carries a connector id here; confirm.
 */
private ToIntFunction<Split> getSplitToBucket(Session session, PartitioningHandle partitioningHandle)
{
    ConnectorNodePartitioningProvider partitioningProvider = partitioningProviders.get(partitioningHandle.getConnectorId().get());
    checkArgument(partitioningProvider != null, "No partitioning provider for connector %s", partitioningHandle.getConnectorId().get());

    ToIntFunction<ConnectorSplit> splitBucketFunction = partitioningProvider.getSplitBucketFunction(
        partitioningHandle.getTransactionHandle().orElse(null),
        session.toConnectorSession(),
        partitioningHandle.getConnectorHandle());
    checkArgument(splitBucketFunction != null, "No partitioning %s", partitioningHandle);

    // The returned lambda captures splitBucketFunction; bucket assignment happens per split.
    return split -> {
        int bucket;
        if (split.getConnectorSplit() instanceof EmptySplit) {
            // Empty splits have no data: task-wide lifespans go to bucket 0,
            // otherwise the lifespan id doubles as the bucket.
            bucket = split.getLifespan().isTaskWide() ? 0 : split.getLifespan().getId();
        }
        else {
            bucket = splitBucketFunction.applyAsInt(split.getConnectorSplit());
        }
        if (!split.getLifespan().isTaskWide()) {
            // Sanity check: a grouped-lifespan split must land in its own bucket.
            checkArgument(split.getLifespan().getId() == bucket);
        }
        return bucket;
    };
}