/**
 * Limit action specialised for {@link IntStream}: registers the {@code limit}
 * operation with the parent action and records the requested maximum size.
 *
 * @param maxSize maximum number of elements the stream may emit
 */
public IntLimitAction(long maxSize) {
    // The superclass receives the operation itself plus its metadata
    // (the concrete stream type and the LIMIT op tag).
    super(s -> s.limit(maxSize), IntStream.class, LIMIT);
    this.limit = maxSize;
}
/**
 * Returns a new list containing the elements of {@code list} in reverse order.
 * The input list is not modified.
 *
 * @param list the list to reverse; must not be {@code null}
 * @param <T> element type
 * @return a new list with the elements in reverse order
 * @throws IllegalArgumentException if {@code list} is {@code null}
 */
public static <T> List<T> reverse_IntStream(List<T> list) {
    if (list == null) {
        throw new IllegalArgumentException("list can't be null");
    }
    final int size = list.size();
    // Walk ascending indices and read each element from the mirrored position.
    return IntStream.range(0, size)
            .mapToObj(i -> list.get(size - 1 - i))
            .collect(toList());
}
/**
 * Chunks an array into smaller arrays of a specified size.
 * The last chunk may be shorter when the length is not a multiple of {@code size}.
 *
 * @param numbers input array of numbers
 * @param size the chunk size; must be positive
 * @return array of chunks, in input order
 * @throws IllegalArgumentException if {@code size} is not positive
 */
public static int[][] chunk(int[] numbers, int size) {
    // Guard: the previous Math.ceil-based limit silently misbehaved for size <= 0
    // (division by zero yields Infinity -> limit(Long.MAX_VALUE) -> runaway stream).
    if (size <= 0) {
        throw new IllegalArgumentException("size must be positive: " + size);
    }
    // Integer ceiling division; avoids the double round-trip of Math.ceil.
    int chunkCount = numbers.length / size + (numbers.length % size == 0 ? 0 : 1);
    return IntStream.iterate(0, i -> i + size)
            .limit(chunkCount)
            // Clamp the final chunk's end to the array length.
            .mapToObj(start -> Arrays.copyOfRange(numbers, start, Math.min(start + size, numbers.length)))
            .toArray(int[][]::new);
}
/**
 * Returns an {@link IntStream} that emits {@code item} exactly {@code times} times.
 *
 * @param item the value to repeat
 * @param times how many times to emit it; must be non-negative
 * @return a stream of {@code times} copies of {@code item}
 */
static IntStream repeat(int item, int times) {
    // iterate with an identity step yields an endless run of item;
    // limit trims it and, like generate/limit did, rejects a negative count.
    IntStream endless = IntStream.iterate(item, previous -> previous);
    return endless.limit(times);
}
/**
 * Creates an array of length {@code n} where every slot holds {@code value}.
 *
 * @param n the array length; must be non-negative
 * @param value the value to store in every slot
 * @return a new array of {@code n} copies of {@code value}
 * @throws IllegalArgumentException if {@code n} is negative
 */
public static int[] initializeArrayWithValues(int n, int value) {
    // Preserve the original contract: a negative length is rejected with
    // IllegalArgumentException (previously thrown by IntStream.limit).
    if (n < 0) {
        throw new IllegalArgumentException("n must be non-negative: " + n);
    }
    // Arrays.fill is the idiomatic (and cheaper) way to build a constant array;
    // no stream pipeline needed.
    int[] result = new int[n];
    Arrays.fill(result, value);
    return result;
}
/**
 * Splits {@code input} into consecutive pieces of {@code step} characters;
 * the final piece holds whatever characters remain.
 *
 * @param input the string to split; {@code null} or empty yields the shared empty array
 * @param step the piece length; {@code 0} returns the whole input as a single element
 * @return the pieces, in order
 */
public static String[] chop(String input, int step) {
    if (input == null || input.isEmpty()) {
        return EMPTY_ARRAY;
    }
    if (step == 0) {
        // A zero step cannot advance; treat the whole input as one piece.
        return new String[]{input};
    }
    int length = input.length();
    // Ceiling division: one extra piece when length is not a multiple of step.
    int pieces = length % step == 0 ? length / step : length / step + 1;
    return IntStream.iterate(0, offset -> offset + step)
            .limit(pieces)
            // Clamp the final piece's end to the string length.
            .mapToObj(offset -> input.substring(offset, Math.min(offset + step, length)))
            .toArray(String[]::new);
}
/**
 * Find last index of element in the array. Return -1 in case element does not exist.
 *
 * @param elements input array
 * @param el element to find
 * @return index of the last occurrence of {@code el}, or -1 when absent
 */
public static int lastIndexOf(int[] elements, int el) {
    // Scan from the tail so the first hit is the last occurrence.
    for (int idx = elements.length - 1; idx >= 0; idx--) {
        if (elements[idx] == el) {
            return idx;
        }
    }
    return -1;
}
/**
 * Truncates this stream to at most {@code maxSize} elements, re-wrapping the
 * result so the returned stream keeps this decorator's behaviour.
 */
@Override
public IntStream limit(long maxSize) {
    IntStream truncated = stream().limit(maxSize);
    return wrap(truncated);
}
/**
 * Builds a tensor view over a flat float array.
 *
 * NOTE(review): globalDimensionsLimit appears to be a 2-row table of per-dimension
 * bounds — row 0 the inclusive lower index, row 1 the inclusive upper index — since
 * extents are computed as upper - lower + 1; confirm against callers.
 *
 * @param startOffset offset into {@code elements} where this view begins
 * @param elements flat backing array (stored by reference, not copied)
 * @param globalDimensions full extent of each dimension; must be non-null and non-empty
 * @param globalDimensionsLimit per-dimension [lower, upper] bounds (see note above)
 * @throws IllegalArgumentException if {@code globalDimensions} is null or empty
 */
public Tensor(int startOffset, float[] elements, int[] globalDimensions, int[][] globalDimensionsLimit) {
    super();
    if (globalDimensions == null || globalDimensions.length == 0) {
        throw new IllegalArgumentException("Please provide dimensions");
    }
    this.startOffset = startOffset;
    this.elements = elements;
    this.globalDimensions = globalDimensions;
    this.globalDimensionsLimit = globalDimensionsLimit;
    this.dimTmp = new int[globalDimensions.length];
    // A dimension is kept when its limits span more than one index, or when the
    // limited span covers the whole global extent; single-index (collapsed)
    // dimensions are dropped from the view.
    this.dimensions = new int[(int) IntStream.range(0, globalDimensions.length)
            .filter(i -> globalDimensionsLimit[0][i] != globalDimensionsLimit[1][i] || globalDimensionsLimit[1][i] - globalDimensionsLimit[0][i] + 1 == globalDimensions[i]).count()];
    // Second pass applies the same predicate to record each kept dimension's extent
    // (upper - lower + 1) in order.
    for (int i = 0, j = 0; i < globalDimensions.length; i++) {
        if (globalDimensionsLimit[0][i] != globalDimensionsLimit[1][i] || globalDimensionsLimit[1][i] - globalDimensionsLimit[0][i] + 1 == globalDimensions[i]) {
            dimensions[j++] = globalDimensionsLimit[1][i] - globalDimensionsLimit[0][i] + 1;
        }
    }
    // dimMultiplicators[i] = product of the GLOBAL dimensions after index i
    // (row-major stride). The limit(globalDimensions.length) call is a no-op
    // upper bound here — skip(i + 1) already leaves fewer elements than that.
    this.dimMultiplicators = new int[dimensions.length];
    IntStream.range(0, dimMultiplicators.length).forEach(i -> {
        dimMultiplicators[i] = 1;
        Arrays.stream(globalDimensions).skip(i + 1).limit(globalDimensions.length).forEach(j -> dimMultiplicators[i] *= j);
    });
    // Total element count of the view: product of the kept dimensions' extents.
    size = IntStream.range(0, dimensions.length).map(i -> dimensions[i]).reduce(1, (a, b) -> a * b);
}
/**
 * Builds the upstream pipeline and applies the recorded {@code limit} to
 * whichever concrete stream type was produced, preserving that type.
 *
 * NOTE(review): the unchecked casts assume TS matches the built stream's
 * concrete interface — nothing in this block enforces that; confirm the
 * builder's type parameters guarantee it.
 */
@Override
@SuppressWarnings("unchecked")
public TS build(boolean parallel) {
    final TS built = previous().build(parallel);
    // Dispatch on the concrete stream interface: limit(long) exists on each of
    // the four stream types but is not declared on a common supertype.
    if (built instanceof Stream<?>) {
        return (TS) ((Stream<T>) built).limit(limit);
    } else if (built instanceof IntStream) {
        return (TS) ((IntStream) built).limit(limit);
    } else if (built instanceof LongStream) {
        return (TS) ((LongStream) built).limit(limit);
    } else if (built instanceof DoubleStream) {
        return (TS) ((DoubleStream) built).limit(limit);
    } else {
        throw new UnsupportedOperationException(
            "Built stream did not match any known stream interface."
        );
    }
}
}
// Pre-generate a pool of pseudo-random ints; the hard-coded seed makes runs reproducible.
// NOTE(review): mapSizeDividedBy16000 * 64 is evaluated before the cast to long — if the
// field is an int, a large value would overflow first; confirm the expected magnitude.
this.randomIntegersForMap = new Random(0x123456789ABCDL).ints().limit((long) (this.mapSizeDividedBy16000 * 64)).toArray();
// Each benchmark case pairs a regex pattern with sourceLength bytes of synthetic input.
case ".*x.*":
    pattern = Slices.utf8Slice(".*x.*");
    // All-'a' input (byte 97): the pattern can never match.
    IntStream.generate(() -> 97).limit(sourceLength).forEach(sliceOutput::appendByte);
    break;
case ".*(x|y).*":
    pattern = Slices.utf8Slice(".*(x|y).*");
    // All-'a' input again; exercises alternation without a match.
    IntStream.generate(() -> 97).limit(sourceLength).forEach(sliceOutput::appendByte);
    break;
case "longdotstar":
    pattern = Slices.utf8Slice(".*coolfunctionname.*");
    // Random lowercase letters ('a'..'z' = 97..122); a long literal inside .* wrappers.
    ThreadLocalRandom.current().ints(97, 123).limit(sourceLength).forEach(sliceOutput::appendByte);
    break;
case "phone":
    pattern = Slices.utf8Slice("\\d{3}/\\d{3}/\\d{4}");
    // Random digits and '/' ('/'..'9' = 47..57); phone-number-shaped input.
    ThreadLocalRandom.current().ints(47, 58).limit(sourceLength).forEach(sliceOutput::appendByte);
    break;
case "literal":
    pattern = Slices.utf8Slice("literal");
    // Random lowercase letters against a plain literal pattern.
    ThreadLocalRandom.current().ints(97, 123).limit(sourceLength).forEach(sliceOutput::appendByte);
    break;
default:
// Draw distinct ints in [0, 1000) until numberOfUsers unique values are collected.
// NOTE(review): the source stream is unbounded, so this only terminates when
// numberOfUsers <= 1000 — confirm the caller guarantees that.
.ints(0, 1000)
.distinct()
.limit(numberOfUsers)
.toArray();
@Test
public void testLoadExistingRecoversInflightInvalidationsForEventualCache() throws Exception {
    // Recreate the state a passive entity would have held before failover,
    // then verify the active entity recovers it on loadExisting().
    ClusterTierActiveEntity activeEntity = new ClusterTierActiveEntity(defaultRegistry, defaultConfiguration, DEFAULT_MAPPER);
    EhcacheStateServiceImpl ehcacheStateService = defaultRegistry.getStoreManagerService();
    ehcacheStateService.createStore(defaultStoreName, defaultStoreConfiguration, false); //Passive would have done this before failover
    InvalidationTracker invalidationTracker = ehcacheStateService.getInvalidationTracker(defaultStoreName);
    // Seed ten random hashes in [0, 100) as in-flight invalidations.
    Random random = new Random();
    random.ints(0, 100).limit(10).forEach(invalidationTracker::trackHashInvalidation);
    activeEntity.loadExisting();
    // The tracked invalidations must be visible as in-flight after the load.
    assertThat(activeEntity.getInflightInvalidations().isEmpty(), is(false));
}
/**
 * Test with large user attributes on random nodes.
 * Also tests that big messages (more than 1MB) properly separated and processed by zk.
 *
 * @throws Exception If failed.
 */
@Test
public void testLargeUserAttribute3() throws Exception {
    // Pick 3 distinct node indices in [0, 10) at random; terminates because 3 < 10.
    Set<Integer> idxs = ThreadLocalRandom.current()
        .ints(0, 10)
        .distinct()
        .limit(3)
        .boxed()
        .collect(Collectors.toSet());
    for (int i = 0; i < 10; i++) {
        info("Iteration: " + i);
        // Only the randomly chosen nodes carry the large attribute.
        if (idxs.contains(i))
            initLargeAttribute();
        else
            userAttrs = null;
        // Nodes 6..9 start in client mode, the rest as servers.
        helper.clientMode(i > 5);
        startGrid(i);
    }
    waitForTopology(10);
}
@Test
public void testMassive() {
    BlockBuilder inputBlockBuilder = BIGINT.createBlockBuilder(null, 5000);
    TypedHistogram typedHistogram = new SingleTypedHistogram(BIGINT, 1000);
    // For each i in [1, 2000), write the value i exactly i times,
    // so the histogram count for key i should equal i.
    IntStream.range(1, 2000)
        .flatMap(i -> IntStream.iterate(i, IntUnaryOperator.identity()).limit(i))
        .forEach(j -> BIGINT.writeLong(inputBlockBuilder, j));
    Block inputBlock = inputBlockBuilder.build();
    for (int i = 0; i < inputBlock.getPositionCount(); i++) {
        typedHistogram.add(i, inputBlock, 1);
    }
    MapType mapType = mapType(BIGINT, BIGINT);
    BlockBuilder out = mapType.createBlockBuilder(null, 1);
    typedHistogram.serialize(out);
    Block outputBlock = mapType.getObject(out, 0);
    // Serialized map alternates key/value positions; each key i must map to count i.
    for (int i = 0; i < outputBlock.getPositionCount(); i += 2) {
        assertEquals(BIGINT.getLong(outputBlock, i + 1), BIGINT.getLong(outputBlock, i));
    }
}
}
// Pre-compute a pool of 10007 pseudo-random ints, mapped through Math.abs;
// nextRandom presumably cycles through the pool — confirm against surrounding code.
// NOTE(review): Math.abs(Integer.MIN_VALUE) is still negative, so a rare negative
// value can slip through — confirm consumers tolerate (or mask) it.
int[] randomNumbers = random.ints().limit(10007).map(Math::abs).toArray(); int nextRandom = 0;
// Pre-compute a pool of 10007 pseudo-random ints, mapped through Math.abs;
// nextRandom presumably cycles through the pool — confirm against surrounding code.
// NOTE(review): Math.abs(Integer.MIN_VALUE) is still negative, so a rare negative
// value can slip through — confirm consumers tolerate (or mask) it.
int[] randomNumbers = random.ints().limit(10007).map(Math::abs).toArray(); int nextRandom = 0;
@Deprecated //moved to cyclops.companion.Functions public static Function<? super ReactiveSeq<Integer>, ? extends ReactiveSeq<Integer>> limitInts(long maxSize){ return a->a.ints(i->i,s->s.limit(maxSize)); } /*