/**
 * Creates an action that truncates a {@link LongStream} to at most
 * {@code maxSize} elements, recording the bound in {@code this.limit}.
 *
 * @param maxSize maximum number of elements the stream may emit
 */
public LongLimitAction(long maxSize) {
    super(stream -> stream.limit(maxSize), LongStream.class, LIMIT);
    this.limit = maxSize;
}
/**
 * Sums the counters for all buckets whose index is at or below the bucket
 * containing {@code val}.
 *
 * @param val value whose bucket forms the inclusive upper bound
 * @return total count accumulated in buckets {@code 0..getIndex(val)}
 */
long getNumAtOrBelow(long val) {
    // Inclusive bucket range: indices 0 through getIndex(val).
    long bucketCount = getIndex(val) + 1;
    return Arrays.stream(counts)
        .limit(bucketCount)
        .mapToLong(counter -> counter.sum())
        .sum();
}
/**
 * Generates pseudorandom discrete distribution.
 *
 * @param numOfValues Number of distinct values of pseudorandom variable.
 * @param seed Seed.
 * @return Probabilities array.
 */
public static double[] randomDistribution(int numOfValues, long seed) {
    A.ensure(numOfValues > 0, "numberOfValues > 0");

    Random random = new Random(seed);

    // Non-negative random weights. The range already yields exactly
    // numOfValues elements, so the redundant .limit(numOfValues) the
    // original pipeline carried has been dropped.
    long[] rnd = IntStream.range(0, numOfValues)
        .mapToLong(i -> random.nextInt(Integer.MAX_VALUE))
        .toArray();

    long sum = Arrays.stream(rnd).sum();

    double[] res = new double[numOfValues];

    // Math.max guards against division by zero when every weight is 0
    // (in that degenerate case all probabilities are 0).
    for (int i = 0; i < res.length; i++)
        res[i] = rnd[i] / Math.max(1.0, sum);

    return res;
}
/**
 * Returns a wrapped stream truncated to at most {@code maxSize} elements.
 *
 * @param maxSize maximum number of elements to keep
 * @return the wrapped, limited stream
 */
@Override
public LongStream limit(long maxSize) {
    final LongStream truncated = stream().limit(maxSize);
    return wrap(truncated);
}
/**
 * Builds the upstream pipeline and applies this stage's {@code limit} to
 * whichever of the four JDK stream flavours it produced.
 *
 * @param parallel whether the upstream pipeline is built for parallel execution
 * @return the limited stream, cast back to {@code TS}
 * @throws UnsupportedOperationException if the upstream result is not a
 *         {@code Stream}, {@code IntStream}, {@code LongStream} or
 *         {@code DoubleStream}
 */
@Override
@SuppressWarnings("unchecked")
public TS build(boolean parallel) {
    final TS upstream = previous().build(parallel);
    // Dispatch on the concrete stream flavour; limit() is not part of a
    // shared super-interface, so each branch must cast explicitly.
    if (upstream instanceof Stream<?>) {
        return (TS) ((Stream<T>) upstream).limit(limit);
    }
    if (upstream instanceof IntStream) {
        return (TS) ((IntStream) upstream).limit(limit);
    }
    if (upstream instanceof LongStream) {
        return (TS) ((LongStream) upstream).limit(limit);
    }
    if (upstream instanceof DoubleStream) {
        return (TS) ((DoubleStream) upstream).limit(limit);
    }
    throw new UnsupportedOperationException(
        "Built stream did not match any known stream interface."
    );
}
}
// Deterministic batch of random longs (fixed seed 0x123456789ABCD) sized by
// mapSizeDividedBy16000 — presumably fixture keys for a map lookup test;
// NOTE(review): confirm intended size against the caller.
long[] randomLongsGet = new Random(0x123456789ABCDL).longs().limit(this.mapSizeDividedBy16000).toArray();
Map<Long, BlobValue> map = random.longs().limit(JOB_SIZE).collect(HashMap::new, (hashMap, x) -> hashMap.put(x, new BlobValue()), HashMap::putAll); futures.add(executorService.submit(() -> { cache.putAll(map);
futures.add(executorService.submit(() -> random.longs().limit(JOB_SIZE).forEach(x -> { cache.put(x, new BlobValue()); universalSet.add(x);
@Ignore("This is currently unstable as if the clear does not complete before the failover," +
    "there is no future operation that will trigger the code in ClusterTierActiveEntity.invokeServerStoreOperation" +
    "dealing with in-flight invalidation reconstructed from reconnect data")
@Test(timeout=180000)
public void testClear() throws Exception {
    List<Future<?>> futures = new ArrayList<>();
    // Every key written by any task, across all caches.
    Set<Long> universalSet = ConcurrentHashMap.newKeySet();

    // Populate each cache from NUM_OF_THREADS concurrent bulk-put tasks,
    // each with its own batch of JOB_SIZE random keys.
    caches.forEach(cache -> {
        for (int i = 0; i < NUM_OF_THREADS; i++) {
            Map<Long, BlobValue> map = random.longs().limit(JOB_SIZE).collect(HashMap::new, (hashMap, x) -> hashMap.put(x, new BlobValue()), HashMap::putAll);
            futures.add(executorService.submit(() -> {
                cache.putAll(map);
                universalSet.addAll(map.keySet());
            }));
        }
    });

    // Wait for all writes to land before reading.
    drainTasks(futures);

    // Read every key through both caches (faults entries in locally).
    universalSet.forEach(x -> {
        CACHE1.get(x);
        CACHE2.get(x);
    });

    // Start the clear, then kill the active server while it may still be
    // in flight — this is the failover window the @Ignore text refers to.
    Future<?> clearFuture = executorService.submit(() -> CACHE1.clear());
    CLUSTER.getClusterControl().terminateActive();
    clearFuture.get();

    // After failover the clear must still have taken effect everywhere.
    universalSet.forEach(x -> assertThat(CACHE2.get(x), nullValue()));
}
@Deprecated //moved to cyclops.companion.Functions public static Function<? super ReactiveSeq<Long>, ? extends ReactiveSeq<Long>> limitLongs(long maxSize){ return a->a.longs(i->i,s->s.limit(maxSize)); } /*
/**
 * Returns a transformation that truncates a {@code ReactiveSeq<Long>} to at
 * most {@code maxSize} elements, applied via its primitive-long view.
 *
 * @param maxSize maximum number of elements to keep
 * @return function applying {@code limit(maxSize)} to the sequence's longs
 */
public static Function<? super ReactiveSeq<Long>, ? extends ReactiveSeq<Long>> limitLongs(long maxSize){
    return a->a.longs(i->i,s->s.limit(maxSize));
}
/*
/**
 * Demo: fills an array with random values in parallel, prints its first ten
 * elements, parallel-sorts it, and prints the first ten again.
 */
public static void main(String[] args) {
    final long[] values = new long[20000];
    // Random ints in [0, 1000000) widened to long, one per slot, in parallel.
    Arrays.parallelSetAll(values, index -> ThreadLocalRandom.current().nextInt(1000000));
    printFirstTen(values);
    Arrays.parallelSort(values);
    printFirstTen(values);
}

/** Prints the first ten elements of {@code values} on one line. */
private static void printFirstTen(long[] values) {
    Arrays.stream(values).limit(10).forEach(v -> System.out.print(v + " "));
    System.out.println();
}
}
/**
 * Recommends up to {@code n} items in popularity order, optionally filtered.
 *
 * @param n maximum number of items to return; {@code n <= 0} means unbounded
 * @param filter optional item predicate; {@code null} keeps every item
 * @return item ids in popularity order, filtered and truncated
 */
private LongList recommendWithPredicate(int n, LongPredicate filter) {
    LongList popular = statistics.getItemsByPopularity();
    LongList result = new LongArrayList(popular.size());

    // Stream item ids in their stored (popularity) order.
    LongStream candidates = IntStream.range(0, popular.size()).mapToLong(popular::getLong);
    if (filter != null) {
        candidates = candidates.filter(filter);
    }
    if (n > 0) {
        candidates = candidates.limit(n);
    }

    // forEachOrdered preserves popularity ranking in the output list.
    candidates.forEachOrdered(result::add);
    return result;
}
/**
 * Returns an {@code ExLongStream} over this stream truncated to at most
 * {@code maxSize} elements.
 *
 * @param maxSize maximum number of elements to keep
 * @return the limited stream, rewrapped
 */
@Override
public ExLongStream limit(long maxSize) {
    final LongStream truncated = stream.limit(maxSize);
    return ExLongStream.of(truncated);
}
/**
 * Returns a stream of {@code long}s:
 * {@code for (long i = start; i <= end; i += step) yield i;}
 */
private static LongStream range(long start, long end, long step) {
    if (start > end) {
        return LongStream.empty();
    }
    // Inclusive count for a positive step: floor((end - start) / step) + 1.
    long count = 1 + (end - start) / step;
    // range().map() instead of iterate().limit(): same elements, but the
    // source splits well for parallel use and avoids the stateful seed
    // function. (step == 0 still throws ArithmeticException, as before.)
    return LongStream.range(0, count).map(i -> start + i * step);
}
// Builds the array [0, 1, ..., size-1] by iterating from 0 and truncating to
// `size` elements. NOTE(review): LongStream.range(0, size) is the more direct
// equivalent for non-negative sizes.
static long[] fillCounted(int size) { return LongStream.iterate(0, i -> i + 1) .limit(size).toArray(); } public static void main(String[] args) {
/**
 * Views this triple {@code (start, end, step)} as a stream of longs starting
 * at {@code start} and advancing by {@code step}, with
 * {@code (end - start) / step} elements.
 */
public LongStream asLongRange() {
    final long from = ((Number) t3.v1()).longValue();
    final long to = ((Number) t3.v2()).longValue();
    final long stride = ((Number) t3.v3()).longValue();
    // NOTE(review): floor division drops the final value whenever
    // (to - from) is not a multiple of stride — confirm whether the range is
    // meant to be half-open; an inclusive range would need
    // (to - from) / stride + 1 elements.
    final long size = (to - from) / stride;
    return LongStream.iterate(from, i -> i + stride).limit(size);
}
}
/** Exercises min/max over empty, constant, ascending and random streams. */
@Test(timeout = 1000)
public void testMinMax() {
    // Empty stream.
    runTestMinMax(LongStream.empty());
    // Ten identical zeros (constant iterate).
    runTestMinMax(LongStream.iterate(0, n -> n).limit(10));
    // Ascending sequences 1..count of every length below 10.
    for (int count = 1; count < 10; count++) {
        runTestMinMax(LongStream.iterate(1, n -> n + 1).limit(count));
    }
    // Random sequences of every length below 10 (seed argument unused).
    for (int count = 1; count < 10; count++) {
        runTestMinMax(LongStream.iterate(0, ignored -> ThreadLocalRandom.current().nextLong()).limit(count));
    }
}
/**
 * Verifies that a constant-valued interval reports the requested dimensions,
 * a zero minimum, and the constant value at every position.
 */
@Test
public void constantRandomAccessibleInterval() {
    final int nDim = 5;
    final Random rng = new Random( 100 );
    // Random dimensions in [1, 5] per axis, deterministic via fixed seed.
    final long[] dims = LongStream.generate( () -> rng.nextInt( 5 ) + 1 ).limit( nDim ).toArray();
    final IntType constVal = new IntType( 123 );
    final RandomAccessibleInterval< IntType > randomAccessibleInterval =
        ConstantUtils.constantRandomAccessibleInterval( constVal, new FinalInterval( dims ) );
    Assert.assertArrayEquals( dims, Intervals.dimensionsAsLongArray( randomAccessibleInterval ) );
    Assert.assertArrayEquals( new long[ nDim ], Intervals.minAsLongArray( randomAccessibleInterval ) );
    // BUG FIX: compare each pixel p against the constant. The original
    // asserted constVal.valueEquals(constVal) — a vacuous self-comparison
    // that never inspected the interval's contents.
    Views.iterable( randomAccessibleInterval ).forEach( p -> Assert.assertTrue( constVal.valueEquals( p ) ) );
}