public static Pair<Integer, Integer> getDefaultBatchSizeAndLimit(long maxMemory)
{
  long memoryLimit = maxMemory / 10;
  long batchSize = 5 * 1024 * 1024;
  long queueLimit = 50;

  if (batchSize * queueLimit > memoryLimit) {
    queueLimit = memoryLimit / batchSize;
  }

  // make room for at least two queue items
  if (queueLimit < 2) {
    queueLimit = 2;
    batchSize = memoryLimit / queueLimit;
  }

  return new Pair<>((int) batchSize, (int) queueLimit);
}
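// A minimal usage sketch (the heap value is hypothetical, not from the source):
// with maxMemory = 1 GiB the budget is maxMemory / 10 ≈ 102 MiB, so the default
// 50-item queue (50 * 5 MiB = 250 MiB) exceeds it and the queue limit is cut to
// 102 MiB / 5 MiB = 20 items, while the 5 MiB batch size is kept.
Pair<Integer, Integer> batchSizeAndLimit = getDefaultBatchSizeAndLimit(1024L * 1024 * 1024);
// batchSizeAndLimit.lhs == 5 * 1024 * 1024 (batch size), batchSizeAndLimit.rhs == 20 (queue limit)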
@Override
public Pair<String, Integer> getSubjectAndId(ByteBuffer payload)
{
  return new Pair<>(topic, payload.getInt());
}
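// Hedged sketch: getInt() reads from the buffer's current position, so the
// caller must hand in a buffer positioned at the 4-byte id. The `resolver`
// instance and the id value below are illustrative, not from the source.
ByteBuffer payload = ByteBuffer.allocate(Integer.BYTES).putInt(42);
payload.flip(); // rewind so getInt() reads the value just written
Pair<String, Integer> subjectAndId = resolver.getSubjectAndId(payload); // (topic, 42)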
public void addUpdate(String updatedItemName, byte[] updatedItemData)
{
  updateQueue.add(new Pair<>(updatedItemName, updatedItemData));
}
public void put(K key, @Nullable V value, int size)
{
  final int totalSize = size + 48; // add approximate object overhead
  numBytes += totalSize;
  super.put(key, new Pair<>(totalSize, value));
}
public static <T1, T2> Pair<T1, T2> of(@Nullable T1 lhs, @Nullable T2 rhs)
{
  return new Pair<>(lhs, rhs);
}
public static <T> Pair<Queue<T>, Accumulator<Queue<T>, T>> createBySegmentAccumulatorPair()
{
  // In the parallel query runner, multiple threads add to this queue concurrently
  Queue<T> init = new ConcurrentLinkedQueue<>();
  Accumulator<Queue<T>, T> accumulator = (accumulated, in) -> {
    if (in == null) {
      throw new ISE("Cannot have null result");
    }
    accumulated.offer(in);
    return accumulated;
  };
  return new Pair<>(init, accumulator);
}
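// Hedged usage sketch, assuming a Druid Sequence<T> named `results` in scope
// (the variable names are illustrative): the ConcurrentLinkedQueue makes the
// accumulated state safe to share when several runners accumulate in parallel.
Pair<Queue<T>, Accumulator<Queue<T>, T>> bySegment = createBySegmentAccumulatorPair();
Queue<T> accumulated = results.accumulate(bySegment.lhs, bySegment.rhs);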
@Override
public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable)
{
  log.info("Adding SegmentHandoffCallback for dataSource[%s] Segment[%s]", dataSource, descriptor);
  Pair<Executor, Runnable> prev = handOffCallbacks.putIfAbsent(
      descriptor,
      new Pair<>(exec, handOffRunnable)
  );
  return prev == null;
}
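// Sketch of the registration contract (the `notifier` variable and the log
// message are illustrative, and Execs.directExecutor() is assumed available):
// putIfAbsent makes registration idempotent per descriptor, and the boolean
// tells the caller whether its callback was the one actually stored.
boolean registered = notifier.registerSegmentHandoffCallback(
    descriptor,
    Execs.directExecutor(),
    () -> log.info("Segment[%s] handed off", descriptor)
);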
private Pair<Map<Interval, String>, Map<Interval, List<DataSegment>>> getVersionAndBaseSegments(
    List<DataSegment> snapshot
)
{
  Map<Interval, String> versions = new HashMap<>();
  Map<Interval, List<DataSegment>> segments = new HashMap<>();

  for (DataSegment segment : snapshot) {
    Interval interval = segment.getInterval();
    versions.put(interval, segment.getVersion());
    segments.computeIfAbsent(interval, i -> new ArrayList<>()).add(segment);
  }

  return new Pair<>(versions, segments);
}
private List<Pair<Long, Long>> makeIntervalLongs()
{
  List<Pair<Long, Long>> intervalLongs = new ArrayList<>();
  for (Interval interval : intervals) {
    intervalLongs.add(new Pair<>(interval.getStartMillis(), interval.getEndMillis()));
  }
  return intervalLongs;
}
public Pair<String, Server> getDefaultLookup()
{
  final String brokerServiceName = tierConfig.getDefaultBrokerServiceName();
  return new Pair<>(brokerServiceName, servers.get(brokerServiceName).pick());
}
public Pair<Segment, Closeable> getAndIncrementSegment()
{
  ReferenceCountingSegment segment = getIncrementedSegment();
  return new Pair<>(segment, segment.decrementOnceCloseable());
}
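// Hedged sketch of the intended call pattern (the consuming code is
// illustrative): pairing the segment with its decrement-once Closeable lets
// the caller scope the reference count with try-with-resources, so the
// reference is released exactly once even if reading throws.
Pair<Segment, Closeable> segmentAndCloser = getAndIncrementSegment();
try (Closeable releaser = segmentAndCloser.rhs) {
  Segment segment = segmentAndCloser.lhs;
  // ... read from the segment while the reference is held ...
}
catch (IOException e) {
  throw new RuntimeException(e);
}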
public static Pair<List<AggregatorFactory>, List<PostAggregator>> condensedAggregators(
    List<AggregatorFactory> aggList,
    List<PostAggregator> postAggList,
    String metric
)
{
  List<PostAggregator> condensedPostAggs = AggregatorUtil.pruneDependentPostAgg(postAggList, metric);

  // collect the fields the retained post-aggregators depend on, then keep only
  // the aggregators that produce one of those fields
  Set<String> dependencySet = new HashSet<>();
  dependencySet.add(metric);
  for (PostAggregator postAggregator : condensedPostAggs) {
    dependencySet.addAll(postAggregator.getDependentFields());
  }

  List<AggregatorFactory> condensedAggs = new ArrayList<>();
  for (AggregatorFactory aggregatorSpec : aggList) {
    if (dependencySet.contains(aggregatorSpec.getName())) {
      condensedAggs.add(aggregatorSpec);
    }
  }
  return new Pair<>(condensedAggs, condensedPostAggs);
}
@Nullable
private Pair<DataSegment, Set<String>> getServersWhereSegmentIsServed(SegmentId segmentId)
{
  DataSegment theSegment = null;
  Set<String> servers = new HashSet<>();

  for (DruidServer druidServer : serverInventoryView.getInventory()) {
    DataSegment currSegment = druidServer.getSegment(segmentId);
    if (currSegment != null) {
      theSegment = currSegment;
      servers.add(druidServer.getHost());
    }
  }

  if (theSegment == null) {
    return null;
  }
  return new Pair<>(theSegment, servers);
}
@SuppressWarnings("ArgumentParameterSwap") private List<Pair<QueryableIndex, String>> getIndexes(int index1, int index2) { return Arrays.asList( new Pair<>( SchemalessIndexTest.getIncrementalIndex(index1, index2), StringUtils.format("Failed: II[%,d, %,d]", index1, index2) ), new Pair<>( SchemalessIndexTest.getIncrementalIndex(index2, index1), StringUtils.format("Failed: II[%,d, %,d]", index2, index1) ), new Pair<>( schemalessIndexTest.getMergedIncrementalIndex(index1, index2), StringUtils.format("Failed: MII[%,d, %,d]", index1, index2) ), new Pair<>( schemalessIndexTest.getMergedIncrementalIndex(index2, index1), StringUtils.format("Failed: MII[%,d, %,d]", index2, index1) ) ); }
@Test
public void testDelete1()
{
  final String dataSource = "test";
  final Interval interval = Intervals.of("2011-04-01/2011-04-02");

  Future future = assertQueryable(
      Granularities.DAY,
      dataSource,
      interval,
      ImmutableList.of(new Pair<>("2", interval))
  );
  waitForTestVerificationAndCleanup(future);

  dropQueryable(dataSource, "2", interval);
  future = assertQueryable(
      Granularities.DAY,
      dataSource,
      interval,
      ImmutableList.of(new Pair<>("1", interval))
  );
  waitForTestVerificationAndCleanup(future);
}
@Test
public void testSimpleGet()
{
  Future future = assertQueryable(
      Granularities.DAY,
      "test",
      Intervals.of("P1d/2011-04-01"),
      ImmutableList.of(
          new Pair<>("1", Intervals.of("P1d/2011-04-01"))
      )
  );
  waitForTestVerificationAndCleanup(future);

  future = assertQueryable(
      Granularities.DAY,
      "test",
      Intervals.of("P2d/2011-04-02"),
      ImmutableList.of(
          new Pair<>("1", Intervals.of("P1d/2011-04-01")),
          new Pair<>("2", Intervals.of("P1d/2011-04-02"))
      )
  );
  waitForTestVerificationAndCleanup(future);
}
@Test
public void testNullPostAggregatorNames()
{
  AggregatorFactory agg1 = new DoubleSumAggregatorFactory("agg1", "value");
  AggregatorFactory agg2 = new DoubleSumAggregatorFactory("agg2", "count");
  PostAggregator postAgg1 = new ArithmeticPostAggregator(
      null,
      "*",
      Lists.newArrayList(
          new FieldAccessPostAggregator(null, "agg1"),
          new FieldAccessPostAggregator(null, "agg2")
      )
  );
  PostAggregator postAgg2 = new ArithmeticPostAggregator(
      "postAgg",
      "/",
      Lists.newArrayList(
          new FieldAccessPostAggregator(null, "agg1"),
          new FieldAccessPostAggregator(null, "agg2")
      )
  );

  Assert.assertEquals(
      new Pair<>(Lists.newArrayList(agg1, agg2), Collections.singletonList(postAgg2)),
      AggregatorUtil.condensedAggregators(
          Lists.newArrayList(agg1, agg2),
          Lists.newArrayList(postAgg1, postAgg2),
          "postAgg"
      )
  );
}
@Test
public void testCasing()
{
  AggregatorFactory agg1 = new DoubleSumAggregatorFactory("Agg1", "value");
  AggregatorFactory agg2 = new DoubleSumAggregatorFactory("Agg2", "count");
  PostAggregator postAgg1 = new ArithmeticPostAggregator(
      null,
      "*",
      Lists.newArrayList(
          new FieldAccessPostAggregator(null, "Agg1"),
          new FieldAccessPostAggregator(null, "Agg2")
      )
  );
  PostAggregator postAgg2 = new ArithmeticPostAggregator(
      "postAgg",
      "/",
      Lists.newArrayList(
          new FieldAccessPostAggregator(null, "Agg1"),
          new FieldAccessPostAggregator(null, "Agg2")
      )
  );

  Assert.assertEquals(
      new Pair<>(Lists.newArrayList(agg1, agg2), Collections.singletonList(postAgg2)),
      AggregatorUtil.condensedAggregators(
          Lists.newArrayList(agg1, agg2),
          Lists.newArrayList(postAgg1, postAgg2),
          "postAgg"
      )
  );
}