.create(intervals) .transformCat( new Function<Interval, Iterable<TimelineObjectHolder<String, Sink>>>() .transformCat( new Function<TimelineObjectHolder<String, Sink>, Iterable<SegmentDescriptor>>()
/**
 * Convert a list of DimFilters to a list of Filters.
 *
 * @param dimFilters list of DimFilters, should all be non-null
 *
 * @return immutable list of Filters, in the same order as {@code dimFilters}
 */
public static List<Filter> toFilters(List<DimFilter> dimFilters)
{
  // Equivalent to transforming lazily and copying: both reject null elements
  // and preserve input order.
  final ImmutableList.Builder<Filter> filters = ImmutableList.builder();
  for (DimFilter dimFilter : dimFilters) {
    filters.add(dimFilter.toFilter());
  }
  return filters.build();
}
/**
 * Wrap the given iterable in a {@link FunctionalIterable} so that
 * transform/filter-style operations can be chained fluently.
 *
 * @param delegate the iterable to wrap; not copied, so later changes to it are visible
 *
 * @return a FunctionalIterable backed directly by {@code delegate}
 */
public static <T> FunctionalIterable<T> create(Iterable<T> delegate)
{
  final FunctionalIterable<T> wrapped = new FunctionalIterable<>(delegate);
  return wrapped;
}
private Iterable<BytesMessageWithOffset> filterAndDecode(Iterable<MessageAndOffset> kafkaMessages, final long offset) { return FunctionalIterable .create(kafkaMessages) .filter( new Predicate<MessageAndOffset>() { @Override public boolean apply(MessageAndOffset msgAndOffset) { return msgAndOffset.offset() >= offset; } } ) .transform( new Function<MessageAndOffset, BytesMessageWithOffset>() { @Override public BytesMessageWithOffset apply(MessageAndOffset msgAndOffset) { ByteBuffer bb = msgAndOffset.message().payload(); byte[] payload = new byte[bb.remaining()]; bb.get(payload); // add nextOffset here, thus next fetch will use nextOffset instead of current offset return new BytesMessageWithOffset(payload, msgAndOffset.nextOffset(), partitionId); } } ); }
.create(queryPlus.getQuery().getIntervals()) .transformCat( new Function<Interval, Iterable<Interval>>() FunctionalIterable.create(chunkIntervals).transform( new Function<Interval, Sequence<T>>()
.create(segmentIds) .trinaryTransform( intervals, results,
@Test
public void testKeep()
{
  // keep() drops elements for which the function returns null, so "2" is removed.
  // Note: Assert.assertEquals takes (expected, actual); the original call had the
  // arguments reversed, which produces misleading failure messages.
  Assert.assertEquals(
      Arrays.asList(1, 3),
      Lists.newArrayList(
          FunctionalIterable.create(Arrays.asList("1", "2", "3"))
                            .keep(
                                new Function<String, Integer>()
                                {
                                  @Override
                                  public Integer apply(String input)
                                  {
                                    if ("2".equals(input)) {
                                      return null;
                                    }
                                    return Integer.parseInt(input);
                                  }
                                }
                            )
      )
  );
}
/**
 * Package the accumulated search hits (with their counts) into a single-element
 * Sequence of search results, truncated to {@code limit} hits and timestamped
 * with the start of the segment's data interval.
 *
 * @param segment the segment the hits came from; supplies the result timestamp
 * @param limit   maximum number of hits to return
 * @param retVal  map from hit to its occurrence count
 *
 * @return a one-element Sequence holding the limited search result
 */
private static Sequence<Result<SearchResultValue>> makeReturnResult(
    Segment segment,
    int limit,
    Object2IntRBTreeMap<SearchHit> retVal
)
{
  // Rebuild each SearchHit with its count attached.
  final Function<Object2IntMap.Entry<SearchHit>, SearchHit> attachCount =
      new Function<Object2IntMap.Entry<SearchHit>, SearchHit>()
      {
        @Override
        public SearchHit apply(Object2IntMap.Entry<SearchHit> input)
        {
          final SearchHit hit = input.getKey();
          return new SearchHit(hit.getDimension(), hit.getValue(), input.getIntValue());
        }
      };

  final Iterable<SearchHit> source = Iterables.transform(retVal.object2IntEntrySet(), attachCount);
  final Result<SearchResultValue> result = new Result<>(
      segment.getDataInterval().getStart(),
      new SearchResultValue(
          Lists.newArrayList(new FunctionalIterable<>(source).limit(limit))
      )
  );
  return Sequences.simple(ImmutableList.of(result));
}
}
.create(queryPlus.getQuery().getIntervals()) .transformCat( new Function<Interval, Iterable<Interval>>() FunctionalIterable.create(chunkIntervals).transform( new Function<Interval, Sequence<T>>()
.create(segmentIds) .trinaryTransform( intervals, results,
/**
 * Package the accumulated search hits (with their counts) into a single-element
 * Sequence of search results, truncated to {@code limit} hits and timestamped
 * with the start of the segment's data interval.
 *
 * @param segment the segment the hits came from; supplies the result timestamp
 * @param limit   maximum number of hits to return
 * @param retVal  map from hit to its occurrence count
 *
 * @return a one-element Sequence holding the limited search result
 */
private static Sequence<Result<SearchResultValue>> makeReturnResult(
    Segment segment,
    int limit,
    Object2IntRBTreeMap<SearchHit> retVal
)
{
  // Rebuild each SearchHit so that it carries its occurrence count.
  Iterable<SearchHit> source = Iterables.transform(
      retVal.object2IntEntrySet(),
      new Function<Object2IntMap.Entry<SearchHit>, SearchHit>()
      {
        @Override
        public SearchHit apply(Object2IntMap.Entry<SearchHit> input)
        {
          SearchHit hit = input.getKey();
          return new SearchHit(hit.getDimension(), hit.getValue(), input.getIntValue());
        }
      }
  );
  return Sequences.simple(
      ImmutableList.of(
          new Result<>(
              // Timestamp the result with the start of the segment's data interval.
              segment.getDataInterval().getStart(),
              new SearchResultValue(
                  // limit() truncates lazily; newArrayList materializes the hits.
                  Lists.newArrayList(new FunctionalIterable<>(source).limit(limit))
              )
          )
      )
  );
}
// NOTE(review): closing brace of the enclosing class (declaration not visible here).
}
/**
 * Collect the distinct segments referenced by {@code timelineObjects}.
 *
 * @return deduplicated list of segments, in first-encounter order
 */
public List<DataSegment> getSegments()
{
  // ImmutableSet.Builder deduplicates while preserving insertion order, matching
  // the semantics of ImmutableSet.copyOf over the lazily-transformed iterable.
  final ImmutableSet.Builder<DataSegment> segments = ImmutableSet.builder();
  for (Pair<TimelineObjectHolder<String, DataSegment>, Interval> timelineObject : timelineObjects) {
    for (PartitionChunk<DataSegment> chunk : timelineObject.lhs.getObject()) {
      segments.add(chunk.getObject());
    }
  }
  return segments.build().asList();
}
/**
 * Produce the Buckets for the shard specs configured at the start of the given
 * interval, or an empty iterable when no specs exist for that time.
 *
 * @param input interval whose start time keys into the configured shard specs
 *
 * @return one Bucket per shard spec, with partition numbers assigned in list order
 */
@Override
public Iterable<Bucket> apply(Interval input)
{
  final DateTime bucketTime = input.getStart();
  final List<HadoopyShardSpec> specs = schema.getTuningConfig().getShardSpecs().get(bucketTime.getMillis());
  if (specs == null) {
    return ImmutableList.of();
  }

  // Build the buckets eagerly with an explicit index. The original used a stateful
  // counter (int i = 0; ... i++) inside a lazy transform, so iterating the returned
  // Iterable more than once kept incrementing the counter and produced wrong
  // partition numbers on subsequent passes.
  final ImmutableList.Builder<Bucket> buckets = ImmutableList.builder();
  int partitionNum = 0;
  for (HadoopyShardSpec spec : specs) {
    buckets.add(new Bucket(spec.getShardNum(), bucketTime, partitionNum++));
  }
  return buckets.build();
}
}
.create(segmentIds) .trinaryTransform( intervals, results,
/**
 * Lazily apply {@code fn} to each element of this iterable.
 *
 * @param fn  transformation applied to each element on iteration
 * @param <RetType> element type of the resulting iterable
 *
 * @return a new FunctionalIterable over the transformed elements, for further chaining
 */
public <RetType> FunctionalIterable<RetType> transform(Function<T, RetType> fn)
{
  final Iterable<RetType> transformed = Iterables.transform(delegate, fn);
  return new FunctionalIterable<>(transformed);
}
return Optional.of( (Iterable<Bucket>) FunctionalIterable .create(intervals.get()) .transformCat( new Function<Interval, Iterable<Bucket>>()
.create(segmentIds) .trinaryTransform( intervals, results,