@Override
public ProducerOperator create(FragmentExecutionContext fragmentExecContext, final OperatorContext context, EasySubScan config) throws ExecutionSetupException {
  // Resolve the storage plugin, filesystem and easy-format plugin backing this scan.
  final FileSystemPlugin plugin = fragmentExecContext.getStoragePlugin(config.getPluginId());
  final FileSystemWrapper fs = plugin.getFs(config.getUserName(), context.getStats());
  final FormatPluginConfig formatConfig = PhysicalDatasetUtils.toFormatPlugin(config.getFileConfig(), Collections.<String>emptyList());
  final EasyFormatPlugin<?> formatPlugin = (EasyFormatPlugin<?>) plugin.getFormatPlugin(formatConfig);

  // Wrap each dataset split in its extended form. Kept as a lazy FluentIterable
  // until the list is materialized below.
  final FluentIterable<SplitAndExtended> splits = FluentIterable.from(config.getSplits())
      .transform(new Function<DatasetSplit, SplitAndExtended>() {
        @Override
        public SplitAndExtended apply(DatasetSplit split) {
          return new SplitAndExtended(split);
        }
      });

  // Optionally sort the work units by file block for a stable read order.
  final List<SplitAndExtended> orderedSplits;
  if (context.getOptions().getOption(ExecConstants.SORT_FILE_BLOCKS)) {
    orderedSplits = splits.toSortedList(SPLIT_COMPARATOR);
  } else {
    orderedSplits = splits.toList();
  }

  final CompositeReaderConfig readerConfig = CompositeReaderConfig.getCompound(config.getSchema(), config.getColumns(), config.getPartitionColumns());
  // When the projection covers every column, the inner readers get the star column
  // instead of an explicit column list.
  final List<SchemaPath> innerFields = selectsAllColumns(config.getSchema(), config.getColumns())
      ? ImmutableList.<SchemaPath>of(ColumnUtils.STAR_COLUMN)
      : readerConfig.getInnerColumns();

  // Reader construction stays lazy: each RecordReader is built only when the
  // ScanOperator advances the iterator to it.
  final FluentIterable<RecordReader> readers = FluentIterable.from(orderedSplits)
      .transform(new Function<SplitAndExtended, RecordReader>() {
        @Override
        public RecordReader apply(SplitAndExtended work) {
          try {
            final RecordReader inner = formatPlugin.getRecordReader(context, fs, work.getExtended(), innerFields);
            return readerConfig.wrapIfNecessary(context.getAllocator(), inner, work.getSplit());
          } catch (ExecutionSetupException e) {
            // Function.apply cannot declare checked exceptions; rethrow unchecked
            // with the original cause preserved.
            throw new RuntimeException(e);
          }
        }
      });

  return new ScanOperator(fragmentExecContext.getSchemaUpdater(), config, context, readers.iterator());
}