/**
 * Resolves the filter for the given datamap distributable unique id by delegating
 * to the left child first, then the right child.
 *
 * @param uniqueId unique id of the distributable whose filter resolver is wanted
 * @return the first non-null resolver from left/right children, or null if neither has one
 */
@Override public FilterResolverIntf getFilterResolverIntf(String uniqueId) {
  FilterResolverIntf leftExp = left.getFilterResolverIntf(uniqueId);
  if (leftExp != null) {
    return leftExp;
  }
  // Only consult the right child when the left child had no resolver; the original
  // evaluated both eagerly, doing redundant work when the left already matched.
  return right.getFilterResolverIntf(uniqueId);
}
/**
 * Resolves the filter for the given datamap distributable unique id by delegating
 * to the left child first, then the right child.
 *
 * @param uniqueId unique id of the distributable whose filter resolver is wanted
 * @return the first non-null resolver from left/right children, or null if neither has one
 */
@Override public FilterResolverIntf getFilterResolverIntf(String uniqueId) {
  FilterResolverIntf leftExp = left.getFilterResolverIntf(uniqueId);
  if (leftExp != null) {
    return leftExp;
  }
  // Only consult the right child when the left child had no resolver; the original
  // evaluated both eagerly, doing redundant work when the left already matched.
  return right.getFilterResolverIntf(uniqueId);
}
/**
 * Initializes this reader for one input split (a {@code DataMapDistributableWrapper}):
 * clears any invalid-segment state from the executor-side cache, then either
 * (a) clears datamap caches and returns an empty iterator when the job's purpose is
 * cache clearing, or (b) prunes blocklets for the distributable and exposes them
 * through {@code blockletIterator}.
 *
 * @param inputSplit expected to be a {@code DataMapDistributableWrapper} (unchecked cast)
 * @param taskAttemptContext unused here
 * @throws IOException declared by the RecordReader contract
 * @throws InterruptedException declared by the RecordReader contract
 */
@Override public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
    throws IOException, InterruptedException {
  distributable = (DataMapDistributableWrapper) inputSplit;
  // clear the segmentMap and from cache in executor when there are invalid segments
  if (invalidSegments.size() > 0) {
    DataMapStoreManager.getInstance().clearInvalidSegments(table, invalidSegments);
  }
  // NOTE(review): the datamap is looked up before the clear-datamaps early return below;
  // the lookup may populate caches that are then cleared — confirm this ordering is intended.
  TableDataMap tableDataMap = DataMapStoreManager.getInstance()
      .getDataMap(table, distributable.getDistributable().getDataMapSchema());
  if (isJobToClearDataMaps) {
    // if job is to clear datamaps just clear datamaps from cache and return
    DataMapStoreManager.getInstance()
        .clearDataMaps(table.getCarbonTableIdentifier().getTableUniqueName());
    // clear the segment properties cache from executor
    SegmentPropertiesAndSchemaHolder.getInstance()
        .invalidate(table.getAbsoluteTableIdentifier());
    // nothing to iterate for a clear-only job
    blockletIterator = Collections.emptyIterator();
    return;
  }
  // fetch the concrete datamaps for this distributable, then prune blocklets using
  // the filter resolver for this distributable's unique id plus any partition info
  dataMaps = tableDataMap.getTableDataMaps(distributable.getDistributable());
  List<ExtendedBlocklet> blocklets = tableDataMap
      .prune(dataMaps, distributable.getDistributable(),
          dataMapExprWrapper.getFilterResolverIntf(distributable.getUniqueId()), partitions);
  // tag each pruned blocklet with the distributable's id so downstream consumers
  // can associate results back to this split
  for (ExtendedBlocklet blocklet : blocklets) {
    blocklet.setDataMapUniqueId(distributable.getUniqueId());
  }
  blockletIterator = blocklets.iterator();
}