@Override
public void initialize(CompilationOpContext opContext) {
  try {
    // Prepare the fetch work, then bind a FetchOperator to a fresh JobConf.
    fWork.initializeForFetch(opContext);
    JobConf job = new JobConf(conf);
    ftOp = new FetchOperator(fWork, job);
  } catch (Exception e) {
    LOG.error(StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
    CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, ctx, opContext);
  work.initializeForFetch(opContext);
  try {
    // Bind the task's fetch work to a FetchOperator on a fresh JobConf.
    JobConf job = new JobConf(conf);
    ftOp = new FetchOperator(work.getfWork(), job);
  } catch (Exception e) {
    LOG.error(StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
public void setupContext(List<Path> paths) throws HiveException {
  int segmentLen = paths.size();
  FetchOperator.setFetchOperatorContext(jobConf, fetchWork.getPartDir());
  FetchOperator[] segments = segmentsForSize(segmentLen);
  // One FetchOperator per input path, each with its own JobConf clone.
  for (int i = 0; i < segmentLen; i++) {
    Path path = paths.get(i);
    if (segments[i] == null) {
      segments[i] = new FetchOperator(fetchWork, new JobConf(jobConf));
    }
    segments[i].setupContext(Arrays.asList(path));
  }
  initialize(segmentLen);
  // Prime the merge by pulling the first row from every segment.
  for (int i = 0; i < segmentLen; i++) {
    if (nextHive(i)) {
      put(i);
    }
  }
  counter = 0;
}
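A minimal usage sketch for the method above. The fetcher instance and the segment paths are hypothetical, assuming each path points at one pre-sorted file (for example, one bucket file per segment) that the merger then reads and merges:

// Hypothetical usage of setupContext(): one sorted file per segment.
// `fetcher` and the warehouse/bucket paths are assumptions for illustration.
List<Path> segmentPaths = Arrays.asList(
    new Path("/warehouse/t/bucket_00000"),
    new Path("/warehouse/t/bucket_00001"));
fetcher.setupContext(segmentPaths);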
// Register one FetchOperator per entry and remember its JobConf clone.
FetchOperator fetchOp = new FetchOperator(entry.getValue(), jobClone);
fetchOpJobConfMap.put(fetchOp, jobClone);
fetchOperators.put(entry.getKey(), fetchOp);
// The operator is created only to obtain its output ObjectInspector;
// its fetch resources are released immediately afterwards.
FetchOperator fetchOp = new FetchOperator(fetchWork, jobClone);
ts.initialize(jobClone, new ObjectInspector[]{fetchOp.getOutputObjectInspector()});
fetchOp.clearFetchContext();
fetch = new FetchOperator(work, job, source, getVirtualColumns(source));
source.initialize(conf, new ObjectInspector[]{fetch.getOutputObjectInspector()});
totalRows = 0;
@Override
public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
  super.initialize(conf, queryPlan, ctx);
  work.initializeForFetch();
  try {
    JobConf job = new JobConf(conf);
    ftOp = new FetchOperator(work.getfWork(), job);
  } catch (Exception e) {
    LOG.error(StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
@Override
public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
  super.initialize(conf, queryPlan, ctx);
  try {
    // Create a file system handle
    JobConf job = new JobConf(conf, ExecDriver.class);
    String serdeName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE);
    Class<? extends SerDe> serdeClass =
        Class.forName(serdeName, true, JavaUtils.getClassLoader()).asSubclass(SerDe.class);
    // cast only needed for Hadoop 0.17 compatibility
    mSerde = (SerDe) ReflectionUtils.newInstance(serdeClass, null);
    Properties serdeProp = new Properties();
    // this is the default serialization format
    if (mSerde instanceof DelimitedJSONSerDe) {
      serdeProp.put(Constants.SERIALIZATION_FORMAT, "" + Utilities.tabCode);
      serdeProp.put(Constants.SERIALIZATION_NULL_FORMAT, work.getSerializationNullFormat());
    }
    mSerde.initialize(job, serdeProp);
    ftOp = new FetchOperator(work, job);
  } catch (Exception e) {
    // Bail out ungracefully - we should never hit this here,
    // but would have hit it in SemanticAnalyzer
    LOG.error(StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
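To show how the ftOp and mSerde initialized above are typically consumed, here is a minimal, hedged sketch of a pull-style fetch loop in the spirit of the older FetchTask.fetch(). It assumes getNextRow() returns null at end of input, that InspectableObject exposes the row as o and its inspector as oi, that the configured SerDe serializes each row to Text, and that a maxRows field exists; none of these beyond the FetchOperator and SerDe calls themselves are guaranteed by the snippet above.

// Minimal sketch (not the exact Hive implementation): drain ftOp row by row,
// serializing each row to its delimited text form. `maxRows` is assumed.
public boolean fetch(List<String> res) throws IOException, SerDeException {
  int numRows = 0;
  while (numRows < maxRows) {
    InspectableObject io = ftOp.getNextRow();  // null signals end of input
    if (io == null) {
      return numRows > 0;                      // true iff anything was fetched
    }
    res.add(((Text) mSerde.serialize(io.o, io.oi)).toString());
    numRows++;
  }
  return true;
}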
// Disable column pruning on the clone so the FetchOperator reads all columns.
ColumnProjectionUtils.setFullyReadColumns(jobClone);
FetchOperator fetchOp = new FetchOperator(entry.getValue(), jobClone);
fetchOpJobConfMap.put(fetchOp, jobClone);
fetchOperators.put(entry.getKey(), fetchOp);
@Override
public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
  super.initialize(conf, queryPlan, ctx);
  work.initializeForFetch();
  try {
    // Create a file system handle
    JobConf job = new JobConf(conf);
    Operator<?> source = work.getSource();
    if (source instanceof TableScanOperator) {
      TableScanOperator ts = (TableScanOperator) source;
      // push down projections
      ColumnProjectionUtils.appendReadColumns(
          job, ts.getNeededColumnIDs(), ts.getNeededColumns());
      // push down filters
      HiveInputFormat.pushFilters(job, ts);
    }
    sink = work.getSink();
    fetch = new FetchOperator(work, job, source, getVirtualColumns(source));
    source.initialize(conf, new ObjectInspector[]{fetch.getOutputObjectInspector()});
    totalRows = 0;
    ExecMapper.setDone(false);
  } catch (Exception e) {
    // Bail out ungracefully - we should never hit this here,
    // but would have hit it in SemanticAnalyzer
    LOG.error(StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
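The variant above is push-style: because the FetchOperator was constructed with the source operator, each pushRow() call reads one row and drives it through the operator pipeline into the ListSinkOperator held in sink. A minimal sketch of the corresponding fetch loop, assuming pushRow() returns false at end of input, that sink.reset(res) makes the sink append rows to res and sink.getNumRows() report how many it has collected, and that maxRows exists; these assumptions are not spelled out by the snippet above.

// Minimal sketch (not the exact Hive implementation): each pushRow() reads one
// row and pushes it through `source` down to the ListSinkOperator `sink`.
// `maxRows` is assumed.
public boolean fetch(List res) throws IOException, HiveException {
  sink.reset(res);                   // sink appends fetched rows to `res`
  boolean fetched = false;
  while (sink.getNumRows() < maxRows) {
    if (!fetch.pushRow()) {          // false: input exhausted
      return fetched;
    }
    fetched = true;
  }
  return true;
}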