private ListSinkOperator completed(ParseContext pctx, FetchWork work) {
  // register every scanned entity as a semantic input of the query
  for (ReadEntity input : inputs) {
    PlanUtils.addInput(pctx.getSemanticInputs(), input);
  }
  // swap the FileSinkOperator for a ListSinkOperator so rows are fetched
  // directly by the client instead of being written to an intermediate file
  return replaceFSwithLS(fileSink, work.getSerializationNullFormat());
}
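// Illustrative only: a minimal sketch of what replaceFSwithLS might look like,
// assuming the standard operator-tree API (ListSinkDesc, OperatorFactory.get,
// and Operator.replaceChild). This is a sketch under those assumptions, not the
// exact implementation shipped with this class.
private static ListSinkOperator replaceFSwithLS(FileSinkOperator fileSink, String nullFormat) {
  // descriptor carrying the null format to use when rendering rows
  ListSinkDesc desc = new ListSinkDesc(nullFormat);
  ListSinkOperator sink = (ListSinkOperator) OperatorFactory.get(desc);

  // splice the ListSink into the tree where the FileSink used to be
  Operator<? extends OperatorDesc> parent = fileSink.getParentOperators().get(0);
  sink.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
  sink.getParentOperators().add(parent);
  parent.replaceChild(fileSink, sink);
  fileSink.setParentOperators(null);
  return sink;
}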
@Override
public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
  super.initialize(conf, queryPlan, ctx);
  try {
    // Create a file system handle
    JobConf job = new JobConf(conf, ExecDriver.class);

    String serdeName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE);
    Class<? extends SerDe> serdeClass = Class.forName(serdeName, true,
        JavaUtils.getClassLoader()).asSubclass(SerDe.class);
    // cast only needed for Hadoop 0.17 compatibility
    mSerde = (SerDe) ReflectionUtils.newInstance(serdeClass, null);

    Properties serdeProp = new Properties();

    // this is the default serialization format
    if (mSerde instanceof DelimitedJSONSerDe) {
      serdeProp.put(Constants.SERIALIZATION_FORMAT, "" + Utilities.tabCode);
      serdeProp.put(Constants.SERIALIZATION_NULL_FORMAT, work.getSerializationNullFormat());
    }
    mSerde.initialize(job, serdeProp);

    ftOp = new FetchOperator(work, job);
  } catch (Exception e) {
    // Bail out ungracefully - we should never hit
    // this here - but would have hit it in SemanticAnalyzer
    LOG.error(StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
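// Illustrative only: a minimal sketch of how the ftOp and mSerde set up above
// are typically consumed. getNextRow and serialize come from the FetchOperator
// and SerDe interfaces; the fetch method itself and its res parameter are
// hypothetical here, not part of the code shown above.
public boolean fetch(List<String> res) throws IOException {
  try {
    // pull the next row and its ObjectInspector; null signals end of data
    InspectableObject io = ftOp.getNextRow();
    if (io == null) {
      return false;
    }
    // serialize the row with the configured output SerDe (tab-delimited by default)
    res.add(((Text) mSerde.serialize(io.o, io.oi)).toString());
    return true;
  } catch (Exception e) {
    throw new IOException(e);
  }
}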