// NOTE(review): garbled/incomplete snippet — not valid Java as shown. It mixes an unrelated
// `SparkConf` declaration with what looks like ScriptOperator's child-process setup
// (splitting the script command, wiring the script's stdin/stdout/stderr through
// RecordWriter/RecordReader instances). Several sub-expressions are dangling
// (e.g. `new BufferedInputStream(...))` and `.getObjectInspector()), "OutputProcessor");`),
// so the enclosing method's start/end are outside this view — left byte-identical.
SparkConf sparkConf = null; try { String[] cmdArgs = splitArgs(conf.getScriptCmd()); new BufferedInputStream(scriptPid.getErrorStream())); scriptOutWriter = conf.getInRecordWriterClass().newInstance(); scriptOutWriter.initialize(scriptOut, hconf); RecordReader scriptOutputReader = conf.getOutRecordReaderClass() .newInstance(); scriptOutputReader.initialize(scriptIn, hconf, conf .getScriptOutputInfo().getProperties()); .getObjectInspector()), "OutputProcessor"); RecordReader scriptErrReader = conf.getErrRecordReaderClass() .newInstance(); scriptErrReader.initialize(scriptErr, hconf, conf.getScriptErrInfo() .getProperties());
/**
 * Sets up this script operator before any rows flow through it.
 *
 * <p>Registers the deserialize/serialize error counters, then instantiates and
 * initializes the SerDe pair used to exchange rows with the external script:
 * a deserializer for the script's output and a serializer for its input. The
 * operator's output object inspector is taken from the output deserializer.
 *
 * @param hconf Hadoop configuration for the running task
 * @throws HiveException wrapping any reflection or SerDe-init failure
 */
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  firstRow = true;
  statsMap.put(Counter.DESERIALIZE_ERRORS.toString(), deserialize_error_count);
  statsMap.put(Counter.SERIALIZE_ERRORS.toString(), serialize_error_count);
  try {
    this.hconf = hconf;
    // Deserializer converts the script's output byte stream back into rows.
    scriptOutputDeserializer =
        conf.getScriptOutputInfo().getDeserializerClass().newInstance();
    SerDeUtils.initializeSerDe(scriptOutputDeserializer, hconf,
        conf.getScriptOutputInfo().getProperties(), null);
    // Serializer converts incoming rows into the byte stream fed to the script.
    scriptInputSerializer =
        (Serializer) conf.getScriptInputInfo().getDeserializerClass().newInstance();
    scriptInputSerializer.initialize(hconf,
        conf.getScriptInputInfo().getProperties());
    outputObjInspector = scriptOutputDeserializer.getObjectInspector();
  } catch (Exception e) {
    throw new HiveException(ErrorMsg.SCRIPT_INIT_ERROR.getErrorCodedMsg(), e);
  }
}
/**
 * Compares two operator descriptors for plan-level equivalence.
 *
 * <p>Descriptors are the same only when they are of the exact same runtime
 * class and agree on both the script command and the script-output table
 * descriptor (null-safe via {@link Objects#equals}).
 *
 * @param other the descriptor to compare against
 * @return true if {@code other} is an equivalent ScriptDesc
 */
@Override
public boolean isSame(OperatorDesc other) {
  // Guard clause: different concrete descriptor classes can never match.
  if (!getClass().getName().equals(other.getClass().getName())) {
    return false;
  }
  ScriptDesc that = (ScriptDesc) other;
  return Objects.equals(getScriptCmd(), that.getScriptCmd())
      && Objects.equals(getScriptOutputInfo(), that.getScriptOutputInfo());
}
// NOTE(review): incomplete snippet — the OperatorFactory.getAndMakeChild(...) call is cut off
// mid-argument-list (trailing `errInfo),`), so the enclosing method is outside this view.
// It appears to build a ScriptOperator plan node from the TRANSFORM clause (the script path
// is resolved/fetched, quotes stripped) with default error record reader — left byte-identical.
Class<? extends RecordReader> errRecordReader = getDefaultRecordReader(); Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(new ScriptDesc( fetchFilesNotInLocalFilesystem(stripQuotes(trfm.getChild(execPos).getText())), inInfo, inRecordWriter, outInfo, outRecordReader, errRecordReader, errInfo),
/**
 * Builds a small map-side test plan over table {@code src}:
 * filter("key") -&gt; script("cat") -&gt; file sink writing to mapplan2.out.
 *
 * @param src the source table the map work is attached to
 * @throws Exception on any plan-construction failure
 */
@SuppressWarnings("unchecked")
private void populateMapPlan2(Table src) throws Exception {
  // Terminal operator: sink the script output into a local file.
  Operator<FileSinkDesc> sinkOp = OperatorFactory.get(ctx,
      new FileSinkDesc(new Path(tmpdir + File.separator + "mapplan2.out"),
          Utilities.defaultTd, false));
  // Identity transform: pipe rows through "cat" using text record I/O.
  Operator<ScriptDesc> scriptOp = OperatorFactory.get(
      new ScriptDesc("cat",
          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
          TextRecordWriter.class,
          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
          TextRecordReader.class, TextRecordReader.class,
          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key")),
      sinkOp);
  // Head of the chain: filter on "key" feeding the script operator.
  Operator<FilterDesc> filterOp = OperatorFactory.get(getTestFilterDesc("key"), scriptOp);
  addMapWork(mr, src, "a", filterOp);
}
/**
 * Decides whether two ScriptOperators are equivalent for comparison purposes:
 * their descriptors must share the same script command and the same
 * script-output table descriptor.
 *
 * @param op1 first operator; must not be null
 * @param op2 second operator; must not be null
 * @return true if both operators carry matching script descriptors
 * @throws NullPointerException if either operator is null
 */
@Override
public boolean equals(ScriptOperator op1, ScriptOperator op2) {
  Preconditions.checkNotNull(op1);
  Preconditions.checkNotNull(op2);
  ScriptDesc desc1 = op1.getConf();
  ScriptDesc desc2 = op2.getConf();
  // Idiom fix: return the boolean expression directly instead of the
  // redundant `if (cond) return true; else return false;` form.
  return compareString(desc1.getScriptCmd(), desc2.getScriptCmd())
      && compareObject(desc1.getScriptOutputInfo(), desc2.getScriptOutputInfo());
} }
// NOTE(review): duplicate of an earlier garbled snippet — not valid Java as shown
// (unbalanced parens, dangling `.getObjectInspector()), "OutputProcessor");`). It appears to
// be ScriptOperator's child-process stream wiring fused with an unrelated `SparkConf`
// declaration; the enclosing method is outside this view — left byte-identical.
SparkConf sparkConf = null; try { String[] cmdArgs = splitArgs(conf.getScriptCmd()); new BufferedInputStream(scriptPid.getErrorStream())); scriptOutWriter = conf.getInRecordWriterClass().newInstance(); scriptOutWriter.initialize(scriptOut, hconf); RecordReader scriptOutputReader = conf.getOutRecordReaderClass() .newInstance(); scriptOutputReader.initialize(scriptIn, hconf, conf .getScriptOutputInfo().getProperties()); .getObjectInspector()), "OutputProcessor"); RecordReader scriptErrReader = conf.getErrRecordReaderClass() .newInstance(); scriptErrReader.initialize(scriptErr, hconf, conf.getScriptErrInfo() .getProperties());
/**
 * Prepares the script operator: counters first, then the SerDe pair that
 * bridges rows to and from the external script process.
 *
 * <p>The output deserializer (script stdout &rarr; rows) is initialized via
 * {@code SerDeUtils.initializeSerDe}; the input serializer (rows &rarr; script
 * stdin) via its own {@code initialize}. The output deserializer also supplies
 * this operator's object inspector.
 *
 * @param hconf Hadoop configuration for the running task
 * @throws HiveException wrapping any reflection or SerDe-init failure
 */
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  firstRow = true;
  statsMap.put(Counter.DESERIALIZE_ERRORS.toString(), deserialize_error_count);
  statsMap.put(Counter.SERIALIZE_ERRORS.toString(), serialize_error_count);
  try {
    this.hconf = hconf;
    scriptOutputDeserializer =
        conf.getScriptOutputInfo().getDeserializerClass().newInstance();
    SerDeUtils.initializeSerDe(scriptOutputDeserializer, hconf,
        conf.getScriptOutputInfo().getProperties(), null);
    scriptInputSerializer =
        (Serializer) conf.getScriptInputInfo().getDeserializerClass().newInstance();
    scriptInputSerializer.initialize(hconf,
        conf.getScriptInputInfo().getProperties());
    outputObjInspector = scriptOutputDeserializer.getObjectInspector();
  } catch (Exception e) {
    // Preserve the root cause; callers surface SCRIPT_INIT_ERROR.
    throw new HiveException(ErrorMsg.SCRIPT_INIT_ERROR.getErrorCodedMsg(), e);
  }
}
// NOTE(review): duplicate incomplete snippet — the getAndMakeChild(...) call is cut off
// mid-argument-list (trailing `errInfo),`); the enclosing method is outside this view.
// Builds a ScriptOperator plan node from a TRANSFORM clause — left byte-identical.
Class<? extends RecordReader> errRecordReader = getDefaultRecordReader(); Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(new ScriptDesc( fetchFilesNotInLocalFilesystem(stripQuotes(trfm.getChild(execPos).getText())), inInfo, inRecordWriter, outInfo, outRecordReader, errRecordReader, errInfo),
// NOTE(review): incomplete snippet — not valid Java as shown (unbalanced parens, dangling
// fragments). Looks like the lazy first-row branch of ScriptOperator.process that launches
// the script child process and wires up its writer/readers; the enclosing method boundaries
// are outside this view — left byte-identical.
firstRow = false; try { String[] cmdArgs = splitArgs(conf.getScriptCmd()); new BufferedInputStream(scriptPid.getErrorStream())); scriptOutWriter = conf.getInRecordWriterClass().newInstance(); scriptOutWriter.initialize(scriptOut, hconf); RecordReader scriptOutputReader = conf.getOutRecordReaderClass() .newInstance(); scriptOutputReader.initialize(scriptIn, hconf, conf .getScriptOutputInfo().getProperties()); .getObjectInspector()), "OutputProcessor"); RecordReader scriptErrReader = conf.getErrRecordReaderClass() .newInstance(); scriptErrReader.initialize(scriptErr, hconf, conf.getScriptErrInfo() .getProperties());
@Override protected void initializeOp(Configuration hconf) throws HiveException { firstRow = true; statsMap.put(Counter.DESERIALIZE_ERRORS, deserialize_error_count); statsMap.put(Counter.SERIALIZE_ERRORS, serialize_error_count); try { this.hconf = hconf; scriptOutputDeserializer = conf.getScriptOutputInfo() .getDeserializerClass().newInstance(); scriptOutputDeserializer.initialize(hconf, conf.getScriptOutputInfo() .getProperties()); scriptInputSerializer = (Serializer) conf.getScriptInputInfo() .getDeserializerClass().newInstance(); scriptInputSerializer.initialize(hconf, conf.getScriptInputInfo() .getProperties()); outputObjInspector = scriptOutputDeserializer.getObjectInspector(); // initialize all children before starting the script initializeChildren(hconf); } catch (Exception e) { throw new HiveException("Cannot initialize ScriptOperator", e); } }
// NOTE(review): incomplete snippet — the ScriptDesc constructor call is cut off
// mid-argument-list (trailing `TextRecordReader.class,`), so the statement never closes.
// Appears to build a "cat" ScriptDesc over a two-column ("a,b") input table in a test or
// plan-building context; left byte-identical.
TableDesc scriptInput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b"); ScriptDesc sd = new ScriptDesc("cat", scriptOutput, TextRecordWriter.class, scriptInput, TextRecordReader.class, TextRecordReader.class,
// NOTE(review): duplicate incomplete snippet — same garbled first-row script-launch code as
// the earlier fragment (unbalanced parens, dangling sub-expressions); enclosing method is
// outside this view — left byte-identical.
firstRow = false; try { String[] cmdArgs = splitArgs(conf.getScriptCmd()); new BufferedInputStream(scriptPid.getErrorStream())); scriptOutWriter = conf.getInRecordWriterClass().newInstance(); scriptOutWriter.initialize(scriptOut, hconf); RecordReader scriptOutputReader = conf.getOutRecordReaderClass() .newInstance(); scriptOutputReader.initialize(scriptIn, hconf, conf .getScriptOutputInfo().getProperties()); .getObjectInspector()), "OutputProcessor"); RecordReader scriptErrReader = conf.getErrRecordReaderClass() .newInstance(); scriptErrReader.initialize(scriptErr, hconf, conf.getScriptErrInfo() .getProperties());
/**
 * Async-aware initialization for the script operator: delegates to the parent
 * (collecting any asynchronous init futures), registers error counters, and
 * builds the SerDe pair used to exchange rows with the external script.
 *
 * @param hconf Hadoop configuration for the running task
 * @return the futures produced by the superclass initialization, unchanged
 * @throws HiveException wrapping any reflection or SerDe-init failure
 */
@Override
protected Collection<Future<?>> initializeOp(Configuration hconf) throws HiveException {
  Collection<Future<?>> asyncInitOps = super.initializeOp(hconf);
  firstRow = true;
  statsMap.put(Counter.DESERIALIZE_ERRORS.toString(), deserialize_error_count);
  statsMap.put(Counter.SERIALIZE_ERRORS.toString(), serialize_error_count);
  try {
    this.hconf = hconf;
    // Script stdout -> rows.
    scriptOutputDeserializer =
        conf.getScriptOutputInfo().getDeserializerClass().newInstance();
    SerDeUtils.initializeSerDe(scriptOutputDeserializer, hconf,
        conf.getScriptOutputInfo().getProperties(), null);
    // Rows -> script stdin.
    scriptInputSerializer =
        (Serializer) conf.getScriptInputInfo().getDeserializerClass().newInstance();
    scriptInputSerializer.initialize(hconf,
        conf.getScriptInputInfo().getProperties());
    outputObjInspector = scriptOutputDeserializer.getObjectInspector();
  } catch (Exception e) {
    throw new HiveException(ErrorMsg.SCRIPT_INIT_ERROR.getErrorCodedMsg(), e);
  }
  return asyncInitOps;
}
// NOTE(review): incomplete snippet — begins mid-expression (`getStringColumn("tvalue")),`)
// and the final getDefaultTableDesc call is cut off, so neither statement is complete here.
// Appears to chain a select/reduce-sink descriptor into a "cat" ScriptDesc in test-plan
// construction; left byte-identical.
getStringColumn("tvalue")), outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID)); Operator<ScriptDesc> op0 = OperatorFactory.get(new ScriptDesc("cat", PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), TextRecordWriter.class, PlanUtils.getDefaultTableDesc(""
// NOTE(review): incomplete snippet — begins mid-expression and ends mid-call (`PlanUtils`),
// so neither statement is complete in this view. Variant of the previous fragment using a
// quoted script command `'cat'` and `tkey,tvalue` columns; left byte-identical.
getStringColumn("tvalue")), outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID)); Operator<ScriptDesc> op0 = OperatorFactory.get(new ScriptDesc( "\'cat\'", PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"), TextRecordWriter.class, PlanUtils
// NOTE(review): incomplete snippet — a bare ScriptDesc constructor argument list ending in a
// comma; it is one argument of a larger call (likely OperatorFactory.getAndMakeChild) whose
// remainder is outside this view — left byte-identical.
new ScriptDesc( fetchFilesNotInLocalFilesystem(stripQuotes(trfm.getChild(execPos).getText())), inInfo, inRecordWriter, outInfo, outRecordReader, errRecordReader, errInfo),
// NOTE(review): duplicate incomplete snippet — same bare ScriptDesc constructor argument
// list as the previous fragment, ending in a comma; the enclosing call is outside this
// view — left byte-identical.
new ScriptDesc( fetchFilesNotInLocalFilesystem(stripQuotes(trfm.getChild(execPos).getText())), inInfo, inRecordWriter, outInfo, outRecordReader, errRecordReader, errInfo),