/**
 * Custom serialization: records the concrete mapper class followed by the
 * wrapped Hadoop job configuration.
 *
 * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html">http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html</a>
 */
private void writeObject(final ObjectOutputStream out) throws IOException {
    // The class (not the instance) is written; the reader re-instantiates it.
    final Class<?> mapperClass = mapper.getClass();
    out.writeObject(mapperClass);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: records the concrete reducer class followed by the
 * wrapped Hadoop job configuration.
 *
 * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html">http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html</a>
 */
private void writeObject(final ObjectOutputStream out) throws IOException {
    // The class (not the instance) is written; the reader re-instantiates it.
    final Class<?> reducerClass = reducer.getClass();
    out.writeObject(reducerClass);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: records the reducer class, then the combiner class,
 * then the wrapped Hadoop job configuration. Write order is the wire format
 * and must match the corresponding readObject.
 *
 * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html">http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html</a>
 */
private void writeObject(final ObjectOutputStream out) throws IOException {
    final Class<?> reducerClass = reducer.getClass();
    final Class<?> combinerClass = combiner.getClass();
    out.writeObject(reducerClass);
    out.writeObject(combinerClass);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: delegates the parent state to {@code super.write},
 * then records the input-format, key, and value class names (as UTF strings
 * to be re-resolved on read) and finally the job configuration.
 */
private void writeObject(ObjectOutputStream out) throws IOException {
    super.write(out);
    final String inputFormatName = mapredInputFormat.getClass().getName();
    final String keyName = keyClass.getName();
    final String valueName = valueClass.getName();
    out.writeUTF(inputFormatName);
    out.writeUTF(keyName);
    out.writeUTF(valueName);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: delegates the parent state to {@code super.write},
 * then records the output-format class name (as a UTF string to be
 * re-resolved on read) and the job configuration.
 */
private void writeObject(ObjectOutputStream out) throws IOException {
    super.write(out);
    final String outputFormatName = mapredOutputFormat.getClass().getName();
    out.writeUTF(outputFormatName);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: default-serializable fields first, then the job
 * configuration and the input split via their own Writable-style write
 * methods. The order here is the wire format; the matching readObject
 * must consume the stream in the same order.
 */
private void writeObject(ObjectOutputStream out) throws IOException {
    // serialize the parent fields and the final fields
    out.defaultWriteObject();
    // the job conf knows how to serialize itself
    jobConf.write(out);
    // write the input split
    hadoopInputSplit.write(out);
}
/**
 * Serializes a {@code JobConf} into a byte array via Hadoop's Writable
 * contract.
 *
 * @param jobConf the job configuration to serialize
 * @return the serialized bytes, or {@code null} if serialization fails
 *         (kept for backward compatibility with existing callers)
 */
public static byte[] serializeJobConf(JobConf jobConf) {
    // ByteArrayOutputStream is purely in-memory: close() is a documented
    // no-op and never throws, so the original finally/close/catch
    // boilerplate was dead code and has been removed.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
        jobConf.write(new DataOutputStream(out));
    } catch (IOException e) {
        // Pass the exception only as the throwable argument; concatenating
        // it into the message as well duplicated it in the log output.
        LOG.error("Error serializing job configuration", e);
        return null;
    }
    return out.toByteArray();
}
/**
 * Serializes a {@code JobConf} into a byte array via Hadoop's Writable
 * contract.
 *
 * @param jobConf the job configuration to serialize
 * @return the serialized bytes, or {@code null} if serialization fails
 *         (kept for backward compatibility with existing callers)
 */
public static byte[] serializeJobConf(JobConf jobConf) {
    // ByteArrayOutputStream is purely in-memory: close() is a documented
    // no-op and never throws, so the original finally/close/catch
    // boilerplate was dead code and has been removed.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
        jobConf.write(new DataOutputStream(out));
    } catch (IOException e) {
        // Pass the exception only as the throwable argument; concatenating
        // it into the message as well duplicated it in the log output.
        LOG.error("Error serializing job configuration", e);
        return null;
    }
    return out.toByteArray();
}
// Snapshot the JobConf into an in-memory buffer via its Writable write().
// NOTE(review): the enclosing method is not visible in this chunk —
// presumably buf.toByteArray() is consumed just after; confirm in context.
ByteArrayOutputStream buf = new ByteArrayOutputStream();
jobConf.write(new DataOutputStream(buf));
/**
 * Creates DataInput to read JobConf.
 *
 * @param job Job.
 * @return DataInput with JobConf.
 * @throws IgniteCheckedException If failed.
 */
private static DataInput jobConfDataInput(HadoopJobEx job) throws IgniteCheckedException {
    // Rebuild a JobConf from the job's raw property map.
    JobConf jobConf = new JobConf();

    Map<String, String> props = ((HadoopDefaultJobInfo)job.info()).properties();

    for (Map.Entry<String, String> prop : props.entrySet())
        jobConf.set(prop.getKey(), prop.getValue());

    // Serialize it in memory and hand back a stream positioned at the start.
    ByteArrayOutputStream buf = new ByteArrayOutputStream();

    try {
        jobConf.write(new DataOutputStream(buf));
    }
    catch (IOException e) {
        throw new IgniteCheckedException(e);
    }

    return new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
}
/**
 * Custom serialization: records the concrete reducer class followed by the
 * wrapped Hadoop job configuration.
 *
 * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html">http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html</a>
 */
private void writeObject(final ObjectOutputStream out) throws IOException {
    // The class (not the instance) is written; the reader re-instantiates it.
    final Class<?> reducerClass = reducer.getClass();
    out.writeObject(reducerClass);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: records the concrete reducer class followed by the
 * wrapped Hadoop job configuration.
 *
 * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html">http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html</a>
 */
private void writeObject(final ObjectOutputStream out) throws IOException {
    // The class (not the instance) is written; the reader re-instantiates it.
    final Class<?> reducerClass = reducer.getClass();
    out.writeObject(reducerClass);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: records the concrete mapper class followed by the
 * wrapped Hadoop job configuration.
 *
 * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html">http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html</a>
 */
private void writeObject(final ObjectOutputStream out) throws IOException {
    // The class (not the instance) is written; the reader re-instantiates it.
    final Class<?> mapperClass = mapper.getClass();
    out.writeObject(mapperClass);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization methods: writes the output-format name, the job
 * configuration, and the converter and committer-wrapper objects. The
 * order here is the wire format; the matching readObject must consume
 * the stream in the same order.
 *
 * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html">http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html</a>
 */
private void writeObject(ObjectOutputStream out) throws IOException {
    // Name is written as UTF and presumably re-resolved to a class on read
    // — confirm against the matching readObject, which is not visible here.
    out.writeUTF(hadoopOutputFormatName);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
    // These two rely on standard Java object serialization.
    out.writeObject(converter);
    out.writeObject(fileOutputCommitterWrapper);
}
/**
 * Custom serialization: records the reducer class, then the combiner class,
 * then the wrapped Hadoop job configuration. Write order is the wire format
 * and must match the corresponding readObject.
 *
 * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html">http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html</a>
 */
private void writeObject(final ObjectOutputStream out) throws IOException {
    final Class<?> reducerClass = reducer.getClass();
    final Class<?> combinerClass = combiner.getClass();
    out.writeObject(reducerClass);
    out.writeObject(combinerClass);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: delegates the parent state to {@code super.write},
 * then records the input-format, key, and value class names (as UTF strings
 * to be re-resolved on read) and finally the job configuration.
 */
private void writeObject(ObjectOutputStream out) throws IOException {
    super.write(out);
    final String inputFormatName = mapredInputFormat.getClass().getName();
    final String keyName = keyClass.getName();
    final String valueName = valueClass.getName();
    out.writeUTF(inputFormatName);
    out.writeUTF(keyName);
    out.writeUTF(valueName);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: delegates the parent state to {@code super.write},
 * then records the output-format class name (as a UTF string to be
 * re-resolved on read) and the job configuration.
 */
private void writeObject(ObjectOutputStream out) throws IOException {
    super.write(out);
    final String outputFormatName = mapredOutputFormat.getClass().getName();
    out.writeUTF(outputFormatName);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: delegates the parent state to {@code super.write},
 * then records the output-format class name (as a UTF string to be
 * re-resolved on read) and the job configuration.
 */
private void writeObject(ObjectOutputStream out) throws IOException {
    super.write(out);
    final String outputFormatName = mapredOutputFormat.getClass().getName();
    out.writeUTF(outputFormatName);
    // JobConf serializes itself via Hadoop's Writable contract.
    jobConf.write(out);
}
/**
 * Custom serialization: default-serializable fields first, then the job
 * configuration and the input split via their own Writable-style write
 * methods. The order here is the wire format; the matching readObject
 * must consume the stream in the same order.
 */
private void writeObject(ObjectOutputStream out) throws IOException {
    // serialize the parent fields and the final fields
    out.defaultWriteObject();
    // the job conf knows how to serialize itself
    jobConf.write(out);
    // write the input split
    hadoopInputSplit.write(out);
}
public ConfFactory(JobConf conf) throws HyracksDataException { try { ByteArrayOutputStream bos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(bos); conf.write(dos); confBytes = bos.toByteArray(); dos.close(); } catch (Exception e) { throw HyracksDataException.create(e); } }