/**
 * Writes a string as a typed bytes sequence: a one-byte STRING type
 * marker followed by the length-prefixed string payload.
 *
 * @param s the string to be written
 * @throws IOException if writing to the underlying output fails
 */
public void writeString(String s) throws IOException {
  // Type code first so the reader can dispatch on it before the payload.
  out.write(Type.STRING.code);
  WritableUtils.writeString(out, s);
}
/**
 * Serializes a String array as a 4-byte element count followed by each
 * element encoded via {@code writeString}.
 *
 * @param out sink to write the serialized form to
 * @param s   array to serialize; must not be null
 * @throws IOException if writing to {@code out} fails
 */
public static void writeStringArray(DataOutput out, String[] s) throws IOException {
  out.writeInt(s.length);
  for (String element : s) {
    writeString(out, element);
  }
}
/**
 * Serializes this primitive type entry. Field order defines the wire
 * format and must match the corresponding read side exactly.
 */
@Override
public void write(DataOutput out) throws IOException {
  WritableUtils.writeEnum(out, primitiveCategory);
  WritableUtils.writeString(out, typeName);
  // Classes are written by name so the reader can re-resolve them
  // reflectively on its side.
  WritableUtils.writeString(out, primitiveJavaType.getName());
  WritableUtils.writeString(out, primitiveJavaClass.getName());
  WritableUtils.writeString(out, primitiveWritableClass.getName());
}
/**
 * Serializes this split: the partition info (as an opaque serialized
 * string), the concrete class name of the wrapped MapRed split (so the
 * reader can instantiate it reflectively), then the wrapped split's own
 * serialized form.
 */
@Override
public void write(DataOutput output) throws IOException {
  String partitionInfoString = HCatUtil.serialize(partitionInfo);
  // write partitionInfo into output
  WritableUtils.writeString(output, partitionInfoString);
  // Class name first so the reader knows which Writable to construct
  // before deserializing the split body.
  WritableUtils.writeString(output, baseMapRedSplit.getClass().getName());
  Writable baseSplitWritable = (Writable) baseMapRedSplit;
  //write baseSplit into output
  baseSplitWritable.write(output);
}
/**
 * Setup response for the IPC Call on Fatal Error from a client that is
 * using an old version of Hadoop. The response is serialized using the
 * previous protocol's response layout:
 * call id, fatal status, error class name, error message.
 *
 * @param response buffer to serialize the response into
 * @param call {@link Call} to which we are setting up the response
 * @param rv return value for the IPC Call, if the call was successful
 * @param errorClass error class, if the call failed
 * @param error error message, if the call failed
 * @throws IOException if serialization fails
 */
private void setupResponseOldVersionFatal(ByteArrayOutputStream response,
    RpcCall call, Writable rv, String errorClass, String error)
    throws IOException {
  final int OLD_VERSION_FATAL_STATUS = -1;
  // Reuse the caller-supplied buffer; discard any previous contents.
  response.reset();
  DataOutputStream out = new DataOutputStream(response);
  out.writeInt(call.callId); // write call id
  out.writeInt(OLD_VERSION_FATAL_STATUS); // write FATAL_STATUS
  WritableUtils.writeString(out, errorClass);
  WritableUtils.writeString(out, error);
  call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
/**
 * Writes a Writable as a typed bytes sequence: the Writable's class name
 * (so the reader can instantiate it reflectively) followed by its own
 * serialized form, framed with the WRITABLE type code.
 *
 * @param w the Writable to serialize
 * @throws IOException if serialization or the underlying write fails
 */
public void writeWritable(Writable w) throws IOException {
  // try-with-resources replaces the original manual
  // close()/null/IOUtils.closeStream() dance and guarantees the stream
  // is closed on every exit path.
  try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
       DataOutputStream dos = new DataOutputStream(baos)) {
    WritableUtils.writeString(dos, w.getClass().getName());
    w.write(dos);
    // DataOutputStream writes straight through to baos, so the buffer
    // is complete here without an explicit flush.
    out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
  }
}
/**
 * Serializes this EnumSet wrapper. A null set is encoded as length -1
 * plus the element type name; a non-null set is encoded as its length,
 * the element type name when empty (otherwise the type is recoverable
 * from the elements themselves), then each element.
 *
 * @throws UnsupportedOperationException if the element type is required
 *         (null or empty set) but was not provided
 */
@Override
public void write(DataOutput out) throws IOException {
  if (this.value == null) {
    // Validate before touching the stream: the original could NPE on
    // elementType.getName() after the -1 marker had already been
    // written, leaving a partially written record.
    if (this.elementType == null) {
      throw new UnsupportedOperationException(
          "Unable to serialize empty EnumSet with no element type provided.");
    }
    out.writeInt(-1);
    WritableUtils.writeString(out, this.elementType.getName());
  } else {
    Object[] array = this.value.toArray();
    int length = array.length;
    // Fail fast before writing the length for the same reason as above.
    if (length == 0 && this.elementType == null) {
      throw new UnsupportedOperationException(
          "Unable to serialize empty EnumSet with no element type provided.");
    }
    out.writeInt(length);
    if (length == 0) {
      WritableUtils.writeString(out, this.elementType.getName());
    }
    for (int i = 0; i < length; i++) {
      ObjectWritable.writeObject(out, array[i], array[i].getClass(), conf);
    }
  }
}
/**
 * No protobuf encoding of raw sasl messages: the reply is a plain
 * status int followed either by the successful return value or by the
 * error class name and message.
 */
protected final void doRawSaslReply(SaslStatus status, Writable rv,
    String errorClass, String error) throws IOException {
  BufferChain bc;
  // In my testing, have noticed that sasl messages are usually
  // in the ballpark of 100-200. That's why the initial capacity is 256.
  try (ByteBufferOutputStream saslResponse = new ByteBufferOutputStream(256);
      DataOutputStream out = new DataOutputStream(saslResponse)) {
    out.writeInt(status.state); // write status
    if (status == SaslStatus.SUCCESS) {
      rv.write(out);
    } else {
      // Failure path carries the exception class and message as strings.
      WritableUtils.writeString(out, errorClass);
      WritableUtils.writeString(out, error);
    }
    bc = new BufferChain(saslResponse.getByteBuffer());
  }
  doRespond(() -> bc);
}
// Emit a legacy-format error response for a version-mismatched client:
// the old wire layout is <call id><error flag><exception class><message>.
out.writeInt(0); // call ID
out.writeBoolean(true); // error
WritableUtils.writeString(out, VersionMismatch.class.getName());
WritableUtils.writeString(out, errMsg);
// Hand the assembled bytes to the fake call and push it to the client.
fakeCall.setResponse(ByteBuffer.wrap(buffer.toByteArray()));
sendResponse(fakeCall);
/**
 * Serializes this function's state on top of the parent's fields:
 * tenant id, function class name, and jar path (a null jar path is
 * encoded as the empty string so the reader never sees a null marker).
 */
@Override
public void write(DataOutput output) throws IOException {
  super.write(output);
  WritableUtils.writeString(output, tenantId.getString());
  WritableUtils.writeString(output, this.functionClassName);
  // Collapse the null case to "" in one expression instead of branching.
  String jar = this.jarPath == null ? "" : this.jarPath;
  WritableUtils.writeString(output, jar);
}
/**
 * Serializes this expression's state. The empty string written first is
 * kept only for backward compatibility with older readers.
 */
@Override
public void write(DataOutput output) throws IOException {
  super.write(output);
  WritableUtils.writeString(output, ""); // For b/w compat
  int nChildren = children.size();
  // If dateFormat and/or timeZoneId are supplied as children, don't write them again,
  // except if using LOCAL, in which case we want to write the resolved/actual time zone.
  if (nChildren == 1) {
    // Only the value child is present: both format and time zone must
    // travel explicitly.
    WritableUtils.writeString(output, dateFormat);
    WritableUtils.writeString(output, timeZoneId);
  } else if (nChildren == 2 || DateUtil.LOCAL_TIME_ZONE_ID.equalsIgnoreCase(getTimeZoneIdArg())) {
    // Format came as a child; only the (possibly resolved LOCAL) time
    // zone still needs to be written.
    WritableUtils.writeString(output, timeZoneId);
  }
}
/**
 * Serializes this iterator setting: priority, name, iterator class,
 * then the option map as a length-prefixed list of key/value pairs.
 *
 * @since 1.5.0
 * @see Writable
 */
@Override
public void write(DataOutput dout) throws IOException {
  WritableUtils.writeVInt(dout, priority);
  WritableUtils.writeString(dout, name);
  WritableUtils.writeString(dout, iteratorClass);
  // Pair count first so the reader knows how many entries to consume.
  WritableUtils.writeVInt(dout, properties.size());
  for (Entry<String,String> e : properties.entrySet()) {
    WritableUtils.writeString(dout, e.getKey());
    WritableUtils.writeString(dout, e.getValue());
  }
}
}
/**
 * Serializes this record: the table name followed by the row key's own
 * serialized form.
 */
@Override
public void write(DataOutput output) throws IOException {
  WritableUtils.writeString(output,tableName);
  rowkey.write(output);
}
/**
 * Serializes this expression: the parent's fields followed by the date
 * format pattern string.
 */
@Override
public void write(DataOutput output) throws IOException {
  super.write(output);
  WritableUtils.writeString(output, dateFormat);
}
/**
 * Serializes this expression: the parent's fields, the format string,
 * then the type discriminator enum.
 */
@Override
public void write(DataOutput output) throws IOException {
  super.write(output);
  WritableUtils.writeString(output, formatString);
  WritableUtils.writeEnum(output, type);
}
/**
 * Serializes this expression: the parent's fields, the format string,
 * then the type discriminator enum.
 */
@Override
public void write(DataOutput output) throws IOException {
  super.write(output);
  WritableUtils.writeString(output, formatString);
  WritableUtils.writeEnum(output, type);
}
/**
 * Serializes this expression: the parent's fields, the format string,
 * then the type discriminator enum.
 */
@Override
public void write(DataOutput output) throws IOException {
  super.write(output);
  WritableUtils.writeString(output, formatString);
  WritableUtils.writeEnum(output, type);
}
/**
 * Serializes this expression: the parent's fields, the format string,
 * then the type discriminator enum.
 */
@Override
public void write(DataOutput output) throws IOException {
  super.write(output);
  WritableUtils.writeString(output, formatString);
  WritableUtils.writeEnum(output, type);
}
/**
 * Serializes this split: region location, region size, then the scan
 * list as a count followed by each scan encoded as a length-prefixed
 * protobuf byte array.
 */
@Override
public void write(DataOutput output) throws IOException {
  WritableUtils.writeString(output, regionLocation);
  WritableUtils.writeVLong(output, regionSize);
  Preconditions.checkNotNull(scans);
  WritableUtils.writeVInt(output, scans.size());
  for (Scan s : scans) {
    // Protobuf-encode each scan and frame it with its byte length.
    byte[] encoded = ProtobufUtil.toScan(s).toByteArray();
    WritableUtils.writeVInt(output, encoded.length);
    output.write(encoded);
  }
}
/**
 * Serializes this split on top of the parent's fields: the scan list as
 * a count followed by each scan as a length-prefixed protobuf byte
 * array, then the query string and the region size.
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  Preconditions.checkNotNull(scans);
  WritableUtils.writeVInt(out, scans.size());
  for (Scan s : scans) {
    // Protobuf-encode each scan and frame it with its byte length.
    byte[] encoded = ProtobufUtil.toScan(s).toByteArray();
    WritableUtils.writeVInt(out, encoded.length);
    out.write(encoded);
  }
  WritableUtils.writeString(out, query);
  WritableUtils.writeVLong(out, regionSize);
}