/** {@inheritDoc} */ @Override public void write(DataOutput out) throws IOException { super.write(out); // Write out the number of entries in the map out.writeInt(instance.size()); // Then write out each key/value pair for (Map.Entry<Writable, Writable> e : instance.entrySet()) { out.writeByte(getId(e.getKey().getClass())); e.getKey().write(out); out.writeByte(getId(e.getValue().getClass())); e.getValue().write(out); } }
/**
 * Serializes this object's fields to the stream.
 * <p>
 * The writes below define the wire format; readers must consume the fields
 * in exactly this order.
 * NOTE(review): leaderId is written via the UTF8 helper while topic/server/
 * service use writeUTF — presumably kept for wire compatibility; confirm
 * against the matching readFields implementation.
 */
@Override
public void write(DataOutput out) throws IOException {
  UTF8.writeString(out, this.leaderId);
  out.writeInt(this.partition);
  out.writeLong(this.beginOffset);
  out.writeLong(this.offset);
  out.writeLong(this.checksum);
  out.writeUTF(this.topic);
  out.writeLong(this.time);
  out.writeUTF(this.server); // left for legacy
  out.writeUTF(this.service); // left for legacy
  this.partitionMap.write(out);
}
/**
 * Serializes this record: a version byte, the fixed-width fields, then the
 * optional signature and metadata sections. The field order defines the
 * on-disk format and must match the corresponding read path.
 */
public void write(DataOutput out) throws IOException {
  out.writeByte(CUR_VERSION); // store current version
  out.writeByte(status);
  out.writeLong(fetchTime);
  out.writeByte(retries);
  out.writeInt(fetchInterval);
  out.writeFloat(score);
  out.writeLong(modifiedTime);
  // Signature is length-prefixed with a single byte; 0 means "absent".
  // NOTE(review): a signature longer than 255 bytes would have its length
  // truncated by writeByte — presumably signatures are short message
  // digests; confirm.
  if (signature == null) {
    out.writeByte(0);
  } else {
    out.writeByte(signature.length);
    out.write(signature);
  }
  // Metadata is flagged with a boolean and only serialized when non-empty.
  if (metaData != null && metaData.size() > 0) {
    out.writeBoolean(true);
    metaData.write(out);
  } else {
    out.writeBoolean(false);
  }
}
/**
 * Sets configurations for multiple tables at a time.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @param configs
 *          an array of InputTableConfig objects to associate with the job
 * @throws IllegalStateException
 *           if the table configurations cannot be serialized
 * @since 1.6.0
 */
public static void setInputTableConfigs(Class<?> implementingClass, Configuration conf,
    Map<String,org.apache.accumulo.core.client.mapreduce.InputTableConfig> configs) {
  // Repackage the configs as a MapWritable so they serialize in one shot.
  MapWritable mapWritable = new MapWritable();
  for (Map.Entry<String,org.apache.accumulo.core.client.mapreduce.InputTableConfig> tableConfig : configs
      .entrySet()) {
    mapWritable.put(new Text(tableConfig.getKey()), tableConfig.getValue());
  }
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try {
    mapWritable.write(new DataOutputStream(baos));
  } catch (IOException e) {
    // FIX: preserve the underlying cause instead of discarding it, so the
    // serialization failure can actually be diagnosed.
    throw new IllegalStateException("Table configuration could not be serialized.", e);
  }
  String confKey = enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS);
  // Base64-encode so the binary payload survives storage as a String property.
  conf.set(confKey, Base64.getEncoder().encodeToString(baos.toByteArray()));
}
/**
 * Serializes this object by delegating entirely to the wrapped map's own
 * {@code write} implementation.
 */
@Override
public void write(DataOutput out) throws IOException {
  map.write(out);
}
/**
 * Serializes the five string fields in declaration order, followed by the
 * two Writable counters. Readers must consume the fields in this exact
 * order.
 * NOTE(review): writeUTF throws a NullPointerException on a null field —
 * presumably all five strings are guaranteed non-null by construction;
 * confirm.
 */
@Override
public void write(DataOutput out) throws IOException {
  out.writeUTF(term);
  out.writeUTF(field);
  out.writeUTF(type);
  out.writeUTF(date);
  out.writeUTF(columnVisibility);
  count.write(out);
  countsByColumnVisibility.write(out);
}
/**
 * Serializes this object's fields to the stream.
 * <p>
 * The writes below define the wire format; readers must consume the fields
 * in exactly this order.
 * NOTE(review): leaderId is written via the UTF8 helper while topic/server/
 * service use writeUTF — presumably kept for wire compatibility; confirm
 * against the matching readFields implementation.
 */
@Override
public void write(DataOutput out) throws IOException {
  UTF8.writeString(out, this.leaderId);
  out.writeInt(this.partition);
  out.writeLong(this.beginOffset);
  out.writeLong(this.offset);
  out.writeLong(this.checksum);
  out.writeUTF(this.topic);
  out.writeLong(this.time);
  out.writeUTF(this.server); // left for legacy
  out.writeUTF(this.service); // left for legacy
  this.partitionMap.write(out);
}
/**
 * Serializes this object's fields to the stream.
 * <p>
 * The writes below define the wire format; readers must consume the fields
 * in exactly this order.
 * NOTE(review): leaderId is written via the UTF8 helper while topic/server/
 * service use writeUTF — presumably kept for wire compatibility; confirm
 * against the matching readFields implementation.
 */
@Override
public void write(DataOutput out) throws IOException {
  UTF8.writeString(out, this.leaderId);
  out.writeInt(this.partition);
  out.writeLong(this.beginOffset);
  out.writeLong(this.offset);
  out.writeLong(this.checksum);
  out.writeUTF(this.topic);
  out.writeLong(this.time);
  out.writeUTF(this.server); // left for legacy
  out.writeUTF(this.service); // left for legacy
  this.partitionMap.write(out);
}
/**
 * Serializes the given Properties as a MapWritable whose keys and values are
 * the Text form of each property's key and value.
 * NOTE(review): entrySet() does not include a Properties object's defaults —
 * presumably callers pass flat property sets; confirm.
 */
public static final void writeProperties(DataOutput out, Properties props) throws IOException {
  MapWritable serialized = new MapWritable();
  for (Entry<Object, Object> property : props.entrySet()) {
    serialized.put(new Text(property.getKey().toString()),
        new Text(property.getValue().toString()));
  }
  serialized.write(out);
}
/**
 * Writes the supplied Properties to the stream by converting each key/value
 * pair to Text and serializing the resulting MapWritable.
 */
public static final void writeProperties(DataOutput out, Properties props) throws IOException {
  MapWritable textMap = new MapWritable();
  for (Entry<Object, Object> pair : props.entrySet()) {
    Text textKey = new Text(pair.getKey().toString());
    Text textValue = new Text(pair.getValue().toString());
    textMap.put(textKey, textValue);
  }
  textMap.write(out);
}
/** {@inheritDoc} */ @Override public void write(DataOutput out) throws IOException { super.write(out); // Write out the number of entries in the map out.writeInt(instance.size()); // Then write out each key/value pair for (Map.Entry<Writable, Writable> e : instance.entrySet()) { out.writeByte(getId(e.getKey().getClass())); e.getKey().write(out); out.writeByte(getId(e.getValue().getClass())); e.getValue().write(out); } }
/** {@inheritDoc} */ @Override public void write(DataOutput out) throws IOException { super.write(out); // Write out the number of entries in the map out.writeInt(instance.size()); // Then write out each key/value pair for (Map.Entry<Writable, Writable> e : instance.entrySet()) { out.writeByte(getId(e.getKey().getClass())); e.getKey().write(out); out.writeByte(getId(e.getValue().getClass())); e.getValue().write(out); } }
/** {@inheritDoc} */ @Override public void write(DataOutput out) throws IOException { super.write(out); // Write out the number of entries in the map out.writeInt(instance.size()); // Then write out each key/value pair for (Map.Entry<Writable, Writable> e : instance.entrySet()) { out.writeByte(getId(e.getKey().getClass())); e.getKey().write(out); out.writeByte(getId(e.getValue().getClass())); e.getValue().write(out); } }
/** {@inheritDoc} */ @Override public void write(DataOutput out) throws IOException { super.write(out); // Write out the number of entries in the map out.writeInt(instance.size()); // Then write out each key/value pair for (Map.Entry<Writable, Writable> e : instance.entrySet()) { out.writeByte(getId(e.getKey().getClass())); e.getKey().write(out); out.writeByte(getId(e.getValue().getClass())); e.getValue().write(out); } }
public void writeCommon(DataOutput out) throws IOException { out.writeByte(CUR_VERSION); // write version Text.writeString(out, url); // write url if (content == null) out.writeInt(0); // write content else { out.writeInt(content.length); // write content out.write(content); } if (contentType != null) { Text.writeString(out, contentType); // write contentType } else { Text.writeString(out, ""); } out.writeBoolean(text != null); if (text != null) Text.writeString(out, text); // write text out.writeBoolean(metadata != null); if (metadata != null) metadata.write(out); // write metadata; }
/**
 * Sets configurations for multiple tables at a time.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @param configs
 *          an array of {@link InputTableConfig} objects to associate with the job
 * @throws IllegalStateException
 *           if the table configurations cannot be serialized
 * @since 1.6.0
 */
public static void setInputTableConfigs(Class<?> implementingClass, Configuration conf,
    Map<String,InputTableConfig> configs) {
  // Repackage the configs as a MapWritable so they serialize in one shot.
  MapWritable mapWritable = new MapWritable();
  for (Map.Entry<String,InputTableConfig> tableConfig : configs.entrySet()) {
    mapWritable.put(new Text(tableConfig.getKey()), tableConfig.getValue());
  }
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try {
    mapWritable.write(new DataOutputStream(baos));
  } catch (IOException e) {
    // FIX: preserve the underlying cause instead of discarding it, so the
    // serialization failure can actually be diagnosed.
    throw new IllegalStateException("Table configuration could not be serialized.", e);
  }
  String confKey = enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS);
  // Base64-encode so the binary payload survives storage as a String property.
  conf.set(confKey, Base64.encodeBase64String(baos.toByteArray()));
}
/**
 * Serializes this message: a one-byte type flag followed by a
 * flag-dependent payload. The branch order mirrors the is*Message()
 * predicates and must match the reader's dispatch on the same flag.
 */
@Override
public void write(DataOutput out) throws IOException {
  out.writeByte(this.flag);
  if (isVertexMessage()) {
    // we don't need to write the classes because the other side has the same
    // classes for the two entities.
    vertexId.write(out);
    out.writeInt(numOfValues);
    out.writeInt(byteBuffer.size());
    out.write(byteBuffer.toByteArray());
  } else if (isMapMessage()) {
    // Payload is the map's own serialized form.
    map.write(out);
  } else if (isVerticesSizeMessage()) {
    // Payload is a single serialized integer message.
    integerMessage.write(out);
  } else if (isPartitioningMessage()) {
    // Same count/length/bytes layout as a vertex message, minus the vertex id.
    out.writeInt(numOfValues);
    out.writeInt(byteBuffer.size());
    out.write(byteBuffer.toByteArray());
  } else {
    // Default: only the vertex id is transmitted.
    vertexId.write(out);
  }
}
/**
 * Serializes this message: a one-byte type flag followed by a
 * flag-dependent payload. The branch order mirrors the is*Message()
 * predicates and must match the reader's dispatch on the same flag.
 */
@Override
public void write(DataOutput out) throws IOException {
  out.writeByte(this.flag);
  if (isVertexMessage()) {
    // we don't need to write the classes because the other side has the same
    // classes for the two entities.
    vertexId.write(out);
    out.writeInt(numOfValues);
    out.writeInt(byteBuffer.size());
    out.write(byteBuffer.toByteArray());
  } else if (isMapMessage()) {
    // Payload is the map's own serialized form.
    map.write(out);
  } else if (isVerticesSizeMessage()) {
    // Payload is a single serialized integer message.
    integerMessage.write(out);
  } else if (isPartitioningMessage()) {
    // Same count/length/bytes layout as a vertex message, minus the vertex id.
    out.writeInt(numOfValues);
    out.writeInt(byteBuffer.size());
    out.write(byteBuffer.toByteArray());
  } else {
    // Default: only the vertex id is transmitted.
    vertexId.write(out);
  }
}
/**
 * Converts a Map of Strings into a Writable and writes it.
 *
 * @param map
 *          the String-to-String map to serialize
 * @param output
 *          the stream to write the resulting MapWritable to
 * @throws IOException
 *           if the underlying stream fails
 */
public static void writeMap(Map<String,String> map, DataOutput output) throws IOException {
  MapWritable writableMap = new MapWritable();
  for (Map.Entry<String,String> pair : map.entrySet()) {
    writableMap.put(new Text(pair.getKey()), new Text(pair.getValue()));
  }
  writableMap.write(output);
}
/** * Assert MapWritable does not grow across calls to readFields. * @throws Exception * @see <a href="https://issues.apache.org/jira/browse/HADOOP-2244">HADOOP-2244</a> */ public void testMultipleCallsToReadFieldsAreSafe() throws Exception { // Create an instance and add a key/value. MapWritable m = new MapWritable(); final Text t = new Text(getName()); m.put(t, t); // Get current size of map. Key values are 't'. int count = m.size(); // Now serialize... save off the bytes. ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); m.write(dos); dos.close(); // Now add new values to the MapWritable. m.put(new Text("key1"), new Text("value1")); m.put(new Text("key2"), new Text("value2")); // Now deserialize the original MapWritable. Ensure count and key values // match original state. ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); DataInputStream dis = new DataInputStream(bais); m.readFields(dis); assertEquals(count, m.size()); assertTrue(m.get(t).equals(t)); dis.close(); }