@Override
public void serialize(ByteArrayDataOutput out, Object value)
{
  ParseException exceptionToThrow = null;
  Long ret = null;
  try {
    ret = DimensionHandlerUtils.convertObjectToLong(value, true);
  }
  catch (ParseException pe) {
    exceptionToThrow = pe;
  }

  if (ret == null) {
    // remove null -> zero conversion when https://github.com/apache/incubator-druid/pull/5278 series of patches is merged
    // we'll also need to change the serialized encoding so that it can represent numeric nulls
    ret = DimensionHandlerUtils.ZERO_LONG;
  }

  out.writeLong(ret);

  if (exceptionToThrow != null) {
    throw exceptionToThrow;
  }
}
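Note that the write happens unconditionally, so the output stays a fixed eight bytes even on a parse failure; the exception is rethrown only after the long has been written. For reference, a minimal Guava round trip of such a value, using only ByteStreams (the class name here is illustrative):

import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteArrayDataOutput;
import com.google.common.io.ByteStreams;

public class LongRoundTrip {
  public static void main(String[] args) {
    ByteArrayDataOutput out = ByteStreams.newDataOutput();
    out.writeLong(42L);
    // newDataInput reads big-endian, matching what writeLong produced
    ByteArrayDataInput in = ByteStreams.newDataInput(out.toByteArray());
    System.out.println(in.readLong()); // prints 42
  }
}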
public void testNewDataOutput_writeLong() {
  ByteArrayDataOutput out = ByteStreams.newDataOutput();
  out.writeLong(0x1234567876543210L);
  assertEquals(bytes, out.toByteArray());
}
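writeLong follows the java.io.DataOutput contract and emits the eight bytes big-endian, high byte first. Assuming the shared bytes fixture holds exactly this value's encoding, it would read:

// Big-endian layout of 0x1234567876543210L, high byte first.
byte[] expected = {0x12, 0x34, 0x56, 0x78, 0x76, 0x54, 0x32, 0x10};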
out.writeLong(row.getTimestampFromEpoch());
public byte[] serializeEvent(DataPointEvent dataPointEvent)
{
  // Todo: Create some adaptive value here; keep stats on whether the buffer grows and slowly increase it
  ByteArrayDataOutput dataOutput = ByteStreams.newDataOutput(64);
  dataOutput.writeUTF(dataPointEvent.getMetricName());
  dataOutput.writeInt(dataPointEvent.getTtl());
  dataOutput.writeLong(dataPointEvent.getDataPoint().getTimestamp());
  dataOutput.writeUTF(dataPointEvent.getDataPoint().getDataStoreDataType());
  try {
    dataPointEvent.getDataPoint().writeValueToBuffer(dataOutput);
  }
  catch (IOException e) {
    logger.error("Error serializing DataPoint", e);
  }

  dataOutput.writeInt(dataPointEvent.getTags().size());
  for (Map.Entry<String, String> entry : dataPointEvent.getTags().entrySet()) {
    dataOutput.writeUTF(entry.getKey());
    dataOutput.writeUTF(entry.getValue());
  }

  return dataOutput.toByteArray();
}
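The method defines a simple wire format: metric name, TTL, timestamp, datastore type, the point's value payload, then a tag count followed by key/value pairs. A minimal sketch of the matching read side, assuming Guava's ByteStreams; the method name is illustrative, and the value payload must be consumed (based on dataStoreDataType) before the tags can be read:

import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteStreams;
import java.util.HashMap;
import java.util.Map;

// Hypothetical decoder mirroring serializeEvent's field order.
static void deserializeEvent(byte[] payload) {
  ByteArrayDataInput in = ByteStreams.newDataInput(payload);
  String metricName = in.readUTF();
  int ttl = in.readInt();
  long timestamp = in.readLong();
  String dataStoreDataType = in.readUTF();
  // ... decode the value payload here, dispatching on dataStoreDataType ...
  int tagCount = in.readInt();
  Map<String, String> tags = new HashMap<>();
  for (int i = 0; i < tagCount; i++) {
    tags.put(in.readUTF(), in.readUTF());
  }
}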
@Override
public void serialize(ByteArrayDataOutput out, Object value, boolean reportParseExceptions)
{
  Long ret = DimensionHandlerUtils.convertObjectToLong(value, reportParseExceptions);
  if (ret == null) {
    // remove null -> zero conversion when https://github.com/druid-io/druid/pull/5278 series of patches is merged
    // we'll also need to change the serialized encoding so that it can represent numeric nulls
    ret = DimensionHandlerUtils.ZERO_LONG;
  }
  out.writeLong(ret);
}
@Override
public void writeTo(ByteArrayDataOutput output) {
  output.writeInt(type.ordinal());
  // Data is sent through a random player. We have to tell the Bukkit version of this plugin the target
  output.writeUTF(playerName);
  // proxy identifier to check if it's an acceptable proxy
  output.writeLong(proxyId.getMostSignificantBits());
  output.writeLong(proxyId.getLeastSignificantBits());
}
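Writing the two halves of the UUID as consecutive longs makes the read side trivial, since java.util.UUID exposes the same split. A minimal sketch of reconstructing it, assuming Guava's ByteArrayDataInput on the receiving end:

import com.google.common.io.ByteArrayDataInput;
import java.util.UUID;

// Read back in the same order the halves were written.
static UUID readProxyId(ByteArrayDataInput in) {
  long most = in.readLong();
  long least = in.readLong();
  return new UUID(most, least);
}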
@Override
public void read(Client c, int len, ByteArrayDataInputWrapper buf) throws IOException {
  long temp = buf.readLong();
  ByteArrayDataOutput buff = ByteStreams.newDataOutput();
  writeVarInt(buff, 11);
  buff.writeLong(temp);
  c.net.sendPacket(buff, c.out);
}
public void delayedDig(long pos, byte face) {
  try {
    ByteArrayDataOutput buf = ByteStreams.newDataOutput();
    Packet.writeVarInt(buf, 7);
    buf.writeByte(2);
    buf.writeLong(pos);
    buf.writeByte(face);
    c.net.sendPacket(buf, c.out);
  } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  }
}
/**
 * Try to claim a stream event offset.
 *
 * @return The row key for writing to the state table if successfully claimed or {@code null} if not claimed.
 */
private byte[] claimEntry(StreamFileOffset offset, byte[] claimedStateContent) throws IOException {
  ByteArrayDataOutput out = ByteStreams.newDataOutput(50);
  out.writeLong(consumerConfig.getGroupId());
  StreamUtils.encodeOffset(out, offset);
  byte[] row = out.toByteArray();

  SortedMap<byte[], byte[]> rowStates = getInitRowStates(row);

  // See if the entry should be ignored. If it is in the rowStates with null value, then it should be ignored.
  byte[] rowState = rowStates.get(row);
  if (rowStates.containsKey(row) && rowState == null) {
    return null;
  }

  // Only need to claim entry if FIFO and group size > 1
  if (consumerConfig.getDequeueStrategy() == DequeueStrategy.FIFO && consumerConfig.getGroupSize() > 1) {
    return claimFifoEntry(row, claimedStateContent, rowState) ? row : null;
  }

  // For Hash, RR and FIFO with group size == 1, no need to claim and check,
  // as it's already handled by the readFilter
  return row;
}
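Because writeLong emits the group id big-endian, every row key for a consumer group shares a fixed 8-byte prefix (useful, for example, for prefix scans over a group's state). A minimal sketch of building that prefix independently, assuming Guava's Longs utility:

import com.google.common.primitives.Longs;

// Big-endian, identical to the first eight bytes written by out.writeLong(groupId).
byte[] groupPrefix = Longs.toByteArray(consumerConfig.getGroupId());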
Packet.writeVarInt(buf, 7);
buf.writeByte(0);
buf.writeLong(pos);
buf.writeByte(1);
c.net.sendPacket(buf, c.out);
public void placeBlock(int x, int y, int z, int id, int face) {
  ByteArrayDataOutput buf = ByteStreams.newDataOutput();
  try {
    long pos = ((long) (x & 0x3FFFFFF) << 38) | ((long) (y & 0xFFF) << 26) | (z & 0x3FFFFFF);
    Packet.writeVarInt(buf, 8);
    buf.writeLong(pos);
    buf.writeByte(face);
    new Slot(0, (short) id, (byte) 1, (short) 0, (byte) 0).sendSlot(buf);
    buf.writeByte(0);
    buf.writeByte(0);
    buf.writeByte(0);
    c.net.sendPacket(buf, c.out);
    buf = ByteStreams.newDataOutput();
    Packet.writeVarInt(buf, 10);
    c.net.sendPacket(buf, c.out);
  } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  }
}
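The packed position puts x in the top 26 bits, y in the next 12, and z in the low 26. Decoding needs sign extension for the two 26-bit fields; a minimal sketch, assuming that layout:

// Unpack a position encoded as x:26 | y:12 | z:26 (high to low bits).
// Shifting left then arithmetic-shifting right sign-extends the narrow fields.
static int[] unpackPosition(long pos) {
  int x = (int) (pos >> 38);            // top 26 bits, already sign-extended
  int y = (int) ((pos >> 26) & 0xFFF);  // 12-bit field, 0..4095
  int z = (int) (pos << 38 >> 38);      // low 26 bits, sign-extended
  return new int[] {x, y, z};
}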
out.writeUTF("LastOnline"); out.writeUTF(user); out.writeLong(RedisBungee.getApi().getLastOnline(plugin.getUuidTranslator().getTranslatedUuid(user, true))); break; case "ServerPlayers":
} else if (value instanceof Long) {
  buffer.write(DATA_TYPE_LONG);
  buffer.writeLong((Long) value);
} else if (value instanceof Double) {
  buffer.write(DATA_TYPE_DOUBLE);
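Tagging each value with a one-byte type marker before the payload lets the reader dispatch without an external schema. A minimal sketch of the matching decode, assuming the same DATA_TYPE_* constants and a Guava ByteArrayDataInput; the Double branch pairs with the writeDouble call presumably following the truncated line above:

// Hypothetical decoder for the type-tagged stream.
static Object readTaggedValue(ByteArrayDataInput in) {
  byte tag = in.readByte();
  if (tag == DATA_TYPE_LONG) {
    return in.readLong();
  } else if (tag == DATA_TYPE_DOUBLE) {
    return in.readDouble();
  }
  throw new IllegalStateException("Unknown type tag: " + tag);
}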