/** Factory hook: creates a blob-reader flyweight bound to this pipe instance. */
protected DataInputBlobReader<T> createNewBlobReader() {
    return new DataInputBlobReader<>(this);
}
/** Factory hook: creates a blob-reader flyweight bound to this pipe instance. */
protected DataInputBlobReader<T> createNewBlobReader() {
    return new DataInputBlobReader<>(this);
}
/** Factory hook: creates a blob-reader flyweight bound to this pipe instance. */
protected DataInputBlobReader<T> createNewBlobReader() {
    return new DataInputBlobReader<>(this);
}
/**
 * One-time stage setup: allocates the stream flyweights and builds the
 * reverse-lookup table mapping a field id (offset by 10000) back to its
 * index in the schema's field-id script.
 */
@Override
public void startup() {
    reader = new DataInputBlobReader<RawDataSchema>(input);
    output2Writer = new DataOutputBlobWriter<RawDataSchema>(output2);

    int maxValue = PhastCodecSchema.FROM.messageStarts.length + 1;
    idxReverseLookup = new int[maxValue];

    // Walk the script from the end, recording where each id > 10000 lives.
    for (int i = PhastCodecSchema.FROM.fieldIdScript.length - 1; i >= 0; i--) {
        long id = PhastCodecSchema.FROM.fieldIdScript[i];
        if (id > 10000) {
            idxReverseLookup[(int) (id - 10000)] = i;
        }
    }
}
/**
 * One-time stage setup: records the start time, allocates the stream
 * flyweights, and precomputes per-script-index length and fragment-size
 * lookup tables from the schema constants.
 */
@Override
public void startup() {
    startup = System.currentTimeMillis();
    input2Reader = new DataInputBlobReader<RawDataSchema>(input2);
    writer = new DataOutputBlobWriter<RawDataSchema>(output);

    int scriptLength = PhastCodecSchema.FROM.fieldIdScript.length;
    lengthLookup = new short[scriptLength];
    sizeLookup = new int[scriptLength];
    for (int idx = scriptLength - 1; idx >= 0; idx--) {
        // Field ids are stored offset by 10000; fold that back out here.
        lengthLookup[idx] = (short) (PhastCodecSchema.FROM.fieldIdScript[idx] - 10000);
        sizeLookup[idx] = PhastCodecSchema.FROM.fragDataSize[idx];
    }

    int maxFieldsPerMessage = PhastCodecSchema.FROM.messageStarts.length;
    maxBytesPerMessage = maxFieldsPerMessage * 10;
}
/**
 * Creates a rate-regulated stream backed by an internal RawDataSchema pipe.
 *
 * @param bitPerSecond target throughput cap in bits per second
 * @param maxWrittenChunksInFlight ring capacity in chunks
 * @param maxWrittenChunkSizeInBytes largest single chunk, in bytes
 * @throws UnsupportedOperationException if the pipe has no blob space
 */
public StreamRegulator(long bitPerSecond, int maxWrittenChunksInFlight, int maxWrittenChunkSizeInBytes) {
    PipeConfig<RawDataSchema> pipeConfig =
            new PipeConfig<RawDataSchema>(RawDataSchema.instance, maxWrittenChunksInFlight, maxWrittenChunkSizeInBytes);
    this.pipe = new Pipe<RawDataSchema>(pipeConfig);
    this.pipe.initBuffers();

    // Publish immediately; release in batches of one third of capacity.
    Pipe.setPublishBatchSize(pipe, 0);
    Pipe.setReleaseBatchSize(pipe, maxWrittenChunksInFlight / 3);

    if (this.pipe.blobMask <= 0) {
        throw new UnsupportedOperationException("Pipe must have room to send blob data. Found size:" + this.pipe.sizeOfBlobRing + " config: " + pipeConfig);
    }

    this.inputStreamFlyweight = new DataInputBlobReader<RawDataSchema>(pipe);
    this.outputStreamFlyweight = new DataOutputBlobWriter<RawDataSchema>(pipe);
    this.bitPerSecond = bitPerSecond;
    //TODO: may want to add latency per chunk, per startup, or per N bytes.
}
/**
 * Creates a rate-regulated stream backed by an internal RawDataSchema pipe.
 *
 * @param bitPerSecond target throughput cap in bits per second
 * @param maxWrittenChunksInFlight ring capacity in chunks
 * @param maxWrittenChunkSizeInBytes largest single chunk, in bytes
 * @throws UnsupportedOperationException if the pipe has no blob space
 */
public StreamRegulator(long bitPerSecond, int maxWrittenChunksInFlight, int maxWrittenChunkSizeInBytes) {
    PipeConfig<RawDataSchema> pipeConfig =
            new PipeConfig<RawDataSchema>(RawDataSchema.instance, maxWrittenChunksInFlight, maxWrittenChunkSizeInBytes);
    this.pipe = new Pipe<RawDataSchema>(pipeConfig);
    this.pipe.initBuffers();

    // Publish immediately; release in batches of one third of capacity.
    Pipe.setPublishBatchSize(pipe, 0);
    Pipe.setReleaseBatchSize(pipe, maxWrittenChunksInFlight / 3);

    if (this.pipe.blobMask <= 0) {
        throw new UnsupportedOperationException("Pipe must have room to send blob data. Found size:" + this.pipe.sizeOfBlobRing + " config: " + pipeConfig);
    }

    this.inputStreamFlyweight = new DataInputBlobReader<RawDataSchema>(pipe);
    this.outputStreamFlyweight = new DataOutputBlobWriter<RawDataSchema>(pipe);
    this.bitPerSecond = bitPerSecond;
    //TODO: may want to add latency per chunk, per startup, or per N bytes.
}
/**
 * Creates a rate-regulated stream backed by an internal RawDataSchema pipe.
 *
 * @param bitPerSecond target throughput cap in bits per second
 * @param maxWrittenChunksInFlight ring capacity in chunks
 * @param maxWrittenChunkSizeInBytes largest single chunk, in bytes
 * @throws UnsupportedOperationException if the pipe has no blob space
 */
public StreamRegulator(long bitPerSecond, int maxWrittenChunksInFlight, int maxWrittenChunkSizeInBytes) {
    PipeConfig<RawDataSchema> pipeConfig =
            new PipeConfig<RawDataSchema>(RawDataSchema.instance, maxWrittenChunksInFlight, maxWrittenChunkSizeInBytes);
    this.pipe = new Pipe<RawDataSchema>(pipeConfig);
    this.pipe.initBuffers();

    // Publish immediately; release in batches of one third of capacity.
    Pipe.setPublishBatchSize(pipe, 0);
    Pipe.setReleaseBatchSize(pipe, maxWrittenChunksInFlight / 3);

    if (this.pipe.blobMask <= 0) {
        throw new UnsupportedOperationException("Pipe must have room to send blob data. Found size:" + this.pipe.sizeOfBlobRing + " config: " + pipeConfig);
    }

    this.inputStreamFlyweight = new DataInputBlobReader<RawDataSchema>(pipe);
    this.outputStreamFlyweight = new DataOutputBlobWriter<RawDataSchema>(pipe);
    this.bitPerSecond = bitPerSecond;
    //TODO: may want to add latency per chunk, per startup, or per N bytes.
}
@Test public void testEncodeString() throws IOException{ //create a new blob pipe to put a string on Pipe<RawDataSchema> pipe = new Pipe<RawDataSchema>(new PipeConfig<RawDataSchema>(RawDataSchema.instance, 100, 4000)); pipe.initBuffers(); DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(pipe); //encode a string on blob using the static method StringBuilder testString = new StringBuilder("This is a test"); PhastEncoder.encodeString(writer, testString , 0, 0, false); writer.close(); //check what is on the pipe DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(pipe); //should be -63 int test = reader.readPackedInt(); //the string String value = reader.readUTF(); reader.close(); String s = value.toString(); assertTrue((test==-63) && (s.compareTo("This is a test")==0)); }
/**
 * Verifies that PhastDecoder.decodeString reads back a string written as a
 * packed -63 header followed by a UTF payload.
 *
 * Fixes: replaced {@code compareTo(..) == 0} with the idiomatic
 * {@code equals} for an equality check (constant first, so a null decode
 * fails the assertion rather than throwing NPE).
 */
@Test
public void decodeStringTest() throws IOException {
    Pipe<RawDataSchema> pipe = new Pipe<RawDataSchema>(new PipeConfig<RawDataSchema>(RawDataSchema.instance, 100, 4000));
    pipe.initBuffers();

    // Write the header and the UTF string the decoder expects.
    DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(pipe);
    DataOutputBlobWriter.writePackedInt(writer, -63);
    writer.writeUTF("This is a test");
    writer.close();

    DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(pipe);
    String stest = PhastDecoder.decodeString(reader, false);
    reader.close();

    assertTrue("This is a test".equals(stest));
}
@Test public void defaultIntTest() throws IOException{ //create a blob to test Pipe<RawDataSchema> encodedValuesToValidate = new Pipe<RawDataSchema>(new PipeConfig<RawDataSchema>(RawDataSchema.instance, 100, 4000)); encodedValuesToValidate.initBuffers(); DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(encodedValuesToValidate); //make int array int[] defaultInt = new int[5]; defaultInt[3] = 4; //should encode 16 PhastEncoder.encodeDefaultInt(defaultInt, writer, 1, 1, 3, 16, false); //should encode 4 PhastEncoder.encodeDefaultInt(defaultInt, writer, 0, 1, 3, 16, false); writer.close(); DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(encodedValuesToValidate); int test1 = reader.readPackedInt(); //shouldnt encode anything reader.close(); }
// Attach a reader flyweight and decode the packed int written by the setup
// above this fragment; NOTE(review): expects 3894 to have been encoded by the
// writer side — confirm against the surrounding test method.
DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(pipe); int test = PhastDecoder.decodePresentInt(reader, 0, 0, false); assertTrue(test==3894);
// Read back two consecutive packed longs; both writes in the surrounding test
// are expected to have encoded 342 (two asserts, two distinct reads).
DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(encodedValuesToValidate); assertTrue(reader.readPackedLong()==342); assertTrue(reader.readPackedLong()==342);
// Read back two consecutive packed longs; both writes in the surrounding test
// are expected to have encoded 455 (two asserts, two distinct reads).
DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(encodedValuesToValidate); assertTrue(reader.readPackedLong()==455); assertTrue(reader.readPackedLong()==455);
// Decode a present (non-default) long from the pipe and check the expected
// payload; NOTE(review): fragment assumes the writer encoded 3894 earlier.
DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(pipe); long test = PhastDecoder.decodePresentLong(reader, 0, 0, false); assertTrue(test==3894);
// Decode a present (non-default) short and check the expected payload (3894
// fits in a short); NOTE(review): fragment assumes the writer encoded 3894.
DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(pipe); short test = PhastDecoder.decodePresentShort(reader, 0, 0, false); assertTrue(test==3894);
// Flush any batched writes so the reader side can see the data, then attach
// a reader flyweight to the same pipe.
Pipe.publishAllBatchedWrites(encodedValuesToValidate); DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(encodedValuesToValidate);
// Reader flyweight over the second decoded-output pipe (declared outside this
// fragment) used to validate its contents.
DataInputBlobReader<RawDataSchema> decodedReader = new DataInputBlobReader<RawDataSchema>(decodedDataToValidate2);