/**
 * Reports whether the backing pipe currently holds at least one
 * complete message ready to be consumed.
 *
 * @return true when a full message is available to read
 */
public boolean hasContentToRead() {
    final boolean fullMessageReady = Pipe.hasContentToRead(pipe);
    return fullMessageReady;
}
/**
 * Reports whether the backing pipe currently holds at least one
 * complete message ready to be consumed.
 *
 * @return true when a full message is available to read
 */
public boolean hasContentToRead() {
    final boolean fullMessageReady = Pipe.hasContentToRead(pipe);
    return fullMessageReady;
}
/**
 * Reports whether the backing pipe currently holds at least one
 * complete message ready to be consumed.
 *
 * @return true when a full message is available to read
 */
public boolean hasContentToRead() {
    final boolean fullMessageReady = Pipe.hasContentToRead(pipe);
    return fullMessageReady;
}
/**
 * Returns true only when every column input pipe has a full message
 * waiting to be read.
 *
 * NOTE(review): the x++/y++ counters look like leftover instrumentation —
 * the sibling allHaveContentToRead overload does not touch them; confirm
 * they are still needed before removing.
 *
 * @param columnPipeInput pipes to check, all must be ready
 * @return true when every pipe has content
 */
private boolean allHaveContentToRead(Pipe<ColumnSchema<C>>[] columnPipeInput) {
    for (int idx = columnPipeInput.length - 1; idx >= 0; idx--) {
        if (!Pipe.hasContentToRead(columnPipeInput[idx])) {
            x++; // counts calls that found at least one empty pipe
            return false;
        }
    }
    y++; // counts calls where every pipe was ready
    return true;
}
private boolean allHaveData(Pipe<HistogramSchema>[] inputs) { int i = inputs.length; while (--i >= 0) { //System.err.println("i "+i); if (!Pipe.hasContentToRead(inputs[i])) { return false; } } return true; }
/**
 * Returns true only when every column input pipe has a full message
 * waiting to be read.
 *
 * @param columnPipeInput pipes to check, all must be ready
 * @return true when every pipe has content
 */
private boolean allHaveContentToRead(Pipe<ColumnSchema<M>>[] columnPipeInput) {
    for (int idx = columnPipeInput.length - 1; idx >= 0; idx--) {
        if (!Pipe.hasContentToRead(columnPipeInput[idx])) {
            return false;
        }
    }
    return true;
}
/**
 * The stage counts as paused when the expected-input pipe has no
 * message waiting.
 */
@Override
public boolean paused() {
    final boolean hasWork = Pipe.hasContentToRead(expectedInput, 1);
    return !hasWork;
}
/**
 * The stage counts as paused when the expected-input pipe has no
 * message waiting.
 */
@Override
public boolean paused() {
    final boolean hasWork = Pipe.hasContentToRead(expectedInput, 1);
    return !hasWork;
}
private static int findPipesWithContent(ReactiveListenerStage r, Pipe[] localInputs, Object localObj, ReactiveOperator[] localOperators, int pipeIdx, int passes) { while (--pipeIdx >= 0) { if (!Pipe.hasContentToRead(localInputs[pipeIdx])) { //most calls are stopping on this if continue; } else { passes = applyToPipeWithData(r, localObj, localOperators, passes, pipeIdx, localInputs[pipeIdx]); } } return passes; }
private void noGoDoSingles(final int a, Pipe<?> pipe) { if (Pipe.isEmpty(pipe) || !Pipe.hasContentToRead(pipe)) { } else { //only go if we have a full message noGoProcessSingleMessage(a, pipe); } }
/**
 * Prepares for reading, then begins a new read if a full message is
 * waiting on the pipe.
 *
 * @return true when a new read was begun successfully
 */
private boolean readChunk() {
    readPrep();
    // short-circuit: beginNewRead only runs when content is available
    return Pipe.hasContentToRead(pipe) && beginNewRead();
}
/**
 * Prepares for reading, then begins a new read if a full message is
 * waiting on the pipe.
 *
 * @return true when a new read was begun successfully
 */
private boolean readChunk() {
    readPrep();
    // short-circuit: beginNewRead only runs when content is available
    return Pipe.hasContentToRead(pipe) && beginNewRead();
}
private static int applyToPipeWithData(ReactiveListenerStage r, Object localObj, ReactiveOperator[] localOperators, int passes, int i, Pipe pipe) { if (null!=localOperators && null!=localOperators[i]) {//skip if null, this is for the TickListener localOperators[i].apply(i, localObj, pipe, r); r.realStage.didWork(); if (Pipe.hasContentToRead(pipe)) { passes++; } } return passes; }
/**
 * Prepares for reading, then begins a new read if a full message is
 * waiting on the pipe.
 *
 * @return true when a new read was begun successfully
 */
private boolean readChunk() {
    readPrep();
    // short-circuit: beginNewRead only runs when content is available
    return Pipe.hasContentToRead(pipe) && beginNewRead();
}
public static boolean hasPacketId(IdGenCache genCache, Pipe<MQTTIdRangeSchema> idGenIn) { boolean hasId = true; if (IdGenCache.isEmpty(genCache)) { //get next range if (Pipe.hasContentToRead(idGenIn)) { loadNextPacketIdRange(idGenIn, genCache); } else { hasId = false; //logger.trace("no packed IDs are left for use"); } } return hasId; }
@Override public void run() { if (Pipe.hasContentToRead(input)) { int remaining = limit; if (Pipe.hasRoomForWrite(output, remaining)) { //move all the data we can while ( remaining > 0 //active batch still has data && (remaining>=maxFrag) //has a full fragment ) { remaining -= Pipe.copyFragment(input, output); } } } }
/**
 * Main work loop: runs when this stage has a live connection or inbound
 * content, handling ping/replay, acks and payload unless blocked by a
 * pending persistence load.
 */
@Override
public void run() {
    if (connectionId()>=0 || Pipe.hasContentToRead(input)) {
        if (!processPersistLoad()) {
            // normal path: ping/replay first, then acks, then payload,
            // all against the same connection id
            long connectionId = processPingAndReplay();
            processInputAcks(connectionId);
            processInput(connectionId);
        } else {
            // Blocked waiting on persistence. The leading-zeros comparison
            // is true only when the pre-incremented count crosses a power
            // of two, so the warning is logged once per power-of-two count
            // instead of on every pass (log throttling).
            if (Integer.numberOfLeadingZeros(countOfBlocksWaitingForPersistLoad) != Integer.numberOfLeadingZeros(++countOfBlocksWaitingForPersistLoad)) {
                logger.info("NOTE: too much volume, encoding has been blocked by waiting for persistance {} times",countOfBlocksWaitingForPersistLoad);
            }
        }
    }
}
/**
 * Drains complete RawDataSchema chunk messages from the pipe into the
 * reader, accumulating each field while keeping the read lock held.
 * Stops when the pipe is empty or the next peeked int is negative
 * (presumably an end-of-stream marker — confirm against the writer).
 *
 * NOTE(review): the parameter is named targetPipe but is read from,
 * not written to — confirm the naming with callers.
 */
public static void appendNextFieldToReader(LittleEndianDataInputBlobReader reader, Pipe<RawDataSchema> targetPipe) {
    while (Pipe.hasContentToRead(targetPipe) && Pipe.peekInt(targetPipe) >=0) {
        Pipe.takeMsgIdx(targetPipe);
        accumLowLevelAPIField(reader);
        // advance without releasing the read lock so accumulated data
        // in the blob remains valid for the reader
        Pipe.readNextWithoutReleasingReadLock(targetPipe);
        Pipe.confirmLowLevelRead(targetPipe, Pipe.sizeOf(targetPipe, RawDataSchema.MSG_CHUNKEDSTREAM_1));
    }
}
/**
 * Drains complete RawDataSchema chunk messages from the pipe into the
 * reader, accumulating each field while keeping the read lock held.
 * Stops when the pipe is empty or the next peeked int is negative
 * (presumably an end-of-stream marker — confirm against the writer).
 *
 * NOTE(review): the parameter is named targetPipe but is read from,
 * not written to — confirm the naming with callers.
 */
public static void appendNextFieldToReader(LittleEndianDataInputBlobReader reader, Pipe<RawDataSchema> targetPipe) {
    while (Pipe.hasContentToRead(targetPipe) && Pipe.peekInt(targetPipe) >=0) {
        Pipe.takeMsgIdx(targetPipe);
        accumLowLevelAPIField(reader);
        // advance without releasing the read lock so accumulated data
        // in the blob remains valid for the reader
        Pipe.readNextWithoutReleasingReadLock(targetPipe);
        Pipe.confirmLowLevelRead(targetPipe, Pipe.sizeOf(targetPipe, RawDataSchema.MSG_CHUNKEDSTREAM_1));
    }
}
/**
 * Drains complete RawDataSchema chunk messages from the pipe into the
 * reader, accumulating each field while keeping the read lock held.
 * Stops when the pipe is empty or the next peeked int is negative
 * (presumably an end-of-stream marker — confirm against the writer).
 *
 * NOTE(review): the parameter is named targetPipe but is read from,
 * not written to — confirm the naming with callers.
 */
public static void appendNextFieldToReader(LittleEndianDataInputBlobReader reader, Pipe<RawDataSchema> targetPipe) {
    while (Pipe.hasContentToRead(targetPipe) && Pipe.peekInt(targetPipe) >=0) {
        Pipe.takeMsgIdx(targetPipe);
        accumLowLevelAPIField(reader);
        // advance without releasing the read lock so accumulated data
        // in the blob remains valid for the reader
        Pipe.readNextWithoutReleasingReadLock(targetPipe);
        Pipe.confirmLowLevelRead(targetPipe, Pipe.sizeOf(targetPipe, RawDataSchema.MSG_CHUNKEDSTREAM_1));
    }
}