Refine search
/**
 * Adds {@code partialContent} as a component of the aggregation buffer, skipping
 * buffers with nothing readable. The component is retained so the composite owns
 * its own reference, and {@code true} advances the composite's writer index.
 */
private static void appendPartialContent(CompositeByteBuf content, ByteBuf partialContent) {
    if (!partialContent.isReadable()) {
        // Nothing to aggregate; empty buffers are not added (and not retained).
        return;
    }
    content.addComponent(true, partialContent.retain());
}
/**
 * Accumulates every inbound {@link ByteBuf} into the {@code bufs} composite buffer.
 *
 * <p>Fix: the original called {@code bufs.addComponent(buf)} — which does NOT advance
 * the composite's writer index — and then bumped the writer index manually by
 * {@code buf.writerIndex()}. That manual bump over-counts whenever
 * {@code buf.readerIndex() > 0} (it adds already-consumed bytes). Using
 * {@code addComponent(true, buf)} lets the composite advance its writer index by
 * exactly {@code buf.readableBytes()}, which is correct for any reader index.
 *
 * <p>NOTE(review): non-ByteBuf messages are silently dropped without a release or a
 * {@code fireChannelRead} — confirm this is intended; if reference-counted messages
 * can arrive here, this leaks them.
 */
@Override
public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception {
    if (msg instanceof ByteBuf) {
        ByteBuf buf = (ByteBuf) msg;
        // true => writer index grows by buf.readableBytes(); no manual bookkeeping needed.
        bufs.addComponent(true, buf);
    }
}
// Decorator-style delegation: forwards to the wrapped composite and returns this
// wrapper (not the wrapped instance) so calls can be chained on the decorator.
// NOTE(review): uses the delegate's default writer-index behavior (no explicit
// increaseWriterIndex flag) — confirm callers expect the writer index NOT to advance.
@Override public CompositeByteBuf addComponents(ByteBuf... buffers) { wrapped.addComponents(buffers); return this; }
// Wrapping constructor: sizes the composite for (buffers.length - offset) components,
// then adds the wrapped array elements starting at `offset`. addComponents0(false, ...)
// adds WITHOUT advancing the writer index; consolidateIfNeeded() merges components when
// the count exceeds maxNumComponents; finally setIndex(0, capacity()) exposes the whole
// capacity as readable. NOTE(review): the sibling constructor below uses setIndex0 —
// presumably the unchecked internal variant; confirm which one this version should call.
<T> CompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents, ByteWrapper<T> wrapper, T[] buffers, int offset) { this(alloc, direct, maxNumComponents, buffers.length - offset); addComponents0(false, 0, wrapper, buffers, offset); consolidateIfNeeded(); setIndex(0, capacity()); }
// Array-wrapping constructor: same shape as the ByteWrapper variant — reserve room for
// (buffers.length - offset) components, add them without advancing the writer index
// (addComponents0(false, ...)), consolidate if over maxNumComponents, then mark the full
// capacity readable. Uses setIndex0 — presumably the internal, bounds-check-free setter,
// safe here because 0..capacity() is always valid; TODO confirm against the base class.
CompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents, ByteBuf[] buffers, int offset) { this(alloc, direct, maxNumComponents, buffers.length - offset); addComponents0(false, 0, buffers, offset); consolidateIfNeeded(); setIndex0(0, capacity()); }
// Convenience overload: exposes only the readable region [readerIndex, writerIndex)
// as NIO buffers by delegating to the ranged nioBuffers(index, length) variant.
@Override public ByteBuffer[] nioBuffers() { return nioBuffers(readerIndex(), readableBytes()); }
// NOTE(review): truncated search-result snippet — code between the early return, the
// per-component size check, and the encrypted-buffer allocation has been elided, so
// braces do not balance and this fragment is not compilable as shown. Visible behavior:
// if the channel is already closed the message is released (preventing a buffer leak)
// and encryption is skipped; otherwise each component of the composite `cbb` is checked
// against wrapSizeLimit and an RpcException is thrown when a chunk exceeds it; the
// output buffer is allocated with room for a length-field prefix ahead of the wrapped
// payload. Presumably the elided code iterates components and performs the SASL wrap —
// verify against the full source.
public void encode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws IOException { if (!ctx.channel().isOpen()) { logger.debug("In " + RpcConstants.SASL_ENCRYPTION_HANDLER + " and channel is not open. " + "So releasing msg memory before encryption."); msg.release(); return; final int numComponents = cbb.numComponents(); final ByteBuf component = cbb.component(currentIndex); if (component.readableBytes() > wrapSizeLimit) { throw new RpcException(String.format("Component Chunk size: %d is greater than the wrapSizeLimit: %d", component.readableBytes(), wrapSizeLimit)); final ByteBuf encryptedBuf = ctx.alloc().buffer(wrappedMsg.length + RpcConstants.LENGTH_FIELD_LENGTH);
// NOTE(review): truncated search-result snippet — the loop around the per-chunk body
// has been elided (braces do not balance; not compilable as shown). Visible behavior:
// released-and-skip when the channel is closed; otherwise the message is split into
// ceil(readableBytes / chunkSize) slices. Each slice is retained (slices share the
// parent's memory but not its reference count) and added via addComponent WITHOUT the
// increaseWriterIndex flag, so the writer index is maintained manually through
// cbbWriteIndex and applied at the end with cbb.writerIndex(cbbWriteIndex).
// Presumably the elided code wraps the slice logic in a while/for over the chunks —
// verify against the full source.
if (!ctx.channel().isOpen()) { logger.debug("Channel closed, skipping encode inside {}.", RpcConstants.CHUNK_CREATION_HANDLER); msg.release(); return; int numChunks = (int) Math.ceil((double) msg.readableBytes() / chunkSize); final CompositeByteBuf cbb = ctx.alloc().compositeBuffer(numChunks); int currentChunkLen = min(msg.readableBytes(), chunkSize); final ByteBuf chunkBuf = msg.slice(msg.readerIndex(), currentChunkLen); chunkBuf.retain(); cbb.addComponent(chunkBuf); cbbWriteIndex += currentChunkLen; msg.skipBytes(currentChunkLen); cbb.writerIndex(cbbWriteIndex);
// NOTE(review): truncated search-result snippet — an `if (cumulation instanceof
// CompositeByteBuf)` test between the two `else` branches (and the closing
// try/finally) has been elided, leaving mismatched braces; not compilable as shown.
// Visible behavior (matches the shape of a Netty composite cumulator): if the
// cumulation is shared (refCnt > 1) the bytes are copied into an expanded buffer to
// avoid mutating a buffer someone else holds; otherwise an existing composite is
// reused, or a fresh unbounded composite is built from cumulation + in. `in = null`
// transfers ownership to the composite so the (elided) finally block won't release it.
ByteBuf buffer; try { if (cumulation.refCnt() > 1) { buffer = expandCumulation(alloc, cumulation, in.readableBytes()); buffer.writeBytes(in); } else { CompositeByteBuf composite; composite = (CompositeByteBuf) cumulation; } else { composite = alloc.compositeBuffer(Integer.MAX_VALUE); composite.addComponent(true, cumulation); composite.addComponent(true, in); in = null; buffer = composite;
// NOTE(review): truncated search-result snippet — several statements and closing braces
// are elided (not compilable as shown). Visible behavior, in the shape of a message
// aggregator: optionally write a "continue" response and decide whether to ignore the
// rest of an oversized message; begin aggregation either from the start-message's
// retained content or from an empty buffer; build a composite sized by
// maxCumulationBufferComponents and append partial content; enforce maxContentLength
// using the subtraction form (maxContentLength - m.content().readableBytes()) which
// avoids int overflow that `content + m.content > max` could hit. Verify the elided
// control flow against the full source before relying on this ordering.
Object continueResponse = newContinueResponse(m, maxContentLength, ctx.pipeline()); if (continueResponse != null) { handlingOversizedMessage = ignoreContentAfterContinueResponse(continueResponse); final ChannelFuture future = ctx.writeAndFlush(continueResponse).addListener(listener); O aggregated; if (m instanceof ByteBufHolder) { aggregated = beginAggregation(m, ((ByteBufHolder) m).content().retain()); } else { aggregated = beginAggregation(m, EMPTY_BUFFER); CompositeByteBuf content = ctx.alloc().compositeBuffer(maxCumulationBufferComponents); if (m instanceof ByteBufHolder) { appendPartialContent(content, ((ByteBufHolder) m).content()); final C m = (C) msg; if (content.readableBytes() > maxContentLength - m.content().readableBytes()) {
// NOTE(review): truncated search-result snippet — the throw's message argument, the
// copying branch's return, and the composite's trailing arguments are elided; braces do
// not balance and two alternative framing strategies appear spliced together (copy into
// one buffer vs. zero-copy CompositeByteBuf of header + payload). Visible behavior:
// enforce maxOutboundMessageSize, releasing the message before throwing
// RESOURCE_EXHAUSTED (no leak on the error path); frame = 1-byte compressed flag +
// 4-byte big-endian length + payload (gRPC-style length-prefixed framing). Verify which
// strategy the full source actually uses before editing.
private ByteBuf write(ByteBuf message, boolean compressed) { final int messageLength = message.readableBytes(); if (maxOutboundMessageSize >= 0 && messageLength > maxOutboundMessageSize) { message.release(); throw Status.RESOURCE_EXHAUSTED .withDescription( final ByteBuf buf = alloc.buffer(HEADER_LENGTH + messageLength); buf.writeByte(compressed ? COMPRESSED : UNCOMPRESSED); buf.writeInt(messageLength); buf.writeBytes(message); return new CompositeByteBuf(alloc, true, 2, alloc.buffer(HEADER_LENGTH) .writeByte(compressed ? COMPRESSED : UNCOMPRESSED) .writeInt(messageLength),
// NOTE(review): truncated search-result snippet — the null-check guarding the `break`
// (presumably `if (partUncompressedContent == null)`) and several closing braces are
// elided; not compilable as shown. Visible behavior: feed the frame's retained content
// (plus an optional FRAME_TAIL terminator) into an embedded decoder, then drain the
// decoder's inbound queue into a composite, releasing empty parts instead of adding
// them. If the input was readable but nothing was decoded, the composite is released
// before throwing CodecException — so the error path does not leak the aggregate.
boolean readable = msg.content().isReadable(); decoder.writeInbound(msg.content().retain()); if (appendFrameTail(msg)) { decoder.writeInbound(Unpooled.wrappedBuffer(FRAME_TAIL)); CompositeByteBuf compositeUncompressedContent = ctx.alloc().compositeBuffer(); for (;;) { ByteBuf partUncompressedContent = decoder.readInbound(); break; if (!partUncompressedContent.isReadable()) { partUncompressedContent.release(); continue; compositeUncompressedContent.addComponent(true, partUncompressedContent); if (readable && compositeUncompressedContent.numComponents() <= 0) { compositeUncompressedContent.release(); throw new CodecException("cannot read uncompressed buffer");
@Override public void channelRead0(ChannelHandlerContext ctx, ByteBuf in) throws Exception { byte[] actual = new byte[in.readableBytes()]; in.readBytes(actual); int lastIdx = recvCounter.get(); for (int i = 0; i < actual.length; i ++) { assertEquals(data[i + lastIdx], actual[i]); } ByteBuf buf = Unpooled.wrappedBuffer(actual); if (useCompositeByteBuf) { buf = Unpooled.compositeBuffer().addComponent(true, buf); } ctx.write(buf); recvCounter.addAndGet(actual.length); // Perform server-initiated renegotiation if necessary. if (renegotiation.type == RenegotiationType.SERVER_INITIATED && recvCounter.get() > data.length / 2 && renegoFuture == null) { SslHandler sslHandler = ctx.pipeline().get(SslHandler.class); Future<Channel> hf = sslHandler.handshakeFuture(); assertThat(hf.isDone(), is(true)); sslHandler.engine().setEnabledCipherSuites(new String[] { renegotiation.cipherSuite }); logStats("SERVER RENEGOTIATES"); renegoFuture = sslHandler.renegotiate(); assertThat(renegoFuture, is(not(sameInstance(hf)))); assertThat(renegoFuture, is(sameInstance(sslHandler.handshakeFuture()))); assertThat(renegoFuture.isDone(), is(false)); } } }
// NOTE(review): truncated search-result snippet — braces do not balance and the dangling
// `} else { logger.debug(...) }` belongs to elided surrounding code; not compilable as
// shown. Visible behavior: a header is serialized through CodedOutputStream into a
// pooled buffer, then a composite is built from that header buffer plus each body in
// msg.dBodies. Components are added WITHOUT the increaseWriterIndex flag, so the total
// readable length is tracked manually in bufLength and applied once via
// cbb.writerIndex(bufLength) before handing the composite to `out`. Presumably the
// leading isOpen() check releases/skips on a closed channel — verify in the full source.
if (!ctx.channel().isOpen()) { ByteBuf buf = ctx.alloc().buffer(); OutputStream os = new ByteBufOutputStream(buf); CodedOutputStream cos = CodedOutputStream.newInstance(os); final CompositeByteBuf cbb = ctx.alloc().compositeBuffer(msg.dBodies.length + 1); cbb.addComponent(buf); int bufLength = buf.readableBytes(); for (ByteBuf b : msg.dBodies) { cbb.addComponent(b); bufLength += b.readableBytes(); cbb.writerIndex(bufLength); out.add(cbb); } else { logger.debug("Sent message. Ending writer index was {}.", buf.writerIndex());
// NOTE(review): truncated search-result snippet — the enclosing switch, the loop header
// that declares `i`, and closing braces are elided; not compilable as shown. Visible
// behavior (shape of a varargs wrappedBuffer factory): with exactly one buffer, return
// it wrapped (forced BIG_ENDIAN) if readable, otherwise release it — empty buffers are
// never wrapped, they are freed. In the multi-buffer case, the first readable buffer
// becomes the starting component of a non-direct CompositeByteBuf over the remainder of
// the array; non-readable buffers encountered before it are released.
case 1: ByteBuf buffer = buffers[0]; if (buffer.isReadable()) { return wrappedBuffer(buffer.order(BIG_ENDIAN)); } else { buffer.release(); ByteBuf buf = buffers[i]; if (buf.isReadable()) { return new CompositeByteBuf(ALLOC, false, maxNumComponents, buffers, i); buf.release();
// NOTE(review): truncated search-result snippet — the null-check guarding the `break`
// (presumably `if (partCompressedContent == null)`) and several closing braces are
// elided; not compilable as shown. Visible behavior: feed retained content through an
// embedded encoder, drain its outbound queue into a composite (releasing empty parts),
// and release the composite before throwing CodecException if nothing was produced —
// no leak on the error path. On success the trailing FRAME_TAIL bytes are stripped by
// slicing [0, readable - FRAME_TAIL.length); note slice() shares memory with (and does
// not retain) fullCompressedContent.
encoder.writeOutbound(msg.content().retain()); CompositeByteBuf fullCompressedContent = ctx.alloc().compositeBuffer(); for (;;) { ByteBuf partCompressedContent = encoder.readOutbound(); break; if (!partCompressedContent.isReadable()) { partCompressedContent.release(); continue; fullCompressedContent.addComponent(true, partCompressedContent); if (fullCompressedContent.numComponents() <= 0) { fullCompressedContent.release(); throw new CodecException("cannot read compressed buffer"); int realLength = fullCompressedContent.readableBytes() - FRAME_TAIL.length; compressedContent = fullCompressedContent.slice(0, realLength); } else { compressedContent = fullCompressedContent;
@Override public void call(CompositeByteBuf collector, ByteBuf buf) { long newLength = collector.readableBytes() + buf.readableBytes(); if (newLength <= maxBytes) { collector.addComponent(true, buf); } else { collector.release(); buf.release(); throw new TooMuchDataException("More than " + maxBytes + "B received"); } } }
/**
 * Flattens a composite head buffer into a single direct buffer so downstream code
 * sees one contiguous region; non-composite buffers pass through untouched.
 * If the copy fails, the partially-written direct buffer is released and the
 * original failure is rethrown (the composite is only released on success).
 */
@Override
protected ByteBuf composeFirst(ByteBufAllocator allocator, ByteBuf first) {
    if (!(first instanceof CompositeByteBuf)) {
        return first;
    }
    CompositeByteBuf source = (CompositeByteBuf) first;
    ByteBuf flattened = allocator.directBuffer(source.readableBytes());
    try {
        flattened.writeBytes(source);
    } catch (Throwable cause) {
        // Free the copy before propagating; throwException rethrows `cause` unchecked.
        flattened.release();
        PlatformDependent.throwException(cause);
    }
    source.release();
    return flattened;
}
/**
 * Streams inbound frames: every raw payload is prefixed with its encoded length
 * (FRAME_LENGTH_SIZE bytes) inside a composite buffer and wrapped as a {@code Frame}.
 * The payload is retained because the composite takes its own reference to it.
 */
@Override
public Flux<Frame> receive() {
    return connection
        .inbound()
        .receive()
        .map(
            byteBuf -> {
              // Build the length prefix first, then stitch prefix + payload together.
              ByteBuf lengthPrefix = wrappedBuffer(new byte[FRAME_LENGTH_SIZE]);
              FrameHeaderFlyweight.encodeLength(lengthPrefix, 0, byteBuf.readableBytes());
              CompositeByteBuf framed = connection.channel().alloc().compositeBuffer();
              framed.addComponents(true, lengthPrefix, byteBuf.retain());
              return Frame.from(framed);
            });
}
// NOTE(review): truncated search-result snippet — the condition separating the cast
// branch (`pending = (CompositeByteBuf) pendingChunks`) from the create-new branch is
// elided (presumably `if (pendingChunks instanceof CompositeByteBuf)`), and braces do
// not balance; not compilable as shown. Visible behavior: on the final write, set
// Content-Length from the buffer when the response is not chunked and no length was
// set explicitly; accumulate outgoing data by either reusing the existing composite of
// pending chunks or promoting the single pending buffer into a fresh composite before
// appending the new buffer. `written` tracks total bytes for the headers/accounting.
if (end) { if (buff != null && !chunked && !contentLengthSet()) { headers().set(CONTENT_LENGTH, String.valueOf(buff.readableBytes())); written += buff.readableBytes(); pending = (CompositeByteBuf) pendingChunks; } else { pending = Unpooled.compositeBuffer(); pending.addComponent(true, pendingChunks); pendingChunks = pending; pending.addComponent(true, buff);