private List<IdleChannel> expiredChannels(ConcurrentLinkedQueue<IdleChannel> partition, long now) { // lazy create List<IdleChannel> idleTimeoutChannels = null; for (IdleChannel idleChannel : partition) { if (isTTLExpired(idleChannel.channel, now) || isIdleTimeoutExpired(idleChannel, now) || !Channels.isChannelValid(idleChannel.channel)) { LOGGER.debug("Adding Candidate expired Channel {}", idleChannel.channel); if (idleTimeoutChannels == null) idleTimeoutChannels = new ArrayList<>(); idleTimeoutChannels.add(idleChannel); } } return idleTimeoutChannels != null ? idleTimeoutChannels : Collections.<IdleChannel> emptyList(); }
public Channel poll(Object partitionKey) { IdleChannel idleChannel = null; ConcurrentLinkedQueue<IdleChannel> partition = partitions.get(partitionKey); if (partition != null) { while (idleChannel == null) { idleChannel = partition.poll(); if (idleChannel == null) { // pool is empty break; } else if (!Channels.isChannelValid(idleChannel.channel)) { idleChannel = null; LOGGER.trace("Channel not connected or not opened, probably remotely closed!"); } else if (!idleChannel.takeOwnership()) { idleChannel = null; LOGGER.trace("Couldn't take ownership of channel, probably in the process of being expired!"); } } } return idleChannel != null ? idleChannel.channel : null; }
/**
 * Return true if the {@link Future} can be recovered. There is some scenario where a connection can be closed by an
 * unexpected IOException, and in some situation we can recover from that exception.
 *
 * @return true if that {@link Future} can be recovered.
 */
public boolean canBeReplayed() {
    // Replayable only when: not already completed, retries remain, not in the middle of an
    // auth exchange, and the channel is either already invalid or the scheme is https
    // (i.e. a still-valid plain-http channel is NOT replayed).
    return !isDone() //
            && canRetry() //
            && !(Channels.isChannelValid(channel) && !uri.getScheme().equalsIgnoreCase("https")) //
            && !isInAuth();
}
/**
 * Picks a channel for the request: the future's own channel when it is flagged for reuse and
 * still valid, otherwise one polled from the connection pool.
 */
private Channel getCachedChannel(NettyResponseFuture<?> future, Uri uri, ConnectionPoolPartitioning partitioning, ProxyServer proxyServer, AsyncHandler<?> asyncHandler) {
    boolean reusable = future != null && future.reuseChannel() && Channels.isChannelValid(future.channel());
    return reusable ? future.channel() : pollAndVerifyCachedChannel(uri, proxyServer, partitioning, asyncHandler);
}
/**
 * Using CONNECT depends on whether we can fetch a valid channel or not.
 * Loop until we get a valid channel from the pool and it's still valid
 * once the request is built.
 */
@SuppressWarnings("unused")
private <T> ListenableFuture<T> sendRequestThroughSslProxy(//
        Request request,//
        AsyncHandler<T> asyncHandler,//
        NettyResponseFuture<T> future,//
        boolean reclaimCache,//
        Uri uri,//
        ProxyServer proxyServer) throws IOException {

    NettyResponseFuture<T> newFuture = null;
    for (int i = 0; i < 3; i++) {
        Channel channel = getCachedChannel(future, uri, request.getConnectionPoolPartitioning(), proxyServer, asyncHandler);
        // build the request/future lazily, only once a candidate channel exists
        if (Channels.isChannelValid(channel))
            if (newFuture == null)
                newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, false);

        // validity is re-checked here on purpose: building the request above takes time, during
        // which a pooled channel may have been closed by the remote server
        if (Channels.isChannelValid(channel))
            // if the channel is still active, we can use it, otherwise try again
            return sendRequestWithCachedChannel(request, uri, proxyServer, newFuture, asyncHandler, channel);
        else
            // pool is empty
            break;
    }

    // no usable pooled channel after up to 3 attempts: open a fresh one, with forceConnect=true
    newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, true);
    return sendRequestWithNewChannel(request, uri, proxyServer, true, newFuture, asyncHandler, reclaimCache);
}
future.getNettyRequest().getHttpRequest().getUri()); if (Channels.isChannelValid(channel)) { Channels.setAttribute(channel, future);
/** * We know for sure if we have to force to connect or not, so we can * build the HttpRequest right away * This reduces the probability of having a pooled channel closed by the * server by the time we build the request */ private <T> ListenableFuture<T> sendRequestWithCertainForceConnect(// Request request,// AsyncHandler<T> asyncHandler,// NettyResponseFuture<T> future,// boolean reclaimCache,// Uri uri,// ProxyServer proxyServer,// boolean useProxy,// boolean forceConnect) throws IOException { NettyResponseFuture<T> newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, forceConnect); Channel channel = getCachedChannel(future, uri, request.getConnectionPoolPartitioning(), proxyServer, asyncHandler); if (Channels.isChannelValid(channel)) return sendRequestWithCachedChannel(request, uri, proxyServer, newFuture, asyncHandler, channel); else return sendRequestWithNewChannel(request, uri, proxyServer, useProxy, newFuture, asyncHandler, reclaimCache); }
/**
 * Writes the request headers (unless already written on a 100-Continue) and body onto the
 * channel, then schedules the request timeouts.
 *
 * @param future  holds the prepared {@link NettyRequest} and the user's {@link AsyncHandler}
 * @param channel the channel to write to; silently skipped when no longer valid
 */
public <T> void writeRequest(NettyResponseFuture<T> future, Channel channel) {

    NettyRequest nettyRequest = future.getNettyRequest();
    HttpRequest httpRequest = nettyRequest.getHttpRequest();
    AsyncHandler<T> handler = future.getAsyncHandler();

    // if the channel is dead because it was pooled and the remote server decided to close it,
    // we just let it go and the channelInactive do its work
    if (!Channels.isChannelValid(channel))
        return;

    try {
        if (handler instanceof TransferCompletionHandler)
            configureTransferAdapter(handler, httpRequest);

        if (!future.isHeadersAlreadyWrittenOnContinue()) {
            // notify extension handlers before the headers hit the wire
            if (future.getAsyncHandler() instanceof AsyncHandlerExtensions)
                AsyncHandlerExtensions.class.cast(future.getAsyncHandler()).onSendRequest(nettyRequest);

            channel.write(httpRequest).addListener(new ProgressListener(config, future.getAsyncHandler(), future, true));
        }

        // body is skipped for CONNECT and while waiting on an Expect: 100-Continue response
        if (nettyRequest.getBody() != null && !future.isDontWriteBodyBecauseExpectContinue() && httpRequest.getMethod() != HttpMethod.CONNECT)
            nettyRequest.getBody().write(channel, future, config);

        // don't bother scheduling timeouts if channel became invalid
        if (Channels.isChannelValid(channel))
            scheduleTimeouts(future);

    } catch (Throwable t) {
        LOGGER.error("Can't write request", t);
        Channels.silentlyCloseChannel(channel);
    }
}
public Channel poll(String poolKey) { if (!sslConnectionPoolEnabled && poolKey.startsWith("https")) return null; IdleChannel idleChannel = null; ConcurrentLinkedQueue<IdleChannel> pooledConnectionForKey = poolsPerKey.get(poolKey); if (pooledConnectionForKey != null) { while (idleChannel == null) { idleChannel = pooledConnectionForKey.poll(); if (idleChannel == null) // pool is empty break; else if (!Channels.isChannelValid(idleChannel.channel)) { idleChannel = null; LOGGER.trace("Channel not connected or not opened, probably remotely closed!"); } } } return idleChannel != null ? idleChannel.channel : null; }
private List<IdleChannel> expiredChannels(ConcurrentLinkedQueue<IdleChannel> pool, long now) { // lazy create List<IdleChannel> idleTimeoutChannels = null; for (IdleChannel idleChannel : pool) { if (isTTLExpired(idleChannel.channel, now) || isIdleTimeoutExpired(idleChannel, now) || !Channels.isChannelValid(idleChannel.channel)) { LOGGER.debug("Adding Candidate expired Channel {}", idleChannel.channel); if (idleTimeoutChannels == null) idleTimeoutChannels = new ArrayList<IdleChannel>(); idleTimeoutChannels.add(idleChannel); } } return idleTimeoutChannels != null ? idleTimeoutChannels : Collections.<IdleChannel> emptyList(); }
/**
 * Return true if the {@link Future} can be recovered. There is some scenario where a connection can be closed by an
 * unexpected IOException, and in some situation we can recover from that exception.
 *
 * @return true if that {@link Future} can be recovered.
 */
public boolean canBeReplayed() {
    // Replayable only when: not already completed, retries remain, not in the middle of an
    // auth exchange, and the channel is either already invalid or the scheme is https
    // (i.e. a still-valid plain-http channel is NOT replayed).
    return !isDone() //
            && canRetry() //
            && !(Channels.isChannelValid(channel) && !uri.getScheme().equalsIgnoreCase("https")) //
            && !isInAuth();
}
/**
 * Picks a channel for the request: the future's own channel when it is flagged for reuse and
 * still valid, otherwise one polled from the connection pool.
 */
private Channel getCachedChannel(NettyResponseFuture<?> future, UriComponents uri, ConnectionPoolKeyStrategy poolKeyGen, ProxyServer proxyServer) {
    boolean reusable = future != null && future.reuseChannel() && Channels.isChannelValid(future.channel());
    return reusable ? future.channel() : pollAndVerifyCachedChannel(uri, proxyServer, poolKeyGen);
}
/**
 * Using CONNECT depends on whether we can fetch a valid channel or not.
 * Loop until we get a valid channel from the pool and it's still valid
 * once the request is built.
 */
@SuppressWarnings("unused")
private <T> ListenableFuture<T> sendRequestThroughSslProxy(//
        Request request,//
        AsyncHandler<T> asyncHandler,//
        NettyResponseFuture<T> future,//
        boolean reclaimCache,//
        UriComponents uri,//
        ProxyServer proxyServer) throws IOException {

    NettyResponseFuture<T> newFuture = null;
    for (int i = 0; i < 3; i++) {
        Channel channel = getCachedChannel(future, uri, request.getConnectionPoolKeyStrategy(), proxyServer);
        // build the request/future lazily, only once a candidate channel exists
        if (Channels.isChannelValid(channel))
            if (newFuture == null)
                newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, false);

        // validity is re-checked here on purpose: building the request above takes time, during
        // which a pooled channel may have been closed by the remote server
        if (Channels.isChannelValid(channel))
            // if the channel is still active, we can use it, otherwise try again
            return sendRequestWithCachedChannel(request, uri, proxyServer, newFuture, asyncHandler, channel);
        else
            // pool is empty
            break;
    }

    // no usable pooled channel after up to 3 attempts: open a fresh one, with forceConnect=true
    newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, true);
    return sendRequestWithNewChannel(request, uri, proxyServer, true, newFuture, asyncHandler, reclaimCache);
}
/** * We know for sure if we have to force to connect or not, so we can * build the HttpRequest right away * This reduces the probability of having a pooled channel closed by the * server by the time we build the request */ private <T> ListenableFuture<T> sendRequestWithCertainForceConnect(// Request request,// AsyncHandler<T> asyncHandler,// NettyResponseFuture<T> future,// boolean reclaimCache,// UriComponents uri,// ProxyServer proxyServer,// boolean useProxy,// boolean forceConnect) throws IOException { NettyResponseFuture<T> newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, forceConnect); Channel channel = getCachedChannel(future, uri, request.getConnectionPoolKeyStrategy(), proxyServer); if (Channels.isChannelValid(channel)) return sendRequestWithCachedChannel(request, uri, proxyServer, newFuture, asyncHandler, channel); else return sendRequestWithNewChannel(request, uri, proxyServer, useProxy, newFuture, asyncHandler, reclaimCache); }
if (!Channels.isChannelValid(channel)) return;