/**
 * Attempts to fetch a pooled channel matching this request and verifies that
 * its pipeline is still suitable before handing it back.
 *
 * @return a cached channel, or {@code null} when the pool has none for this partition
 */
private Channel pollAndVerifyCachedChannel(Request request, ProxyServer proxy, AsyncHandler<?> asyncHandler) {

    // Notify extension-aware handlers that we are about to hit the connection pool.
    if (asyncHandler instanceof AsyncHandlerExtensions)
        ((AsyncHandlerExtensions) asyncHandler).onConnectionPool();

    Uri uri = request.getUri();
    String virtualHost = request.getVirtualHost();
    Channel pooled = channelManager.poll(uri, virtualHost, proxy, request.getConnectionPoolPartitioning());

    if (pooled != null) {
        LOGGER.debug("Using cached Channel {}\n for uri {}\n", pooled, uri);
        try {
            channelManager.verifyChannelPipeline(pooled.pipeline(), uri, virtualHost);
        } catch (Exception ex) {
            // Verification failure is treated as non-fatal here: log at debug and
            // return the channel anyway, matching the original behavior.
            LOGGER.debug(ex.getMessage(), ex);
        }
    }
    return pooled;
}
public Channel pollAndVerifyCachedChannel(Request request, ProxyServer proxy, AsyncHandler<?> asyncHandler) { if (asyncHandler instanceof AsyncHandlerExtensions) AsyncHandlerExtensions.class.cast(asyncHandler).onConnectionPool(); Uri uri = request.getUri(); String virtualHost = request.getVirtualHost(); final Channel channel = channelManager.poll(uri, virtualHost, proxy, request.getConnectionPoolPartitioning()); if (channel != null) { LOGGER.debug("Using cached Channel {}\n for uri {}\n", channel, uri); try { // Always make sure the channel who got cached support the // proper protocol. It could // only occurs when a HttpMethod.CONNECT is used against a proxy // that requires upgrading from http to // https. channelManager.verifyChannelPipeline(channel.getPipeline(), uri, virtualHost); } catch (Exception ex) { LOGGER.debug(ex.getMessage(), ex); } } return channel; }
private <T> NettyResponseFuture<T> newNettyResponseFuture(Uri uri, Request request, AsyncHandler<T> asyncHandler, NettyRequest nettyRequest, ProxyServer proxyServer) { NettyResponseFuture<T> future = new NettyResponseFuture<>(// uri,// request,// asyncHandler,// nettyRequest,// config.getMaxRequestRetry(),// request.getConnectionPoolPartitioning(),// proxyServer); String expectHeader = request.getHeaders().getFirstValue(HttpHeaders.Names.EXPECT); if (expectHeader != null && expectHeader.equalsIgnoreCase(HttpHeaders.Values.CONTINUE)) future.setDontWriteBodyBecauseExpectContinue(true); return future; }
private <T> NettyResponseFuture<T> newNettyResponseFuture(Request request, AsyncHandler<T> asyncHandler, NettyRequest nettyRequest, ProxyServer proxyServer) { NettyResponseFuture<T> future = new NettyResponseFuture<>(// request,// asyncHandler,// nettyRequest,// config.getMaxRequestRetry(),// request.getConnectionPoolPartitioning(),// proxyServer); String expectHeader = request.getHeaders().getFirstValue(HttpHeaders.Names.EXPECT); if (expectHeader != null && expectHeader.equalsIgnoreCase(HttpHeaders.Values.CONTINUE)) future.setDontWriteBodyBecauseExpectContinue(true); return future; }
private <T> NettyResponseFuture<T> newNettyResponseFuture(Request request, AsyncHandler<T> asyncHandler, NettyRequest nettyRequest, ProxyServer proxyServer) { NettyResponseFuture<T> future = new NettyResponseFuture<>(// request,// asyncHandler,// nettyRequest,// config.getMaxRequestRetry(),// request.getConnectionPoolPartitioning(),// proxyServer); String expectHeader = request.getHeaders().getFirstValue(HttpHeaders.Names.EXPECT); if (expectHeader != null && expectHeader.equalsIgnoreCase(HttpHeaders.Values.CONTINUE)) future.setDontWriteBodyBecauseExpectContinue(true); return future; }
private <T> NettyResponseFuture<T> newNettyResponseFuture(Uri uri, Request request, AsyncHandler<T> asyncHandler, NettyRequest nettyRequest, ProxyServer proxyServer) { NettyResponseFuture<T> future = new NettyResponseFuture<>(// uri,// request,// asyncHandler,// nettyRequest,// config.getMaxRequestRetry(),// request.getConnectionPoolPartitioning(),// proxyServer); String expectHeader = request.getHeaders().getFirstValue(HttpHeaders.Names.EXPECT); if (expectHeader != null && expectHeader.equalsIgnoreCase(HttpHeaders.Values.CONTINUE)) future.setDontWriteBodyBecauseExpectContinue(true); return future; }
/** * We know for sure if we have to force to connect or not, so we can build * the HttpRequest right away This reduces the probability of having a * pooled channel closed by the server by the time we build the request */ private <T> ListenableFuture<T> sendRequestWithCertainForceConnect(// Request request,// AsyncHandler<T> asyncHandler,// NettyResponseFuture<T> future,// boolean reclaimCache,// Uri uri,// ProxyServer proxyServer,// boolean useProxy,// boolean forceConnect) throws IOException { NettyResponseFuture<T> newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, forceConnect); Channel channel = getCachedChannel(future, uri, request.getConnectionPoolPartitioning(), proxyServer, asyncHandler); if (Channels.isChannelValid(channel)) return sendRequestWithCachedChannel(request, uri, proxyServer, newFuture, asyncHandler, channel); else return sendRequestWithNewChannel(request, uri, proxyServer, useProxy, newFuture, asyncHandler, reclaimCache); }
/** * We know for sure if we have to force to connect or not, so we can * build the HttpRequest right away * This reduces the probability of having a pooled channel closed by the * server by the time we build the request */ private <T> ListenableFuture<T> sendRequestWithCertainForceConnect(// Request request,// AsyncHandler<T> asyncHandler,// NettyResponseFuture<T> future,// boolean reclaimCache,// Uri uri,// ProxyServer proxyServer,// boolean useProxy,// boolean forceConnect) throws IOException { NettyResponseFuture<T> newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, forceConnect); Channel channel = getCachedChannel(future, uri, request.getConnectionPoolPartitioning(), proxyServer, asyncHandler); if (Channels.isChannelValid(channel)) return sendRequestWithCachedChannel(request, uri, proxyServer, newFuture, asyncHandler, channel); else return sendRequestWithNewChannel(request, uri, proxyServer, useProxy, newFuture, asyncHandler, reclaimCache); }
/**
 * Using CONNECT depends on whether we can fetch a valid channel or not. Loop
 * until we get a valid channel from the pool and it's still valid once the
 * request is built.
 *
 * <p>NOTE(review): the two isChannelValid checks are deliberately NOT merged —
 * building the request happens between them, and a pooled channel may be closed
 * concurrently in the meantime; the second check re-validates after the build.
 *
 * @throws IOException if the request cannot be written out
 */
@SuppressWarnings("unused")
private <T> ListenableFuture<T> sendRequestThroughSslProxy(//
        Request request,//
        AsyncHandler<T> asyncHandler,//
        NettyResponseFuture<T> future,//
        boolean reclaimCache,//
        Uri uri,//
        ProxyServer proxyServer) throws IOException {

    NettyResponseFuture<T> newFuture = null;
    for (int i = 0; i < 3; i++) {
        Channel channel = getCachedChannel(future, uri, request.getConnectionPoolPartitioning(), proxyServer, asyncHandler);
        if (Channels.isChannelValid(channel))
            // build the non-CONNECT request lazily, only once across iterations
            if (newFuture == null)
                newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, false);

        if (Channels.isChannelValid(channel))
            // if the channel is still active, we can use it, otherwise try again
            return sendRequestWithCachedChannel(request, uri, proxyServer, newFuture, asyncHandler, channel);
        else
            // pool is empty
            break;
    }

    // no valid pooled channel: open a new connection and force CONNECT
    newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, true);
    return sendRequestWithNewChannel(request, uri, proxyServer, true, newFuture, asyncHandler, reclaimCache);
}
/**
 * Using CONNECT depends on whether we can fetch a valid channel or not. Loop
 * until we get a valid channel from the pool and it's still valid once the
 * request is built.
 *
 * <p>NOTE(review): validity is intentionally re-checked after the request is
 * built (the channel may be closed concurrently in between), so the duplicated
 * isChannelValid calls must not be collapsed into one.
 *
 * @throws IOException if the request cannot be written out
 */
@SuppressWarnings("unused")
private <T> ListenableFuture<T> sendRequestThroughSslProxy(//
        Request request,//
        AsyncHandler<T> asyncHandler,//
        NettyResponseFuture<T> future,//
        boolean reclaimCache,//
        Uri uri,//
        ProxyServer proxyServer) throws IOException {

    NettyResponseFuture<T> newFuture = null;
    for (int i = 0; i < 3; i++) {
        Channel channel = getCachedChannel(future, uri, request.getConnectionPoolPartitioning(), proxyServer, asyncHandler);
        if (Channels.isChannelValid(channel))
            // build the non-CONNECT request lazily, only once across iterations
            if (newFuture == null)
                newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, false);

        if (Channels.isChannelValid(channel))
            // if the channel is still active, we can use it, otherwise try again
            return sendRequestWithCachedChannel(request, uri, proxyServer, newFuture, asyncHandler, channel);
        else
            // pool is empty
            break;
    }

    // no valid pooled channel: open a new connection and force CONNECT
    newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, uri, proxyServer, true);
    return sendRequestWithNewChannel(request, uri, proxyServer, true, newFuture, asyncHandler, reclaimCache);
}
/**
 * Copy constructor: snapshots the state of {@code prototype} into this request.
 *
 * <p>Headers, cookies, form params and parts are defensively copied into new
 * collections; every other field is a shared reference to the prototype's value.
 * A {@code null} prototype leaves all fields at their defaults.
 */
public RequestImpl(Request prototype) {
    if (prototype != null) {
        this.method = prototype.getMethod();
        this.uri = prototype.getUri();
        this.address = prototype.getInetAddress();
        this.localAddress = prototype.getLocalAddress();
        // defensive copies: later mutation of the prototype must not leak into this request
        this.headers = new FluentCaseInsensitiveStringsMap(prototype.getHeaders());
        this.cookies = new ArrayList<>(prototype.getCookies());
        this.byteData = prototype.getByteData();
        this.compositeByteData = prototype.getCompositeByteData();
        this.stringData = prototype.getStringData();
        this.byteBufferData = prototype.getByteBufferData();
        this.streamData = prototype.getStreamData();
        this.bodyGenerator = prototype.getBodyGenerator();
        // form params and parts may legitimately be null (no body of that kind),
        // so null is preserved rather than replaced with an empty list
        this.formParams = prototype.getFormParams() == null ? null : new ArrayList<>(prototype.getFormParams());
        this.parts = prototype.getParts() == null ? null : new ArrayList<>(prototype.getParts());
        this.virtualHost = prototype.getVirtualHost();
        this.length = prototype.getContentLength();
        this.proxyServer = prototype.getProxyServer();
        this.realm = prototype.getRealm();
        this.file = prototype.getFile();
        this.followRedirect = prototype.getFollowRedirect();
        this.requestTimeout = prototype.getRequestTimeout();
        this.rangeOffset = prototype.getRangeOffset();
        this.charset = prototype.getBodyCharset();
        this.connectionPoolPartitioning = prototype.getConnectionPoolPartitioning();
        this.nameResolver = prototype.getNameResolver();
    }
}
.setConnectionPoolPartitioning(request.getConnectionPoolPartitioning())// .setFollowRedirect(true)//
.setConnectionPoolPartitioning(request.getConnectionPoolPartitioning())// .setFollowRedirect(true)//
.setConnectionPoolPartitioning(request.getConnectionPoolPartitioning())// .setFollowRedirect(true)//
.setConnectionPoolPartitioning(request.getConnectionPoolPartitioning())// .setFollowRedirect(true)//