/**
 * Adds {@code name}={@code value} as a request parameter, silently skipping
 * null or empty values, and returns {@code this} for chaining.
 *
 * @param name the parameter name
 * @param value the parameter value; ignored when null or empty
 * @return this {@code Params} instance
 */
Params putParam(String name, String value) {
    if (Strings.hasLength(value) == false) {
        return this;
    }
    request.addParameter(name, value);
    return this;
}
/**
 * Copies every entry of {@code parameters} onto the given {@link Request} as
 * a query-string parameter. This only exists to support methods that exist
 * for backwards compatibility.
 *
 * @param request the request receiving the parameters
 * @param parameters the parameters to copy; must not be null
 */
@Deprecated
private static void addParameters(Request request, Map<String, String> parameters) {
    Objects.requireNonNull(parameters, "parameters cannot be null");
    parameters.forEach(request::addParameter);
}
}
/**
 * Registers a single query-string parameter on the underlying request.
 * Empty and null values are dropped rather than sent.
 *
 * @param name the parameter name
 * @param value the parameter value; only added when non-empty
 * @return this {@code Params} for fluent chaining
 */
Params putParam(String name, String value) {
    boolean hasValue = Strings.hasLength(value);
    if (hasValue) {
        request.addParameter(name, value);
    }
    return this;
}
/**
 * Creates a new instance of the Elasticsearch sniffer. It will use the provided {@link RestClient} to fetch the hosts
 * through the nodes info api, the provided sniff request timeout value and scheme.
 *
 * @param restClient client used to fetch the hosts from elasticsearch through nodes info api. Usually the same instance
 *                   that is also provided to {@link Sniffer#builder(RestClient)}, so that the hosts are set to the same
 *                   client that was used to sniff them.
 * @param sniffRequestTimeoutMillis the sniff request timeout (in milliseconds) to be passed in as a query string parameter
 *                                  to elasticsearch. Allows to halt the request without any failure, as only the nodes
 *                                  that have responded within this timeout will be returned.
 * @param scheme the scheme to associate sniffed nodes with (as it is not returned by elasticsearch)
 * @throws IllegalArgumentException if {@code sniffRequestTimeoutMillis} is negative
 * @throws NullPointerException if {@code restClient} or {@code scheme} is null
 */
public ElasticsearchNodesSniffer(RestClient restClient, long sniffRequestTimeoutMillis, Scheme scheme) {
    this.restClient = Objects.requireNonNull(restClient, "restClient cannot be null");
    if (sniffRequestTimeoutMillis < 0) {
        // BUG FIX: the check rejects only negative values, but the old message
        // claimed the value "must be greater than 0", which contradicted the
        // condition (0 is accepted). The message now matches the check.
        throw new IllegalArgumentException("sniffRequestTimeoutMillis must not be negative");
    }
    this.request = new Request("GET", "/_nodes/http");
    // The timeout is forwarded to elasticsearch as a query-string duration, e.g. "1000ms".
    request.addParameter("timeout", sniffRequestTimeoutMillis + "ms");
    this.scheme = Objects.requireNonNull(scheme, "scheme cannot be null");
}
/**
 * Creates a new instance of the Elasticsearch sniffer. It will use the provided {@link RestClient} to fetch the hosts
 * through the nodes info api, the provided sniff request timeout value and scheme.
 *
 * @param restClient client used to fetch the hosts from elasticsearch through nodes info api. Usually the same instance
 *                   that is also provided to {@link Sniffer#builder(RestClient)}, so that the hosts are set to the same
 *                   client that was used to sniff them.
 * @param sniffRequestTimeoutMillis the sniff request timeout (in milliseconds) to be passed in as a query string parameter
 *                                  to elasticsearch. Allows to halt the request without any failure, as only the nodes
 *                                  that have responded within this timeout will be returned.
 * @param scheme the scheme to associate sniffed nodes with (as it is not returned by elasticsearch)
 * @throws IllegalArgumentException if {@code sniffRequestTimeoutMillis} is negative
 * @throws NullPointerException if {@code restClient} or {@code scheme} is null
 */
public ElasticsearchNodesSniffer(RestClient restClient, long sniffRequestTimeoutMillis, Scheme scheme) {
    this.restClient = Objects.requireNonNull(restClient, "restClient cannot be null");
    if (sniffRequestTimeoutMillis < 0) {
        // BUG FIX: the check rejects only negative values, but the old message
        // claimed the value "must be greater than 0", which contradicted the
        // condition (0 is accepted). The message now matches the check.
        throw new IllegalArgumentException("sniffRequestTimeoutMillis must not be negative");
    }
    this.request = new Request("GET", "/_nodes/http");
    // The timeout is forwarded to elasticsearch as a query-string duration, e.g. "1000ms".
    request.addParameter("timeout", sniffRequestTimeoutMillis + "ms");
    this.scheme = Objects.requireNonNull(scheme, "scheme cannot be null");
}
/**
 * Converts an {@link XPackInfoRequest} into a low-level {@link Request}
 * against the {@code /_xpack} endpoint.
 *
 * <p>When the request is not verbose, {@code human=false} is sent so the
 * server omits the human-readable extras. When the requested categories are
 * anything other than the full set, they are sent explicitly as a
 * comma-separated, lower-cased list.
 *
 * @param infoRequest the high-level info request to translate
 * @return the equivalent low-level request
 */
static Request info(XPackInfoRequest infoRequest) {
    Request request = new Request(HttpGet.METHOD_NAME, "/_xpack");
    if (infoRequest.isVerbose() == false) {
        request.addParameter("human", "false");
    }
    EnumSet<XPackInfoRequest.Category> everyCategory = EnumSet.allOf(XPackInfoRequest.Category.class);
    if (infoRequest.getCategories().equals(everyCategory) == false) {
        String joined = infoRequest.getCategories().stream()
            .map(category -> category.toString().toLowerCase(Locale.ROOT))
            .collect(Collectors.joining(","));
        request.addParameter("categories", joined);
    }
    return request;
}
/**
 * Builds the low-level {@code GET /_xpack} {@link Request} corresponding to
 * the given {@link XPackInfoRequest}.
 *
 * <p>Non-verbose requests add {@code human=false}; a category set narrower
 * than the complete enum is serialized as a lower-case comma-joined
 * {@code categories} parameter.
 *
 * @param infoRequest the request to translate
 * @return the low-level request to execute
 */
static Request info(XPackInfoRequest infoRequest) {
    Request request = new Request(HttpGet.METHOD_NAME, "/_xpack");
    boolean verbose = infoRequest.isVerbose();
    if (verbose == false) {
        request.addParameter("human", "false");
    }
    boolean allCategoriesRequested =
        infoRequest.getCategories().equals(EnumSet.allOf(XPackInfoRequest.Category.class));
    if (allCategoriesRequested == false) {
        request.addParameter(
            "categories",
            infoRequest.getCategories().stream()
                .map(c -> c.toString().toLowerCase(Locale.ROOT))
                .collect(Collectors.joining(",")));
    }
    return request;
}
/**
 * Translates an {@code ElasticsearchRequest} into the low-level client's
 * {@link Request}: same method and path, every parameter copied over, and the
 * supplied entity attached as the body.
 *
 * @param elasticsearchRequest the request to translate
 * @param entity the HTTP body to attach (may carry the serialized payload)
 * @return the equivalent low-level request
 */
private static Request toRequest(ElasticsearchRequest elasticsearchRequest, HttpEntity entity) {
    Request request = new Request(elasticsearchRequest.getMethod(), elasticsearchRequest.getPath());
    elasticsearchRequest.getParameters().forEach(request::addParameter);
    request.setEntity(entity);
    return request;
}
/**
 * Adapts an {@code ElasticsearchRequest} to the low-level {@link Request}
 * type, preserving method, path, and all query parameters, then attaching
 * the given entity as the request body.
 *
 * @param elasticsearchRequest the source request
 * @param entity the body entity to set on the resulting request
 * @return the translated low-level request
 */
private static Request toRequest(ElasticsearchRequest elasticsearchRequest, HttpEntity entity) {
    String method = elasticsearchRequest.getMethod();
    String path = elasticsearchRequest.getPath();
    Request request = new Request(method, path);
    for (Map.Entry<String, String> parameter : elasticsearchRequest.getParameters().entrySet()) {
        request.addParameter(parameter.getKey(), parameter.getValue());
    }
    request.setEntity(entity);
    return request;
}
// NOTE(review): isolated statement — adds a single query-string parameter to
// the request. The enclosing definition is not visible in this chunk, so the
// provenance of `request`, `name`, and `value` cannot be confirmed here.
request.addParameter(name, value);
/**
 * Builds the request that continues an existing scroll against a remote
 * cluster, adapting both the keep-alive resolution and the scroll_id
 * transport to the remote cluster's version.
 *
 * @param scroll the scroll id returned by the previous search/scroll call
 * @param keepAlive how long the remote should keep the scroll context alive
 * @param remoteVersion the version of the remote cluster, used to pick a
 *        wire format it can understand
 * @return the request to execute against {@code POST /_search/scroll}
 * @throws ElasticsearchException if serializing the scroll_id body fails
 */
static Request scroll(String scroll, TimeValue keepAlive, Version remoteVersion) {
    Request request = new Request("POST", "/_search/scroll");
    if (remoteVersion.before(Version.V_5_0_0)) {
        /* Versions of Elasticsearch before 5.0 couldn't parse nanos or micros
         * so we toss out that resolution, rounding up so we shouldn't end up
         * with 0s. */
        keepAlive = timeValueMillis((long) Math.ceil(keepAlive.millisFrac()));
    }
    // Keep-alive is always sent as a query-string duration (e.g. "30s").
    request.addParameter("scroll", keepAlive.getStringRep());
    // 2000099 is the internal id for version 2.0.0.
    if (remoteVersion.before(Version.fromId(2000099))) {
        // Versions before 2.0.0 extract the plain scroll_id from the body
        request.setEntity(new NStringEntity(scroll, ContentType.TEXT_PLAIN));
        return request;
    }
    // 2.0.0+ expects a JSON body of the form {"scroll_id": "..."}.
    try (XContentBuilder entity = JsonXContent.contentBuilder()) {
        entity.startObject()
            .field("scroll_id", scroll)
            .endObject();
        request.setJsonEntity(Strings.toString(entity));
    } catch (IOException e) {
        throw new ElasticsearchException("failed to build scroll entity", e);
    }
    return request;
}
request.addParameter("scroll", keepAlive.getStringRep()); request.addParameter("size", Integer.toString(searchRequest.source().size())); if (searchRequest.source().version() == null || searchRequest.source().version() == true) { request.addParameter("version", null); request.addParameter("search_type", "scan"); } else { StringBuilder sorts = new StringBuilder(sortToUri(searchRequest.source().sorts().get(0))); sorts.append(',').append(sortToUri(searchRequest.source().sorts().get(i))); request.addParameter("sort", sorts.toString()); request.addParameter(storedFieldsParamName, fields.toString());