Refine search
/**
 * Returns the candidate server locations for the POSTed query's datasource and
 * intervals, without executing the query itself.
 *
 * @param in            request body holding a JSON/Smile-serialized query
 * @param pretty        when non-null, the response is pretty-printed
 * @param numCandidates cap on returned candidates; default -1 (presumably "unlimited" — confirm in ServerViewUtil)
 * @param req           incoming HTTP request, used only for its content type
 */
@POST
@Path("/candidates")
@Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
@Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE})
@ResourceFilters(StateResourceFilter.class)
public Response getQueryTargets(
    InputStream in,
    @QueryParam("pretty") String pretty,
    @QueryParam("numCandidates") @DefaultValue("-1") int numCandidates,
    @Context final HttpServletRequest req
) throws IOException
{
  final ResponseContext context = createContext(req.getContentType(), pretty != null);
  try {
    // Deserialize with the content-type-appropriate mapper, then look up targets.
    final Query<?> parsedQuery = context.getObjectMapper().readValue(in, Query.class);
    return context.ok(
        ServerViewUtil.getTargetLocations(
            brokerServerView,
            parsedQuery.getDataSource(),
            parsedQuery.getIntervals(),
            numCandidates
        )
    );
  }
  catch (Exception ex) {
    // Any parse or lookup failure is reported through the context's error path.
    return context.gotError(ex);
  }
}
} // closes the enclosing resource class (opening brace outside this excerpt)
// NOTE(review): incomplete fragment — the try's catch/finally (presumably the MDC cleanup) is
// outside this excerpt. Visible behavior: copies query metadata into the SLF4J MDC so every log
// line emitted while handling this request carries queryId, dataSource, queryType, hasFilters,
// remoteAddr, duration, and descending. When setContextMDC is enabled, every query-context entry
// is also mirrored into the MDC, with null values stringified as the literal "NULL".
try { final Query query = requestLogLine.getQuery(); MDC.put("queryId", query.getId()); MDC.put("dataSource", query.getDataSource().toString()); MDC.put("queryType", query.getType()); MDC.put("hasFilters", Boolean.toString(query.hasFilters())); MDC.put("remoteAddr", requestLogLine.getRemoteAddr()); MDC.put("duration", query.getDuration().toString()); MDC.put("descending", Boolean.toString(query.isDescending())); if (setContextMDC) { final Iterable<Map.Entry<String, Object>> entries = query.getContext() == null ? ImmutableList.<Map.Entry<String, Object>>of() : query.getContext().entrySet(); for (Map.Entry<String, Object> entry : entries) { MDC.put(entry.getKey(), entry.getValue() == null ? "NULL" : entry.getValue().toString());
/**
 * Deserializes the request body into a {@link Query}, using the mapper selected by the
 * response context (JSON or Smile depending on content type).
 *
 * <p>If the client sent a previous ETag (see {@code getPreviousEtag}), it is copied into the
 * query context under {@code HEADER_IF_NONE_MATCH} — presumably so downstream processing can
 * answer with a not-modified response; confirm at the consumer of that context key.
 *
 * @throws IOException if the body cannot be parsed as a query
 */
// Fix: use Query<?> instead of the raw Query type (no behavior change).
private static Query<?> readQuery(
    final HttpServletRequest req,
    final InputStream in,
    final ResponseContext context
) throws IOException
{
  Query<?> baseQuery = context.getObjectMapper().readValue(in, Query.class);
  final String prevEtag = getPreviousEtag(req);
  if (prevEtag != null) {
    baseQuery = baseQuery.withOverriddenContext(
        ImmutableMap.of(HEADER_IF_NONE_MATCH, prevEtag)
    );
  }
  return baseQuery;
}
/**
 * Applies the scatter/gather byte limit to {@code query}'s context.
 *
 * <p>If the query carries no value for {@code MAX_SCATTER_GATHER_BYTES_KEY}, the enforced
 * limit is written in. If the query already requests a value, it is kept as-is when it does
 * not exceed the enforced limit; otherwise an {@link IAE} is thrown.
 *
 * @throws IAE if the query's configured value exceeds {@code maxScatterGatherBytesLimit}
 */
public static <T> Query<T> withMaxScatterGatherBytes(Query<T> query, long maxScatterGatherBytesLimit)
{
  final Object configured = query.getContextValue(MAX_SCATTER_GATHER_BYTES_KEY);
  if (configured == null) {
    // No per-query value: stamp the enforced limit into the context.
    return query.withOverriddenContext(ImmutableMap.of(MAX_SCATTER_GATHER_BYTES_KEY, maxScatterGatherBytesLimit));
  }
  final long requested = ((Number) configured).longValue();
  if (requested > maxScatterGatherBytesLimit) {
    throw new IAE(
        "configured [%s = %s] is more than enforced limit of [%s].",
        MAX_SCATTER_GATHER_BYTES_KEY,
        requested,
        maxScatterGatherBytesLimit
    );
  }
  // Within bounds: leave the caller's setting untouched.
  return query;
}
/**
 * Initializes this object to execute a specific query. Does not actually execute the query.
 *
 * <p>Transitions the lifecycle from NEW to INITIALIZED, assigns a random UUID as the query id
 * when the query has none, and resolves the tool chest for the query type.
 *
 * @param baseQuery the query
 */
@SuppressWarnings("unchecked")
public void initialize(final Query baseQuery)
{
  transition(State.NEW, State.INITIALIZED);

  // Every query gets an id; generate one if the caller did not supply it.
  final String resolvedId = Strings.isNullOrEmpty(baseQuery.getId())
                            ? UUID.randomUUID().toString()
                            : baseQuery.getId();

  this.queryPlus = QueryPlus.wrap(baseQuery.withId(resolvedId));
  this.toolChest = warehouse.getToolChest(baseQuery);
}
// NOTE(review): incomplete fragment — the method body is cut off after these two statements.
// Visible behavior: narrows the incoming query to a specific segment spec, then formats a name
// "<type>_<dataSource>_<intervals>" — presumably a thread or metric name; confirm at the
// (not visible) use site of newName.
@Override public Sequence<T> run(final Query<T> input) final Query<T> query = input.withQuerySegmentSpec(specificSpec); final String newName = String.format("%s_%s_%s", query.getType(), query.getDataSource(), query.getIntervals());
/**
 * Materializes the wrapped sequence and feeds it to the accumulator as a single
 * by-segment {@code Result}, stamped with this runner's timestamp, segment identifier,
 * and the query's first interval.
 */
@Override
public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator)
{
  // Drain the base sequence eagerly; the by-segment wrapper needs the full list.
  final List<T> collected = Sequences.toList(baseSequence, Lists.<T>newArrayList());

  final BySegmentResultValueClass<T> segmentValue = new BySegmentResultValueClass<T>(
      collected,
      segmentIdentifier,
      query.getIntervals().get(0)
  );

  // The unchecked cast mirrors the by-segment convention: the "row" is itself a Result.
  return accumulator.accumulate(
      initValue,
      (T) new Result<BySegmentResultValueClass<T>>(timestamp, segmentValue)
  );
}
// NOTE(review): incomplete fragment — several statements are missing between the visible lines
// (braces do not balance). Visible behavior: derives cache flags from the query context gated by
// strategy availability and CacheConfig (useCache / populateCache), reads bySegment and priority,
// rebuilds the query with the accumulated context overrides, then resolves the datasource's
// timeline from the server view — returning an empty sequence when no timeline exists — and
// collects the timeline holders covering each of the rewritten query's intervals.
final List<Pair<DateTime, byte[]>> cachedResults = Lists.newArrayList(); final Map<String, CachePopulator> cachePopulatorMap = Maps.newHashMap(); final boolean useCache = query.getContextUseCache(true) && strategy != null && cacheConfig.isUseCache(); final boolean populateCache = query.getContextPopulateCache(true) && strategy != null && cacheConfig.isPopulateCache(); final boolean isBySegment = query.getContextBySegment(false); final int priority = query.getContextPriority(0); contextBuilder.put("priority", priority); final Query<T> rewrittenQuery = query.withOverriddenContext(contextBuilder.build()); VersionedIntervalTimeline<String, ServerSelector> timeline = serverView.getTimeline(query.getDataSource()); if (timeline == null) { return Sequences.empty(); List<TimelineObjectHolder<String, ServerSelector>> serversLookup = Lists.newLinkedList(); for (Interval interval : rewrittenQuery.getIntervals()) { serversLookup.addAll(timeline.lookup(interval)); descriptors = Lists.newArrayList(); serverSegments.put(server, descriptors);
// NOTE(review): incomplete fragment — statements from several branches of a request-forwarding
// servlet are spliced together (unbalanced braces, a dangling error-logging argument list).
// Visible behavior: for Avatica requests, the body is parsed as a generic map, the target server
// is chosen by connection id, and the re-serialized bytes are stashed on the request attribute for
// the proxy. For DELETE on the query endpoint, the body is parsed as a Query, routed via
// hostFinder.pickServer, and assigned a random UUID id when it has none. On failure, a QueryStats
// with success=false plus the exception message is logged and an {"error": ...} JSON body is
// written to the response.
Map<String, Object> requestMap = objectMapper.readValue( request.getInputStream(), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT String connectionId = getAvaticaConnectionId(requestMap); targetServer = hostFinder.findServerAvatica(connectionId); byte[] requestBytes = objectMapper.writeValueAsBytes(requestMap); request.setAttribute(AVATICA_QUERY_ATTRIBUTE, requestBytes); } else if (isQueryEndpoint && HttpMethod.DELETE.is(method)) { Query inputQuery = objectMapper.readValue(request.getInputStream(), Query.class); if (inputQuery != null) { targetServer = hostFinder.pickServer(inputQuery); if (inputQuery.getId() == null) { inputQuery = inputQuery.withId(UUID.randomUUID().toString()); request.getRemoteAddr(), null, new QueryStats(ImmutableMap.<String, Object>of("success", false, "exception", errorMessage)) objectMapper.writeValue( response.getOutputStream(), ImmutableMap.of("error", errorMessage) );
// NOTE(review): incomplete fragment — begins mid-expression (a ternary choosing plain vs.
// pretty-printing writer) and mixes statements from the success and failure paths. Visible
// behavior: parses the request body into a Query, assigns a random UUID id when absent, runs it
// against texasRanger, and emits a "request/time" metric whose dimensions (user2..user9) carry
// datasource, type, intervals, hasFilters, remote address, query id, and duration in standard
// minutes. On error, a QueryStats with success=false and the exception string is recorded.
? objectMapper.writer() : objectMapper.writerWithDefaultPrettyPrinter(); OutputStream out = null; query = objectMapper.readValue(requestQuery, Query.class); queryId = query.getId(); if (queryId == null) { queryId = UUID.randomUUID().toString(); query = query.withId(queryId); Sequence<?> results = query.run(texasRanger); .setUser2(query.getDataSource().toString()) .setUser4(query.getType()) .setUser5(COMMA_JOIN.join(query.getIntervals())) .setUser6(String.valueOf(query.hasFilters())) .setUser7(req.getRemoteAddr()) .setUser8(queryId) .setUser9(query.getDuration().toPeriod().toStandardMinutes().toString()) .build("request/time", requestTime) ); req.getRemoteAddr(), query, new QueryStats(ImmutableMap.<String, Object>of("success", false, "exception", e.toString()))
// NOTE(review): incomplete fragment — unbalanced braces and a dangling argument list (presumably
// to a request-log call). Visible behavior: parses the request body into a Query, assigns a
// random UUID id when the query has none, and on failure records a QueryStats with success=false
// and the exception string alongside the remote address and the query.
query = objectMapper.readValue(req.getInputStream(), Query.class); queryId = query.getId(); if (queryId == null) { queryId = UUID.randomUUID().toString(); query = query.withId(queryId); req.getRemoteAddr(), query, new QueryStats(ImmutableMap.<String, Object>of("success", false, "exception", e.toString()))
/**
 * Returns a copy of {@code query} whose context carries {@code timeout} under
 * {@code TIMEOUT_KEY}, overriding any existing value for that key.
 */
public static <T> Query<T> withTimeout(Query<T> query, long timeout)
{
  return query.withOverriddenContext(ImmutableMap.<String, Object>of(TIMEOUT_KEY, timeout));
}
// NOTE(review): incomplete fragment — ends mid-call (the readValue argument list is not closed).
// Visible behavior: on the first response chunk, enforces the total-bytes limit, logs the url and
// query id at debug, records time-to-first-byte (nanoseconds since the request started) into the
// response metrics, and begins parsing the response-context header/bytes into a
// Map<String, Object> via Jackson.
checkTotalBytesLimit(response.getContent().readableBytes()); log.debug("Initial response from url[%s] for queryId[%s]", url, query.getId()); responseStartTimeNs = System.nanoTime(); acquireResponseMetrics().reportNodeTimeToFirstByte(responseStartTimeNs - requestStartTimeNs).emit(emitter); objectMapper.<Map<String, Object>>readValue( responseContext, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
// NOTE(review): incomplete fragment — the run body is cut off and the statements shown come from
// inside a (not fully visible) conditional. Visible behavior: reads the bySegment and finalize
// flags from the query context (finalize defaults to true), and on the visible path reruns the
// query with finalize=false in its context while applying a finalizing metric-manipulator —
// presumably so finalization happens exactly once, here rather than downstream; confirm against
// the full class.
@Override public Sequence<T> run(final Query<T> query) final boolean isBySegment = query.getContextBySegment(false); final boolean shouldFinalize = query.getContextFinalize(true); queryToRun = query.withOverriddenContext(ImmutableMap.<String, Object>of("finalize", false)); metricManipulationFn = MetricManipulatorFns.finalizing();
// NOTE(review): incomplete fragment — begins mid-builder-chain and ends in a dangling argument
// list. Visible behavior: populates metric dimensions user2..user9 (datasource name, query type,
// joined intervals, hasFilters, remote address, query id, duration in standard minutes), emits a
// "request/time" metric, and logs a success request entry carrying the same request time.
.setUser2(theQuery.getDataSource().getName()) .setUser4(theQuery.getType()) .setUser5(COMMA_JOIN.join(theQuery.getIntervals())) .setUser6(String.valueOf(theQuery.hasFilters())) .setUser7(req.getRemoteAddr()) .setUser8(theQueryId) .setUser9(theQuery.getDuration().toPeriod().toStandardMinutes().toString()) .build("request/time", requestTime) ); req.getRemoteAddr(), theQuery, new QueryStats(ImmutableMap.<String, Object>of("request/time", requestTime, "success", true))
// NOTE(review): incomplete fragment — unbalanced braces and a trailing half-built call. Visible
// behavior: when the inner datasource is a QueryDataSource with a non-null context, its entries
// are merged into subqueryContext, the inner query is recast as a GroupByQuery with that merged
// context, and a further context override (contents cut off) is applied before handing the
// subquery to the groupBy strategy.
if (((QueryDataSource) dataSource).getQuery().getContext() != null) { subqueryContext.putAll(((QueryDataSource) dataSource).getQuery().getContext()); subquery = (GroupByQuery) ((QueryDataSource) dataSource).getQuery().withOverriddenContext(subqueryContext); groupByStrategy, subquery.withOverriddenContext( ImmutableMap.<String, Object>of(
// NOTE(review): incomplete fragment — statements from several points in a request handler
// (unbalanced braces; a dangling ".setName" receiver). Visible behavior: initializes the query
// lifecycle from the parsed request body, renames the current thread to include query type,
// datasource names, and query id, logs the query at debug level, and warns when the serialized
// response context exceeds RESPONSE_CTX_HEADER_LEN_LIMIT (it is truncated; the full context is
// included in the warning).
queryLifecycle.initialize(readQuery(req, in, context)); query = queryLifecycle.getQuery(); final String queryId = query.getId(); .setName(StringUtils.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getNames(), queryId)); if (log.isDebugEnabled()) { log.debug("Got query [%s]", query); String responseCtxString = jsonMapper.writeValueAsString(responseContext); if (responseCtxString.length() > RESPONSE_CTX_HEADER_LEN_LIMIT) { log.warn("Response Context truncated for id [%s] . Full context is [%s].", queryId, responseCtxString);
/**
 * Runs every query in {@code queries} against {@code url}, {@code timesToRun} times, comparing
 * actual results with the expected results attached to each query.
 *
 * <p>All queries in an iteration are executed even after a mismatch; the failure is raised at
 * the end of the iteration so every discrepancy gets logged.
 *
 * @throws ISE if any query's results do not match its expected results
 */
private void testQueries(String url, List<QueryWithResults> queries, int timesToRun) throws Exception
{
  for (int iteration = 0; iteration < timesToRun; iteration++) {
    LOG.info("Starting Iteration %d", iteration);

    boolean anyFailed = false;
    for (QueryWithResults queryWithResult : queries) {
      LOG.info("Running Query %s", queryWithResult.getQuery().getType());
      final List<Map<String, Object>> actual = queryClient.query(url, queryWithResult.getQuery());
      if (QueryResultVerifier.compareResults(actual, queryWithResult.getExpectedResults())) {
        LOG.info("Results Verified for Query %s", queryWithResult.getQuery().getType());
      } else {
        LOG.error(
            "Failed while executing query %s \n expectedResults: %s \n actualResults : %s",
            queryWithResult.getQuery(),
            jsonMapper.writeValueAsString(queryWithResult.getExpectedResults()),
            jsonMapper.writeValueAsString(actual)
        );
        anyFailed = true;
      }
    }

    if (anyFailed) {
      throw new ISE("one or more queries failed");
    }
  }
}
/**
 * Runs the query, fanning out over every member of a {@link UnionDataSource}: each member is
 * queried individually through the base runner and the per-member sequences are merged by the
 * query's result ordering. Non-union datasources are passed through unchanged.
 */
@Override
public Sequence<T> run(final QueryPlus<T> queryPlus, final Map<String, Object> responseContext)
{
  final Query<T> query = queryPlus.getQuery();
  final DataSource dataSource = query.getDataSource();

  // Pass-through for anything that is not a union.
  if (!(dataSource instanceof UnionDataSource)) {
    return baseRunner.run(queryPlus, responseContext);
  }

  // One sub-run per member datasource; Lists.transform keeps this lazy.
  final Function<DataSource, Sequence<T>> runSingleSource = new Function<DataSource, Sequence<T>>()
  {
    @Override
    public Sequence<T> apply(DataSource singleSource)
    {
      return baseRunner.run(
          queryPlus.withQuery(query.withDataSource(singleSource)),
          responseContext
      );
    }
  };

  return new MergeSequence<>(
      query.getResultOrdering(),
      Sequences.simple(
          Lists.transform(((UnionDataSource) dataSource).getDataSources(), runSingleSource)
      )
  );
}
// NOTE(review): incomplete fragment — statements from several points of a direct HTTP query
// client (a half-built parametric-type call, a bare throw mid-line, a dangling POST builder).
// Visible behavior: constructs Jackson JavaTypes for the toolchest's result type (including a
// by-segment variant), builds a per-query cancel URL "<scheme>://<host>/druid/v2/<queryId>",
// tracks the fail deadline from the QUERY_FAIL_TIME context value plus the enforced
// max-scatter-gather-bytes and the shared total-bytes counter, raises a timeout error naming the
// query id and url, and POSTs the query re-serialized with the remaining time as its timeout.
final TypeFactory typeFactory = objectMapper.getTypeFactory(); JavaType baseType = typeFactory.constructType(toolChest.getResultTypeReference()); JavaType bySegmentType = typeFactory.constructParametricType( final String cancelUrl = StringUtils.format("%s://%s/druid/v2/%s", scheme, host, query.getId()); log.debug("Querying queryId[%s] url[%s]", query.getId(), url); long timeoutAt = query.getContextValue(QUERY_FAIL_TIME); long maxScatterGatherBytes = QueryContexts.getMaxScatterGatherBytes(query); AtomicLong totalBytesGathered = (AtomicLong) context.get(QUERY_TOTAL_BYTES_GATHERED); throw new RE("Query[%s] url[%s] timed out.", query.getId(), url); HttpMethod.POST, new URL(url) ).setContent(objectMapper.writeValueAsBytes(QueryContexts.withTimeout(query, timeLeft))) .setHeader( HttpHeaders.Names.CONTENT_TYPE,