@Path("/")
@GET
public void proxy(RakamHttpRequest request, @QueryParam("u") String uri)
        throws InterruptedException {
    URI url = UriBuilder.fromUri(uri).build();
    String scheme = url.getScheme();

    // Decide TLS from the scheme, not the port: previously the SSL bootstrap was
    // picked by (port == 443), so https on a non-standard port went over plaintext
    // and http://host:443 incorrectly attempted TLS. ("https".equals(scheme) also
    // avoids an NPE when the URI has no scheme.)
    boolean secure;
    if ("http".equals(scheme)) {
        secure = false;
    }
    else if ("https".equals(scheme)) {
        secure = true;
    }
    else {
        request.response("invalid scheme").end();
        return;
    }

    // Explicit port wins; otherwise fall back to the scheme default.
    int port = url.getPort() != -1 ? url.getPort() : (secure ? 443 : 80);

    Channel ch = (secure ? sslBootstrap : bootstrap)
            .connect(url.getHost(), port)
            .sync().channel();
    // Attach the inbound request so the channel handler can write the upstream
    // response back to this client.
    ch.attr(CONNECTION_ATTR).set(request);

    // Forward the full request target: the original forwarded only the raw path,
    // silently dropping the query string and sending an empty request line when
    // the URI had no path.
    String target = url.getRawPath() == null || url.getRawPath().isEmpty() ? "/" : url.getRawPath();
    if (url.getRawQuery() != null) {
        target += "?" + url.getRawQuery();
    }

    HttpRequest req = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, target);
    req.headers().set(HttpHeaders.Names.HOST, url.getHost());
    // CONNECTION was set twice in the original; once is enough.
    req.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.CLOSE);
    req.headers().set(HttpHeaders.Names.ACCEPT_ENCODING, HttpHeaders.Values.GZIP);
    req.headers().set(HttpHeaders.Names.CACHE_CONTROL, HttpHeaders.Values.NO_CACHE);
    req.headers().set(HttpHeaders.Names.PRAGMA, HttpHeaders.Values.NO_CACHE);
    req.headers().set(HttpHeaders.Names.USER_AGENT, "rakam-ab-test-tool 0.1");
    ch.writeAndFlush(req);
}
// Resolve the parameter's concrete type against the service class (so type
// variables declared on the service are substituted), then build the matching
// query-parameter binding.
Type actualType = getActualType(service.getClass(), parameter.getParameterizedType());
if (actualType.equals(String.class)
        || (actualType instanceof Class && PRIMITIVE_MAPPER.containsKey(actualType))) {
    // Strings pass through unchanged; known primitives use their dedicated mapper.
    return new IRequestParameter.QueryParameter(param.value(), param.required(),
            actualType.equals(String.class)
                    ? Function.identity()
                    : PRIMITIVE_MAPPER.get(actualType));
}
// BUG FIX: the original had two consecutive return statements (the closing brace
// of the if-block was missing), making this Jackson fallback unreachable.
// Fallback: let Jackson convert the raw query value to the target class.
return new IRequestParameter.QueryParameter(param.value(), param.required(),
        o -> mapper.convertValue(o, (Class) actualType));
@ApiOperation(value = "Perform simple query on event data",
        authorizations = @Authorization(value = "read_key"))
@JsonRequest
@Path("/analyze")
public CompletableFuture<QueryResult> analyzeEvents(
        @Named("project") RequestContext context,
        @QueryParam("read_key") String readKey,
        @BodyParam AnalyzeRequest analyzeRequest) {
    // At least one collection must be targeted.
    checkArgument(!analyzeRequest.collections.isEmpty(), "collections array is empty");

    // Missing timezone defaults to UTC.
    ZoneId timezone = Optional.ofNullable(analyzeRequest.timezone).orElse(ZoneOffset.UTC);

    return eventExplorer.analyze(
            context,
            analyzeRequest.collections,
            analyzeRequest.measure,
            analyzeRequest.grouping,
            analyzeRequest.segment,
            analyzeRequest.filterExpression,
            analyzeRequest.startDate,
            analyzeRequest.endDate,
            timezone).getResult();
}
@GET
@Consumes("text/event-stream")
@IgnoreApi
@ApiOperation(value = "Analyze events asynchronously",
        request = QueryRequest.class,
        authorizations = @Authorization(value = "read_key"))
@Path("/execute")
public void execute(RakamHttpRequest request, @QueryParam("read_key") String apiKey) {
    // Stream query results to the client via server-sent events.
    handleServerSentQueryExecution(request, QueryRequest.class, (project, query) -> {
        // A missing limit falls back to the service-wide default.
        int limit = query.limit == null ? DEFAULT_QUERY_RESULT_COUNT : query.limit;
        return executorService.executeQuery(
                new RequestContext(project, apiKey),
                query.query,
                query.sample,
                query.defaultSchema,
                query.timezone,
                limit);
    });
}
@ApiOperation(value = "Perform simple query on event data",
        request = AnalyzeRequest.class,
        consumes = "text/event-stream",
        produces = "text/event-stream",
        authorizations = @Authorization(value = "read_key"))
@GET
@IgnoreApi
@Path("/analyze/export")
public void exportEvents(RakamHttpRequest request, @QueryParam("read_key") String readKey) {
    // Export analysis results over a server-sent-event stream.
    queryService.handleServerSentQueryExecution(request, AnalyzeRequest.class, (project, analyzeRequest) -> {
        // At least one collection must be targeted.
        checkArgument(!analyzeRequest.collections.isEmpty(), "collections array is empty");

        // Missing timezone defaults to UTC.
        ZoneId timezone = Optional.ofNullable(analyzeRequest.timezone).orElse(ZoneOffset.UTC);

        return eventExplorer.export(
                new RequestContext(project, readKey),
                analyzeRequest.collections,
                analyzeRequest.measure,
                analyzeRequest.grouping,
                analyzeRequest.segment,
                analyzeRequest.filterExpression,
                analyzeRequest.startDate,
                analyzeRequest.endDate,
                timezone);
    });
}
@ApiOperation(value = "Perform simple query on event data",
        request = AnalyzeRequest.class,
        consumes = "text/event-stream",
        produces = "text/event-stream",
        authorizations = @Authorization(value = "read_key"))
@GET
@IgnoreApi
@Path("/analyze")
public void analyzeEvents(RakamHttpRequest request, @QueryParam("read_key") String readKey) {
    // Server-sent-event variant of the analyze endpoint.
    queryService.handleServerSentQueryExecution(request, AnalyzeRequest.class, (project, analyzeRequest) -> {
        // At least one collection must be targeted.
        checkArgument(!analyzeRequest.collections.isEmpty(), "collections array is empty");

        // Missing timezone defaults to UTC.
        ZoneId zone = Optional.ofNullable(analyzeRequest.timezone).orElse(ZoneOffset.UTC);

        return eventExplorer.analyze(
                new RequestContext(project, readKey),
                analyzeRequest.collections,
                analyzeRequest.measure,
                analyzeRequest.grouping,
                analyzeRequest.segment,
                analyzeRequest.filterExpression,
                analyzeRequest.startDate,
                analyzeRequest.endDate,
                zone);
    });
}
@ApiOperation(value = "Execute query",
        // Consistency fix: the sibling SSE endpoints all declare the request
        // schema via `request = X.class`; this one omitted it, leaving the
        // generated API docs without a body model for retention queries.
        request = RetentionQuery.class,
        authorizations = @Authorization(value = "read_key"),
        consumes = "text/event-stream",
        produces = "text/event-stream")
@GET
@IgnoreApi
@Path("/analyze")
public void analyzeRetention(RakamHttpRequest request, @QueryParam("read_key") String apiKey) {
    // Stream retention-analysis results over server-sent events; failures are
    // logged with the serialized query for reproducibility.
    queryService.handleServerSentQueryExecution(request, RetentionQuery.class,
            (project, query) -> retentionQueryExecutor.query(
                    new RequestContext(project, apiKey),
                    Optional.ofNullable(query.firstAction),
                    Optional.ofNullable(query.returningAction),
                    query.dateUnit,
                    Optional.ofNullable(query.dimension),
                    Optional.ofNullable(query.period),
                    query.startDate,
                    query.endDate,
                    query.timezone,
                    query.approximate),
            (query, result) -> LOGGER.error(
                    new RuntimeException(JsonHelper.encode(query) + " : " + result.getError().toString()),
                    "Error running retention query"));
}
@GET
@Consumes("text/event-stream")
@Path("/update")
@ApiOperation(value = "Update view",
        authorizations = @Authorization(value = "master_key"),
        notes = "Invalidate previous cached data, executes the materialized view query and caches it.\n" +
                "This feature is similar to UPDATE MATERIALIZED VIEWS in RDBMSs.")
@IgnoreApi
public void update(RakamHttpRequest request, @QueryParam("master_key") String apiKey) {
    queryService.handleServerSentQueryExecution(request, MaterializedViewRequest.class, (project, query) -> {
        RequestContext context = new RequestContext(project, apiKey);
        // Take the update lock and run the materialized-view refresh.
        MaterializedViewExecution execution =
                service.lockAndUpdateView(context, service.get(project, query.name));

        if (execution.materializedViewUpdateQuery != null) {
            return execution.materializedViewUpdateQuery.get();
        }

        // A null update query means another process already holds the update lock;
        // surface that as an error result instead of running concurrently.
        QueryError error = new QueryError(
                "There is another process that updates materialized view", null, null, null, null);
        return QueryExecution.completedQueryExecution(null, QueryResult.errorResult(error));
    });
}
@ApiOperation(value = "Execute query",
        request = FunnelQuery.class,
        consumes = "text/event-stream",
        produces = "text/event-stream",
        authorizations = @Authorization(value = "read_key"))
@GET
@IgnoreApi
@Path("/analyze")
public void analyzeFunnel(RakamHttpRequest request, @QueryParam("read_key") String apiKey) {
    queryService.handleServerSentQueryExecution(request, FunnelQuery.class, (project, query) -> {
        // Grouping directly by the time column is only allowed through segments.
        boolean groupsByTimeColumnWithoutSegment = query.dimension != null
                && query.segment == null
                && projectConfig.getTimeColumn().equals(query.dimension);
        if (groupsByTimeColumnWithoutSegment) {
            throw new RakamException("You should use segments in order to group by with time column", BAD_REQUEST);
        }

        return funnelQueryExecutor.query(
                new RequestContext(project, apiKey),
                query.steps,
                Optional.ofNullable(query.dimension),
                Optional.ofNullable(query.segment),
                query.startDate,
                query.endDate,
                Optional.ofNullable(query.window),
                query.timezone,
                Optional.ofNullable(query.connectors),
                getFunnelType(query));
    }, (query, result) -> LOGGER.error(
            new RuntimeException(JsonHelper.encode(query) + " : " + result.getError().toString()),
            "Error running funnel query"));
}