/**
 * Validates {@code collection} as a safe SQL identifier using the default
 * double-quote escape character. Delegates to {@link #checkCollection(String, char)}.
 *
 * @param collection the collection name to validate
 * @return the validated, escaped collection identifier
 */
public static String checkCollection(String collection) {
    return checkCollection(collection, '"');
}
/**
 * Deserialization constructor for a funnel step.
 *
 * @param collection       event collection this step reads from; validated before assignment
 * @param filterExpression optional filter expression; an absent JSON field (null) is
 *                         normalized to {@link Optional#empty()}
 */
@JsonCreator
public FunnelStep(
        @JsonProperty("collection") String collection,
        @JsonProperty("filterExpression") Optional<String> filterExpression) {
    checkCollection(collection);
    this.collection = collection;
    // Jackson may pass null rather than Optional.empty() when the field is missing.
    if (filterExpression == null) {
        this.filterExpression = Optional.empty();
    }
    else {
        this.filterExpression = filterExpression;
    }
}
/**
 * JSON factory method for a retention action.
 *
 * @param collection       event collection the action reads from; validated first
 * @param filterExpression optional filter source text, parsed when present
 * @return a new immutable AutoValue-backed RetentionAction
 */
@JsonCreator
public static RetentionAction create(
        @JsonProperty("collection") String collection,
        @JsonProperty("filter") Optional<String> filterExpression) {
    checkCollection(collection);
    return new AutoValue_RetentionQueryExecutor_RetentionAction(
            collection,
            filterExpression.map(RetentionAction::parseExpression));
}
/**
 * Builds the fully-qualified (and optionally sampled) table reference for a collection.
 * When a hot-storage connector is configured, the reference becomes a UNION ALL of the
 * cold- and hot-storage copies, wrapped as a derived table aliased to the table name.
 *
 * @param project   project (schema) name
 * @param tableName collection (table) name
 * @param sample    optional TABLESAMPLE clause to append
 * @return SQL fragment usable in a FROM clause
 */
private String getTableReference(String project, String tableName, Optional<QuerySampling> sample) {
    String sampleClause = sample
            .map(s -> " TABLESAMPLE " + s.method.name() + "(" + s.percentage + ")")
            .orElse("");
    // NOTE(review): the project is escaped via checkCollection here while sibling code
    // uses checkProject(project, '"') — confirm both apply the same escaping rules.
    String table = checkCollection(project) + "." + checkCollection(tableName) + sampleClause;
    String hotStorageConnector = prestoConfig.getHotStorageConnector();
    if (hotStorageConnector == null) {
        return prestoConfig.getColdStorageConnector() + "." + table;
    }
    return "((select * from " + prestoConfig.getColdStorageConnector() + "." + table
            + " union all " + "select * from " + hotStorageConnector + "." + table + ")"
            + " as " + tableName + ")";
}
}
/**
 * Builds a parameterized INSERT statement for the given Avro schema, skipping
 * fields that are populated by the source itself ({@code sourceFields}).
 *
 * <p>Fix: the previous version emitted the opening parenthesis of the column list
 * (and the first '?') only when the FIRST schema field was not a source field,
 * producing malformed SQL (missing "(" and a leading comma) whenever the first
 * field was skipped. The separator logic now works regardless of which fields
 * are excluded.
 *
 * @param project    target project (schema) name, escaped
 * @param collection target collection (table) name, escaped
 * @param schema     Avro record schema describing the columns
 * @return an {@code INSERT ... VALUES} statement with one '?' per included column
 */
private String getQuery(String project, String collection, Schema schema) {
    StringBuilder query = new StringBuilder("INSERT INTO ")
            .append(checkProject(project, '"'))
            .append(".")
            .append(ValidationUtil.checkCollection(collection));
    StringBuilder params = new StringBuilder();
    boolean first = true;
    for (Schema.Field field : schema.getFields()) {
        if (sourceFields.contains(field.name())) {
            continue; // source-populated column: not part of the INSERT list
        }
        if (first) {
            query.append(" (");
            params.append('?');
            first = false;
        }
        else {
            query.append(", ");
            params.append(", ?");
        }
        query.append(checkTableColumn(field.name()));
    }
    // NOTE(review): if every field is a source field the statement is still malformed
    // ("...collection) VALUES ()"); assumed unreachable — confirm with callers.
    return query.append(") VALUES (").append(params.toString()).append(")").toString();
}
}
/**
 * Event-bus hook: provisions the anonymous-user id-mapping table when a new
 * project is created. The table maps anonymous ids to users together with the
 * creation/merge window timestamps.
 *
 * @param event project-creation event carrying the project name
 */
@Subscribe
public void onCreateProject(ProjectCreatedEvent event) {
    String tableReference = executor.formatTableReference(
            event.project, QualifiedName.of(ANONYMOUS_ID_MAPPING), Optional.empty(), ImmutableMap.of());
    String ddl = format("CREATE TABLE %s(id VARCHAR, %s VARCHAR, "
                    + "created_at TIMESTAMP, merged_at TIMESTAMP)",
            tableReference,
            checkCollection(projectConfig.getUserColumn()));
    executor.executeRawStatement(ddl);
}
}
if (query.collection == null) {
    // No collection specified: query across the synthetic "_all" view.
    table = String.format("SELECT cast(%s as date) as date, %s %s FROM _all",
            checkCollection(projectConfig.getTimeColumn()),
            Optional.ofNullable(query.dimension).map(v -> v + ",").orElse(""),
            checkTableColumn(projectConfig.getUserColumn()));
    dateColumn = "date";
} else {
    // Fix: escape the user-supplied collection name via checkCollection (as the rest
    // of the file does) instead of naively wrapping it in double quotes, which did
    // not escape embedded '"' characters.
    table = checkCollection(query.collection);
    dateColumn = String.format("cast(%s as date)", checkCollection(projectConfig.getTimeColumn()));
/**
 * Renders the SELECT for one funnel step: optional dimension (or segmented
 * dimension), the connector/user field, the 1-based step number and event time.
 *
 * @param project        project (schema) name
 * @param connectorField format pattern producing the user/connector column for a step alias
 * @param idx            zero-based step index; emitted as step {@code idx + 1}
 * @param funnelStep     step definition (collection + optional filter)
 * @param dimension      optional dimension column
 * @param segment        optional time-segment name applied to the dimension
 * @param startDate      part of the shared converter signature; unused in this variant
 * @param endDate        part of the shared converter signature; unused in this variant
 * @return SQL text for this step's sub-select
 */
public String convertFunnel(String project, String connectorField, int idx, FunnelStep funnelStep,
        Optional<String> dimension, Optional<String> segment, LocalDate startDate, LocalDate endDate) {
    String stepAlias = "step" + idx;
    String table = checkProject(project, '"') + "." + ValidationUtil.checkCollection(funnelStep.getCollection());
    Optional<String> filterExp = funnelStep.getExpression().map(value ->
            RakamSqlFormatter.formatExpression(value,
                    name -> name.getParts().stream().map(e -> formatIdentifier(e, '"')).collect(Collectors.joining(".")),
                    name -> formatIdentifier(stepAlias, '"') + "." + name, '"'));

    String dimensionColumn = segment.isPresent()
            ? ""
            : dimension.map(ValidationUtil::checkTableColumn).map(v -> v + ",").orElse("");

    String segmentColumn;
    if (segment.isPresent()) {
        // NOTE(review): assumes dimension is present whenever segment is — confirm with callers.
        String segmentFn = timeStampMapping.get(
                FunnelTimestampSegments.valueOf(segment.get().replace(" ", "_").toUpperCase()));
        segmentColumn = format(segmentFn, dimension.get())
                + " as " + checkTableColumn(dimension.get() + "_segment") + ",";
    }
    else {
        segmentColumn = "";
    }

    return format("SELECT %s %s %s, %d as step, %s from %s %s %s",
            dimensionColumn,
            segmentColumn,
            format(connectorField, stepAlias),
            idx + 1,
            checkTableColumn(projectConfig.getTimeColumn()),
            table,
            stepAlias,
            filterExp.map(v -> "where " + v).orElse(""));
}
}
/**
 * Creates the anonymous-user id-mapping table for the given project.
 *
 * @param project project name the table belongs to
 * @return future completing with the DDL execution result
 */
public CompletableFuture<QueryResult> createTable(String project) {
    String tableReference = executor.formatTableReference(
            project, QualifiedName.of(ANONYMOUS_ID_MAPPING), Optional.empty(), ImmutableMap.of());
    String ddl = format("CREATE TABLE %s(id VARCHAR, %s VARCHAR, "
                    + "created_at TIMESTAMP, merged_at TIMESTAMP)",
            tableReference,
            checkCollection(projectConfig.getUserColumn()));
    return executor.executeRawStatement(ddl).getResult();
}
}
/**
 * Drops the project's user table (if present) and invalidates cached metadata.
 *
 * @param project project whose user table should be removed
 * @throws IllegalStateException if the DROP statement fails
 */
@Override
public void dropProjectIfExists(String project) {
    ProjectCollection userTable = getUserTable(project, false);
    String table = checkProject(userTable.project, '"') + "." + checkCollection(userTable.collection);
    QueryResult result = queryExecutor
            .executeRawStatement(new RequestContext(project, null), format("DROP TABLE IF EXISTS %s", table))
            .getResult()
            .join();
    // Invalidate caches regardless of outcome: the table state may have changed.
    propertyCache.invalidateAll();
    userTypeCache.invalidateAll();
    if (result.isFailed()) {
        throw new IllegalStateException(result.toString());
    }
}
/**
 * Renders the SELECT for one funnel step against the cold-storage connector,
 * optionally resolving anonymous users through the id-mapping table.
 *
 * <p>Fix: the project name interpolated into the id-mapping LEFT JOIN was used
 * raw; it is now escaped with {@code checkProject(project, '"')}, consistent
 * with the main table reference in the same statement.
 *
 * @param project        project (schema) name
 * @param connectorField format pattern producing the user column for a step alias
 * @param idx            zero-based step index; emitted as step {@code idx + 1}
 * @param funnelStep     step definition (collection + optional filter)
 * @param dimension      optional dimension column
 * @param segment        optional time-segment applied to the dimension
 * @param startDate      id-mapping merge-window start
 * @param endDate        id-mapping merge-window end
 * @return SQL text for this step's sub-select
 */
public String convertFunnel(String project, String connectorField, int idx, FunnelStep funnelStep,
        Optional<String> dimension, Optional<String> segment, LocalDate startDate, LocalDate endDate) {
    Optional<String> filterExp = funnelStep.getExpression().map(value ->
            RakamSqlFormatter.formatExpression(value,
                    name -> name.getParts().stream().map(e -> formatIdentifier(e, '"')).collect(Collectors.joining(".")),
                    name -> formatIdentifier("step" + idx, '"') + "." + name, '"'));
    return format("SELECT %s %s, %d as step, %s.%s from %s.%s.%s %s %s %s",
            dimension.map(ValidationUtil::checkTableColumn)
                    .map(v -> "step" + idx + "." + v)
                    .map(v -> segment.isPresent()
                            ? applySegment(v, segment) + " as \"" + dimension.orElse("") + "_segment\"" + ","
                            : v + ",")
                    .orElse(""),
            userMappingEnabled
                    ? format("coalesce(mapping._user, %s._user, %s) as _user",
                            "step" + idx, format(connectorField, "step" + idx))
                    : format(connectorField, "step" + idx),
            idx + 1,
            "step" + idx,
            checkTableColumn(projectConfig.getTimeColumn()),
            prestoConfig.getColdStorageConnector(),
            checkProject(project, '"'),
            checkCollection(funnelStep.getCollection()),
            "step" + idx,
            userMappingEnabled
                    ? format("left join %s.%s mapping on (%s.%s is null and mapping.created_at >= date '%s' and mapping.merged_at <= date '%s' and mapping.id = %s.%s)",
                            // previously the raw project string — escape it consistently
                            checkProject(project, '"'),
                            checkCollection(ANONYMOUS_ID_MAPPING),
                            "step" + idx, checkTableColumn(projectConfig.getUserColumn()),
                            startDate.format(ISO_LOCAL_DATE), endDate.format(ISO_LOCAL_DATE),
                            "step" + idx, checkTableColumn(projectConfig.getUserColumn()))
                    : "",
            filterExp.map(v -> "where " + v).orElse(""));
}
/**
 * Builds the per-collection sub-query used by retention: event time, an optional
 * dimension alias and the connector (user) field, constrained by the time
 * predicate and an optional extra filter.
 *
 * @param project        project (schema) name
 * @param collection     collection (table) name
 * @param connectorField expression selecting the user/connector column
 * @param dimension      optional dimension column, aliased to "dimension"
 * @param timePredicate  predicate applied to the event-time column
 * @param filter         optional additional filter expression (ANDed in)
 * @return SQL text for the sub-query
 */
protected String getTableSubQuery(String project, String collection, String connectorField,
        Optional<String> dimension, String timePredicate, Optional<Expression> filter) {
    String dimensionColumn = dimension.isPresent()
            ? checkTableColumn(dimension.get(), "dimension", '"') + " as dimension, "
            : "";
    // Column references inside the filter are not supported in this context.
    String filterClause = filter.isPresent()
            ? "and " + formatExpression(filter.get(), reference -> { throw new UnsupportedOperationException(); }, '"')
            : "";
    String table = checkProject(project, '"') + "." + checkCollection(collection);
    return format("select %s, %s %s from %s where %s %s %s",
            checkTableColumn(projectConfig.getTimeColumn()),
            dimensionColumn,
            connectorField,
            table,
            checkTableColumn(projectConfig.getTimeColumn()),
            timePredicate,
            filterClause);
}
}
public void onCreateCollectionFields(String project, String collection, List<SchemaField> fields) { for (SchemaField field : fields) { try { // We cant't use CONCURRENTLY because it causes dead-lock with ALTER TABLE and it's slow. projectConfig.getTimeColumn(); executor.executeRawStatement(String.format("CREATE INDEX %s %s ON %s.%s USING %s(%s)", postgresql9_5 ? "IF NOT EXISTS" : "", checkCollection(String.format("%s_%s_%s_auto_index", project, collection, field.getName())), project, checkCollection(collection), (postgresql9_5 && field.getName().equals(projectConfig.getTimeColumn())) ? "BRIN" : "BTREE", checkTableColumn(field.getName()))); } catch (Exception e) { if (postgresql9_5) { throw e; } } } } }
/**
 * Builds the per-collection sub-query used by retention, optionally resolving
 * anonymous users via the id-mapping table when user mapping is enabled.
 *
 * <p>Fix: dropped a redundant {@code String.format("%s", ...)} wrapper around the
 * escaped user field — it returned its argument unchanged.
 *
 * @param mappingEnabled whether this collection participates in anonymous-id mapping
 * @param collection     collection (table) name
 * @param connectorField user/connector column expression
 * @param isText         presence selects the escaped column form; the boolean VALUE
 *                       is ignored — TODO confirm that is intended
 * @param timeColumn     format pattern applied to the event-time column expression
 * @param dimension      optional dimension column, aliased to "dimension"
 * @param startDate      query window start (also bounds the mapping merge window)
 * @param endDate        query window end (also bounds the mapping merge window)
 * @param filter         optional additional filter expression (ANDed in)
 * @return SQL text for the sub-query
 */
protected String getTableSubQuery(
        boolean mappingEnabled,
        String collection,
        String connectorField,
        Optional<Boolean> isText,
        String timeColumn,
        Optional<String> dimension,
        LocalDate startDate,
        LocalDate endDate,
        Optional<Expression> filter) {
    // Presence (not value) of isText selects the escaped table-column form.
    String userField = isText.map(ignored -> checkTableColumn(connectorField)).orElse(connectorField);
    String timePredicate = String.format("between date '%s' and date '%s' + interval '1' day",
            startDate.format(ISO_LOCAL_DATE), endDate.format(ISO_LOCAL_DATE));
    boolean resolveUsers = userMappingEnabled && mappingEnabled;
    String userExpression = resolveUsers
            ? String.format("(case when data.%s is not null then data.%s else coalesce(mapping._user, data._device_id) end) as %s",
                    userField, userField, userField)
            : ("data." + userField);
    String mappingJoin = resolveUsers
            ? String.format("left join %s mapping on (data.%s is null and mapping.created_at >= date '%s' and mapping.merged_at <= date '%s' and mapping.id = data._user)",
                    checkCollection(ANONYMOUS_ID_MAPPING),
                    checkTableColumn(projectConfig.getUserColumn()),
                    startDate.format(ISO_LOCAL_DATE),
                    endDate.format(ISO_LOCAL_DATE))
            : "";
    return format("select %s as date, %s %s from %s as data %s where data.%s %s %s",
            String.format(timeColumn, "data." + checkTableColumn(projectConfig.getTimeColumn())),
            dimension.isPresent() ? checkTableColumn(dimension.get(), "data.dimension", '"') + " as dimension, " : "",
            userExpression,
            checkCollection(collection),
            mappingJoin,
            checkTableColumn(projectConfig.getTimeColumn()),
            timePredicate,
            filter.isPresent()
                    ? "and " + formatExpression(filter.get(), reference -> { throw new UnsupportedOperationException(); }, '"')
                    : "");
}
}
/**
 * Deserialization constructor for an OLAP pre-aggregation table definition.
 *
 * @param collections  source event collections feeding the table
 * @param dimensions   grouping dimensions
 * @param aggregations aggregate functions to materialize
 * @param measures     measure columns the aggregations apply to
 * @param tableName    target table name; validated before assignment
 */
@JsonCreator
public OLAPTable(
        @ApiParam("collections") Set<String> collections,
        @ApiParam("dimensions") Set<Dimension> dimensions,
        @ApiParam("aggregations") Set<AggregationType> aggregations,
        @ApiParam("measures") Set<String> measures,
        @ApiParam("tableName") String tableName) {
    // Fail fast on an invalid table name before any state is assigned.
    checkCollection(tableName);
    this.collections = collections;
    this.dimensions = dimensions;
    this.aggregations = aggregations;
    this.measures = measures;
    this.tableName = tableName;
}
/**
 * Drops the backing object of a materialized view (a plain TABLE when the view
 * is incremental, otherwise a MATERIALIZED VIEW) and, on success, removes its
 * metadata record.
 *
 * @param context request context carrying the project
 * @param name    materialized view name
 * @return future with an empty result on success, or the failed query result
 */
@Override
public CompletableFuture<QueryResult> delete(RequestContext context, String name) {
    MaterializedView materializedView = database.getMaterializedView(context.project, name);
    // Incremental views are materialized as plain tables.
    String type = materializedView.incremental ? "TABLE" : "MATERIALIZED VIEW";
    QueryExecution queryExecution = queryExecutor.executeRawStatement(context,
            format("DROP %s %s.%s", type,
                    checkProject(context.project, '"'),
                    checkCollection(MATERIALIZED_VIEW_PREFIX + materializedView.tableName)));
    return queryExecution.getResult().thenApply(result -> {
        if (result.isFailed()) {
            return result;
        }
        database.deleteMaterializedView(context.project, name);
        return QueryResult.empty();
    });
}
/**
 * Creates the project's user table when it does not already exist. The primary
 * key column type depends on whether user ids are numeric.
 *
 * @param project         project to provision
 * @param userIdIsNumeric true selects a serial primary key, false a text one
 * @throws RakamException if the DDL statement fails
 */
@Override
public void createProjectIfNotExists(String project, boolean userIdIsNumeric) {
    ProjectCollection userTable = getUserTable(project, false);
    String table = checkProject(userTable.project, '"') + "." + checkCollection(userTable.collection);
    String ddl = format("CREATE TABLE IF NOT EXISTS %s (" +
            " %s " + (userIdIsNumeric ? "serial" : "text") + " NOT NULL,\n" +
            " created_at timestamp NOT NULL,\n" +
            " PRIMARY KEY (%s)" +
            ")", table, PRIMARY_KEY, PRIMARY_KEY);
    QueryResult result = queryExecutor
            .executeRawStatement(new RequestContext(project, null), ddl)
            .getResult()
            .join();
    if (result.isFailed()) {
        throw new RakamException(result.getError().toString(), INTERNAL_SERVER_ERROR);
    }
}
/**
 * Increments a numeric user property, creating the column first when it does
 * not exist yet, and creating the user row when the UPDATE touches no rows.
 *
 * <p>Fixes: (1) a NullPointerException — when the column was missing the old
 * code created it and then still dereferenced the null {@code FieldType};
 * (2) a leaked JDBC {@code Statement}, now closed via try-with-resources;
 * (3) a garbled error message ("The property the is ...").
 *
 * @param conn     open JDBC connection (committed via {@code CommitConnection})
 * @param project  project owning the user
 * @param userId   user whose property is incremented
 * @param property property (column) name
 * @param value    amount to add
 * @throws SQLException   on JDBC failure
 * @throws RakamException if the existing property is not numeric
 */
public void incrementProperty(Connection conn, String project, Object userId, String property, double value)
        throws SQLException {
    Map<String, FieldType> columns = createMissingColumns(project, userId,
            ImmutableList.of(new SimpleImmutableEntry<>(property, new DoubleNode(value))),
            new CommitConnection(conn));
    FieldType fieldType = columns.get(property);
    if (fieldType == null) {
        // Column is entirely missing: create it (initialized numeric). Do NOT fall
        // through to the numeric check — fieldType is still null here.
        createColumn(project, userId, property, JsonHelper.numberNode(0));
    }
    else if (!fieldType.isNumeric()) {
        throw new RakamException(String.format("The property type is %s and it can't be incremented.",
                fieldType.name()), BAD_REQUEST);
    }
    String tableRef = checkTableColumn(stripName(property, "table column"));
    ProjectCollection userTable = getUserTable(project, false);
    String table = checkProject(userTable.project, '"') + "." + checkCollection(userTable.collection);
    try (Statement statement = conn.createStatement()) {
        // NOTE(review): this UPDATE has no WHERE clause on the user id, so it increments
        // the property for every row in the user table — confirm this is intended.
        int execute = statement.executeUpdate("update " + table + " set " + tableRef
                + " = " + value + " + coalesce(" + tableRef + ", 0)");
        if (execute == 0) {
            // No row updated: the user does not exist yet, create it with the initial value.
            create(project, userId, JsonHelper.jsonObject().put(property, value));
        }
    }
}
/**
 * Stores a batch of events, grouping them by collection and inserting each
 * group with a single multi-row {@code INSERT ... UNION ALL} statement.
 *
 * <p>Fix: the {@code InterruptedException} handler now restores the thread's
 * interrupt status instead of only printing the stack trace.
 *
 * @param events events to persist; assumed to all belong to one project
 *               (the project of the first event is used for every group)
 * @return {@code EventStore.SUCCESSFUL_BATCH} when every group is inserted
 * @throws IllegalStateException when an insert fails (after a 5-minute back-off)
 */
@Override
public int[] storeBatch(List<Event> events) {
    for (Map.Entry<String, List<Event>> collection :
            events.stream().collect(Collectors.groupingBy(e -> e.collection())).entrySet()) {
        List<Event> group = collection.getValue();
        QueryResult join = queryExecutor.executeRawStatement(
                String.format("INSERT INTO %s.%s.%s (_shard_time, %s) (%s)",
                        config.getColdStorageConnector(),
                        checkProject(events.get(0).project(), '"'),
                        checkCollection(collection.getKey()),
                        group.get(0).schema().stream()
                                .map(e -> ValidationUtil.checkCollection(e.getName()))
                                .collect(Collectors.joining(", ")),
                        group.stream()
                                .map(e -> buildValues(e.properties(), e.schema()))
                                .collect(Collectors.joining(" union all "))))
                .getResult().join();
        if (join.isFailed()) {
            // Deliberate 5-minute back-off before surfacing the failure — presumably to
            // throttle retries by the caller; TODO confirm this is intended.
            try {
                Thread.sleep(300000);
            }
            catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
            }
            throw new IllegalStateException(join.getError().message);
        }
    }
    return EventStore.SUCCESSFUL_BATCH;
}
/**
 * Verifies that an incremental real-time materialized view serves its cached
 * result within the update interval and refreshes once the interval expires,
 * so both data batches become visible in the count.
 */
@Test(invocationCount = 5)
public void testIncrementalRealtimeExpiration() throws Exception {
    String tableName = CryptUtil.generateRandomKey(10);
    populate(tableName);
    MaterializedView view = new MaterializedView("testview", "testview",
            format("select count(*) as count from %s", checkCollection(tableName)),
            Duration.ofSeconds(1), true, true, ImmutableMap.of());
    getMaterializedViewService().create(new RequestContext(PROJECT_NAME), view).join();
    QueryExecutorService queryService = getQueryService();

    // First read: the freshly created view must answer successfully.
    QueryResult firstRead = queryService
            .executeQuery(PROJECT_NAME, "select * from materialized.testview", ZoneId.systemDefault())
            .getResult().join();
    assertFalse(firstRead.isFailed());

    // Add a second batch and move the test clock past the 1-second update interval.
    populate(tableName);
    getClock().increment(Duration.ofSeconds(2).toMillis());

    QueryResult secondRead = queryService
            .executeQuery(PROJECT_NAME, "select * from materialized.testview", ZoneId.systemDefault())
            .getResult().join();
    assertFalse(secondRead.isFailed());

    // After expiry the view must reflect both populate() batches.
    int total = 0;
    for (List<Object> row : secondRead.getResult()) {
        total += ((Number) row.get(0)).intValue();
    }
    assertEquals(SCALE_FACTOR * 2, total);
}