@Override public <T> List<TileData<T>> readTiles(String pyramidId, TileSerializer<T> serializer, Iterable<TileIndex> tiles, JSONObject properties) throws IOException{ List<TileData<T>> results = new LinkedList<TileData<T>>(); // iterate over the tile indices for (TileIndex tileIndex: tiles) { Rectangle2D rect = tilePyramid.getTileBounds(tileIndex); // get minimum/start time, max/end time double startX = rect.getX(); double endX = rect.getMaxX(); double startY = rect.getMaxY(); double endY = rect.getY(); SearchResponse sr = timeFilteredRequest(startX, endX, startY, endY, properties); if (responseHasData(sr)) { Histogram date_agg = sr.getAggregations().get("xField"); Map<Integer, Map> tileMap = parseAggregations(date_agg, tileIndex); SparseTileData tileData = new SparseTileData(tileIndex,tileMap, 0); results.add(tileData); } } return results; }
/**
 * Reads tile data without any extra filter properties.
 *
 * Bug fix: this overload previously called itself (same three arguments),
 * which recursed infinitely and crashed with a StackOverflowError. It now
 * delegates to the four-argument overload with {@code null} properties.
 * NOTE(review): assumes timeFilteredRequest tolerates a null properties
 * object — confirm downstream.
 */
@Override
public <T> List<TileData<T>> readTiles(String pyramidId, TileSerializer<T> serializer, Iterable<TileIndex> tiles) throws IOException {
    return readTiles(pyramidId, serializer, tiles, null);
}
private List<Double> readMetaMaxFromElasticsearch() { Rectangle2D bounds = tilePyramid.getBounds(); List<Double> maxCountList = new ArrayList<>(); double pixelsPerTile = 256.0; for ( int i = 0; i < this.numZoomlevels ; i++ ){ // calculated by dividing the entirety of the dataset bounds // by the number of pixels in a tile and the number of tiles at that Zoom level double tilesAtZoomLevel = Math.pow(2, i); double xInterval = bounds.getWidth() / (pixelsPerTile * tilesAtZoomLevel); double yInterval = bounds.getHeight() / (pixelsPerTile * tilesAtZoomLevel); if (xInterval < 1){ xInterval = 1; } if (yInterval < 1){ yInterval = 1; } // search ES based off the calculated interval double maxDocCount = searchForMaxBucketValue(xInterval, yInterval); maxCountList.add(i, maxDocCount); } while(maxCountList.size() < 15){ maxCountList.add(maxCountList.get(maxCountList.size()-1)*0.75); } return maxCountList; }
// Fragment of metadata initialization (enclosing method and the matching
// catch/close of this try are outside the visible chunk).
try {
    // Query ES once for the per-zoom-level maxima and cache them on this instance.
    List<Double> maxValues = readMetaMaxFromElasticsearch();
    this.maxValues = maxValues;
    // NOTE(review): "minzoom" is hard-coded to 1 and "tilesize" to 256 —
    // confirm these match the tile pyramid configuration used elsewhere.
    metaDataJSON.put("minzoom", 1);
    metaDataJSON.put("tilesize", 256);
    // Serialize the maxima into the "meta" entry of the metadata JSON.
    metaDataJSON.put("meta", formatMaxValuesAsMetadataJSONString(maxValues));
// Fragment of the tile-query builder (the enclosing method and the closing
// parentheses of this chain are outside the visible chunk): a 2-D histogram —
// an x-axis histogram with a nested y-axis histogram, keyed "xField"/"yField"
// to match the aggregation names read back in readTiles.
baseQuery(boundaryFilter)
    .addAggregation(
        AggregationBuilders.histogram("xField")
            .field(this.xField)
            .interval(getHistogramIntervalFromBounds(startX, endX))
            .minDocCount(1) // skip empty buckets
            .subAggregation(
                AggregationBuilders.histogram("yField")
                    .field(this.yField)
                    // NOTE(review): y bounds are passed as (endY, startY) —
                    // presumably intentional since the y range is inverted
                    // upstream (start = maxY); confirm.
                    .interval(getHistogramIntervalFromBounds(endY, startY))
                    .minDocCount(1)
/**
 * Builds an ElasticsearchPyramidIO from this factory's configured properties:
 * cluster connection details, index name, the two aggregation field names,
 * the tile pyramid produced by the root factory, and the zoom-level count.
 *
 * @return a newly constructed ElasticsearchPyramidIO
 * @throws ConfigurationException if a required property cannot be resolved
 */
@Override
protected PyramidIO createInstance() throws ConfigurationException {
    LOGGER.info("ES pyramid factory.");

    // Connection settings.
    String cluster = getPropertyValue(ES_CLUSTER_NAME);
    String address = getPropertyValue(ES_TRANSPORT_ADDRESS);
    int port = getPropertyValue(ES_TRANSPORT_PORT);

    // Index / field settings.
    String index = getPropertyValue(ES_INDEX);
    String fieldX = getPropertyValue(ES_FIELD_X);
    String fieldY = getPropertyValue(ES_FIELD_Y);
    Integer zoomLevels = getPropertyValue(NUM_ZOOM_LEVELS);

    // The tile pyramid is produced by the root factory, not read from properties.
    TilePyramid pyramid = getRoot().produce(TilePyramid.class);

    return new ElasticsearchPyramidIO(cluster, index, fieldX, fieldY, address, port, pyramid, zoomLevels);
}
}
private double searchForMaxBucketValue(double intervalX, double intervalY) { // build a query with a 2d aggregation on the xField and yField // change the interval SearchRequestBuilder metaDataQuery = this.client.prepareSearch(this.index) .setTypes("datum") .setSearchType(SearchType.COUNT) .addAggregation( AggregationBuilders.histogram("xAgg") .field(this.xField) .interval((long) intervalX) .order(Histogram.Order.COUNT_DESC) .subAggregation( AggregationBuilders.histogram("yAgg") .field(this.yField) .interval((long) intervalY) .order(Histogram.Order.COUNT_DESC) ) ); SearchResponse searchResponse = metaDataQuery.execute().actionGet(); Histogram agg = searchResponse.getAggregations().get("xAgg"); return getMaxValueFrom2DHistogram(agg); }