@Override
public String apply(DataSegment segment)
{
  // Project a DataSegment onto its unique identifier string.
  final String segmentId = segment.getIdentifier();
  return segmentId;
}
}
@Override
public String apply(DataSegment input)
{
  // Map each segment to its identifier string.
  final String id = input.getIdentifier();
  return id;
}
}
@Override
public String apply(DataSegment input)
{
  // Map each segment to its identifier string.
  final String id = input.getIdentifier();
  return id;
}
}
@Override
public String apply(DataSegment segment)
{
  // Project a DataSegment onto its unique identifier string.
  final String segmentId = segment.getIdentifier();
  return segmentId;
}
}
@Override
public String apply(DataSegment dataSegment)
{
  // Map each segment to its identifier string.
  final String segmentId = dataSegment.getIdentifier();
  return segmentId;
}
};
@Override
public boolean equals(Object o)
{
  // Two DataSegments are equal iff their identifier strings match.
  if (!(o instanceof DataSegment)) {
    return false;
  }
  return getIdentifier().equals(((DataSegment) o).getIdentifier());
}
@Override
public int compareTo(DataSegment dataSegment)
{
  // Order segments lexicographically by identifier; consistent with equals(),
  // which also compares identifiers only.
  final String ours = getIdentifier();
  final String theirs = dataSegment.getIdentifier();
  return ours.compareTo(theirs);
}
@Override
public int hashCode()
{
  // Hash must agree with equals(), which compares identifiers only.
  final String id = getIdentifier();
  return id.hashCode();
}
public DruidDataSource addSegment(DataSegment dataSegment)
{
  // Register (or overwrite) the segment under its identifier; returns this
  // for fluent chaining.
  final String segmentId = dataSegment.getIdentifier();
  idToSegmentMap.put(segmentId, dataSegment);
  return this;
}
@Override
public String asString()
{
  // Human-readable description of this load request.
  final String segmentId = segment.getIdentifier();
  return StringUtils.format("LOAD: %s", segmentId);
}
@Override
public ServerView.CallbackAction segmentRemoved(DruidServerMetadata server, DataSegment segment)
{
  // Evict cached results for the segment that just left this server, then
  // keep listening for further callbacks.
  final String segmentId = segment.getIdentifier();
  CachingClusteredClient.this.cache.close(segmentId);
  return ServerView.CallbackAction.CONTINUE;
}
}
@Override
public String apply(Sink input)
{
  // Project a sink onto the identifier of its backing segment.
  final DataSegment backing = input.getSegment();
  return backing.getIdentifier();
}
}
@Override
public boolean isSegmentLoadedByServer(String serverKey, DataSegment segment)
{
  // Unknown server key => trivially not loaded.
  final DruidServerHolder holder = servers.get(serverKey);
  if (holder == null) {
    return false;
  }
  // Loaded iff the server reports a segment under this identifier.
  return holder.druidServer.getSegment(segment.getIdentifier()) != null;
}
@Override
public String asString()
{
  // Human-readable description of this drop request.
  final String segmentId = segment.getIdentifier();
  return StringUtils.format("DROP: %s", segmentId);
}
@Override
public boolean isSegmentLoadedByServer(String serverKey, DataSegment segment)
{
  // Looks up the server in the inventory view and asks whether it currently
  // serves this segment (matched by segment identifier).
  try {
    final DruidServer server = getInventoryValue(serverKey);
    return server != null && server.getSegment(segment.getIdentifier()) != null;
  }
  catch (RuntimeException ex) {
    // Inline replacement for the deprecated Guava Throwables.propagate(ex):
    // unchecked exceptions are rethrown as-is...
    throw ex;
  }
  catch (Exception ex) {
    // ...and checked exceptions are wrapped, preserving the cause.
    throw new RuntimeException(ex);
  }
}
@Override
public Segment factorize(DataSegment dataSegment, File parentDir) throws SegmentLoadingException
{
  // Load the persisted index under parentDir and wrap it as a queryable segment.
  try {
    final String segmentId = dataSegment.getIdentifier();
    return new QueryableIndexSegment(segmentId, indexIO.loadIndex(parentDir));
  }
  catch (IOException e) {
    // Wrap I/O failures, preserving the cause and surfacing its message.
    throw new SegmentLoadingException(e, "%s", e.getMessage());
  }
}
}
@Override
public ScheduledExecutors.Signal call()
{
  // Time to abandon this sink: log it, hand it off, and stop rescheduling.
  final String segmentId = sink.getSegment().getIdentifier();
  log.info("Abandoning segment %s", segmentId);
  abandonSegment(truncatedTime, sink);
  return ScheduledExecutors.Signal.STOP;
}
}
public void removeSegment(DataSegment segment)
{
  // Log first for traceability, then delete the segment's metadata entry.
  log.info("Removing Segment[%s]", segment);
  final String segmentId = segment.getIdentifier();
  metadataSegmentManager.removeSegment(segment.getDataSource(), segmentId);
}
private void deleteSegment(final Handle handle, final DataSegment segment)
{
  // Remove the segment's row from the segments table, keyed by identifier.
  final String sql = StringUtils.format("DELETE from %s WHERE id = :id", dbTables.getSegmentsTable());
  handle.createStatement(sql)
        .bind("id", segment.getIdentifier())
        .execute();
}
protected void reduceLifetimes(String tier)
{
  // Age every in-flight balancer move for this tier; raise an alert for any
  // move whose lifetime has run out (i.e. it appears stuck in the queue).
  for (BalancerSegmentHolder moving : currentlyMovingSegments.get(tier).values()) {
    moving.reduceLifetime();
    if (moving.getLifetime() <= 0) {
      log.makeAlert("[%s]: Balancer move segments queue has a segment stuck", tier)
         .addData("segment", moving.getSegment().getIdentifier())
         .addData("server", moving.getFromServer().getMetadata())
         .emit();
    }
  }
}