/**
 * Builds the path of the JSON descriptor file for a pushed segment.
 *
 * @param pushedSegment the pushed data segment object
 * @param segmentsDescriptorDir actual directory path for descriptors
 *
 * @return the descriptor file path, with a sanitized file name
 */
public static Path makeSegmentDescriptorOutputPath(DataSegment pushedSegment, Path segmentsDescriptorDir)
{
  // Colons are stripped from the identifier — presumably for filesystem
  // (e.g. HDFS/Windows) path compatibility; confirm against the pusher.
  final String descriptorFileName = String.format("%s.json", pushedSegment.getIdentifier().replace(":", ""));
  return new Path(segmentsDescriptorDir, descriptorFileName);
}
// Removes the given segment's stored data (per the method name; the body
// continues beyond this excerpt). Package-private and annotated so tests can
// invoke it directly.
// NOTE(review): 'path' is presumably a field holding the segment's storage
// location — confirm against the full class.
@VisibleForTesting void deleteSegment(DataSegment segment) throws SegmentLoadingException { LOG.info("removing segment {}, located at path {}", segment.getIdentifier(), path);
coordinatorAddress, dataSegment.getDataSource(), dataSegment.getIdentifier())); } catch (MalformedURLException e) {
  // NOTE(review): Throwables.propagate(e) always throws at runtime, but the
  // idiomatic form is 'throw Throwables.propagate(e)' so the compiler can see
  // this branch terminate; propagate() is also deprecated in newer Guava.
  Throwables.propagate(e);
deleteSegment(dataSegment); } catch (SegmentLoadingException e) {
  // Best-effort deletion: the failure is logged (cause preserved via the
  // trailing 'e') rather than rethrown.
  // NOTE(review): surrounding try/catch continues beyond this excerpt.
  LOG.error(String.format("Error while deleting segment [%s]", dataSegment.getIdentifier()), e);
// Queue one row for this segment: its id, datasource, and a creation timestamp.
// NOTE(review): new DateTime() uses the JVM default time zone (Joda) — confirm
// whether created_date is expected to be UTC.
batch.add(new ImmutableMap.Builder<String, Object>().put("id", segment.getIdentifier())
    .put("dataSource", segment.getDataSource())
    .put("created_date", new DateTime().toString())
    .build());
LOG.info("Published {}", segment.getIdentifier());
@Override
public String apply(DataSegment dataSegment)
{
  // Project the segment down to its identifier string.
  final String segmentId = dataSegment.getIdentifier();
  return segmentId;
}
};
@Override
public String apply(DataSegment segment)
{
  // Map each segment to its identifier.
  final String id = segment.getIdentifier();
  return id;
}
}
@Override
public String apply(DataSegment segment)
{
  // Transformation used to extract the identifier from a segment.
  final String identifier = segment.getIdentifier();
  return identifier;
}
}
@Override
public String apply(DataSegment segment)
{
  // Resolve the segment to its identifier string.
  final String result = segment.getIdentifier();
  return result;
}
}
@Override
public String apply(@Nullable DataSegment segment)
{
  // NOTE(review): the parameter is annotated @Nullable but is dereferenced
  // unconditionally — a null input would throw NPE here. Confirm callers never
  // supply null, or drop the annotation.
  return segment.getIdentifier();
}
}
@Override
public String apply(@Nullable DataSegment input)
{
  // NOTE(review): @Nullable parameter dereferenced without a null check — NPE
  // if null is ever passed; verify the callers or remove the annotation.
  return input.getIdentifier();
}
}
@Override
public String asString()
{
  // Human-readable description of this action: "LOAD: <segment id>".
  // 'segment' is a field of the enclosing class (not visible in this excerpt).
  return StringUtils.format("LOAD: %s", segment.getIdentifier());
}
@Override
public String apply(Sink input)
{
  // Identify a sink by the identifier of the segment it holds.
  return input.getSegment().getIdentifier();
}
}
@Override
public boolean isSegmentLoadedByServer(String serverKey, DataSegment segment)
{
  try {
    // A server absent from the inventory cannot have the segment loaded.
    DruidServer server = getInventoryValue(serverKey);
    return server != null && server.getSegment(segment.getIdentifier()) != null;
  }
  catch (Exception ex) {
    // NOTE(review): Throwables.propagate is deprecated in newer Guava; the
    // modern equivalent is throwIfUnchecked + new RuntimeException(ex).
    throw Throwables.propagate(ex);
  }
}
@Override
public Segment factorize(DataSegment dataSegment, File parentDir) throws SegmentLoadingException
{
  try {
    // Load the on-disk index from parentDir and wrap it as a queryable segment
    // keyed by the segment's identifier.
    return new QueryableIndexSegment(dataSegment.getIdentifier(), indexIO.loadIndex(parentDir));
  }
  catch (IOException e) {
    // The "%s" indirection keeps any '%' characters inside the IOException
    // message from being misread as format specifiers; cause is preserved.
    throw new SegmentLoadingException(e, "%s", e.getMessage());
  }
}
}
@Override
public List<Map<String, Object>> withHandle(Handle handle) throws Exception
{
  // Look up whether this segment's id already exists in the segments table.
  // The table name is interpolated from trusted configuration; the id itself
  // is bound as a query parameter, so it is not injectable.
  return handle.createQuery(
      String.format("SELECT id FROM %s WHERE id=:id", config.getSegmentsTable())
  )
      .bind("id", segment.getIdentifier())
      .list();
}
}
@Override
public SegmentToMergeHolder apply(TimelineObjectHolder<String, DataSegment> input)
{
  // Take the segment in chunk 0 of this timeline entry.
  // NOTE(review): assumes chunk 0 always exists — confirm holders are never empty.
  final DataSegment segment = input.getObject().getChunk(0).getObject();
  // Fail fast (with the segment id in the message) if no local file was
  // registered for this segment.
  final File file = Preconditions.checkNotNull(
      segments.get(segment),
      "File for segment %s",
      segment.getIdentifier()
  );
  return new SegmentToMergeHolder(segment, input.getInterval(), file);
}
}
@Override
public ScheduledExecutors.Signal call() throws Exception
{
  // Abandon the sink once, then tell the scheduler not to run this task again.
  log.info("Abandoning segment %s", sink.getSegment().getIdentifier());
  abandonSegment(truncatedTime, sink);
  return ScheduledExecutors.Signal.STOP;
}
}
/**
 * Removes the given segment from the database segment manager, keyed by its
 * datasource and identifier.
 *
 * @param segment the segment to remove
 */
public void removeSegment(DataSegment segment)
{
  log.info("Removing Segment[%s]", segment);
  final String segmentId = segment.getIdentifier();
  databaseSegmentManager.removeSegment(segment.getDataSource(), segmentId);
}
@Override
public void execute()
{
  // Unregister the pending replicant-creation entry for this segment on the
  // holder's server within its tier — presumably a completion/cancellation
  // callback for a replication request; confirm against the caller.
  replicationManager.unregisterReplicantCreation(
      tier,
      segment.getIdentifier(),
      holder.getServer().getHost()
  );
}
}