/**
 * Archives the given segment by delegating to the archiver selected for it.
 *
 * @param segment the segment to archive
 * @return the segment as reported back by the archiver
 * @throws SegmentLoadingException if the archiver lookup or the archive operation fails
 */
@Override
public DataSegment archive(DataSegment segment) throws SegmentLoadingException
{
  return getArchiver(segment).archive(segment);
}
/**
 * Creates a SegmentLoader whose cache lives entirely under {@code storageDir}.
 *
 * @param storageDir directory to use as the loader's single storage location
 * @return a copy of the base loader configured with that one location
 */
public SegmentLoader manufacturate(File storageDir)
{
  final StorageLocationConfig locationConfig = new StorageLocationConfig().setPath(storageDir);
  final SegmentLoaderConfig singleLocationConfig =
      new SegmentLoaderConfig().withLocations(Collections.singletonList(locationConfig));
  return loader.withConfig(singleLocationConfig);
}
}
/**
 * Loads the queryable {@link Segment} adapter for the given data segment.
 *
 * @param segment the data segment to load
 * @return the loaded adapter, never null
 * @throws SegmentLoadingException if loading fails (after cleaning up any partial
 *                                 download) or if the loader returns null
 */
private Segment getAdapter(final DataSegment segment) throws SegmentLoadingException
{
  final Segment segmentAdapter;
  try {
    segmentAdapter = segmentLoader.getSegment(segment);
  }
  catch (SegmentLoadingException e) {
    // Loading may have left partial files on disk; clean them up before propagating.
    segmentLoader.cleanup(segment);
    throw e;
  }

  if (segmentAdapter == null) {
    throw new SegmentLoadingException("Null adapter from loadSpec[%s]", segment.getLoadSpec());
  }
  return segmentAdapter;
}
/**
 * Returns the local directory holding the segment's files, downloading the segment
 * into a storage location first if it is not already cached.
 *
 * @param segment the segment whose files are requested
 * @return the directory containing the segment files
 * @throws SegmentLoadingException if the segment cannot be downloaded
 */
@Override
public File getSegmentFiles(DataSegment segment) throws SegmentLoadingException
{
  final String storageDir = DataSegmentPusher.getDefaultStorageDir(segment, false);
  StorageLocation location = findStorageLocationIfLoaded(segment);
  if (location == null) {
    // Not cached anywhere locally yet; pull it down (with retries) into some location.
    location = loadSegmentWithRetry(segment, storageDir);
  }
  // Account for the segment's size against the chosen location.
  location.addSegment(segment);
  return new File(location.getPath(), storageDir);
}
/**
 * Pulls this load spec's segment files into {@code outDir}.
 *
 * @param outDir destination directory for the segment files
 * @return a result carrying the number of files pulled
 * @throws SegmentLoadingException if the pull fails
 */
@Override
public LoadSpecResult loadSegment(final File outDir) throws SegmentLoadingException
{
  final int pulledFileCount = puller.getSegmentFiles(path.toFile(), outDir).size();
  return new LoadSpecResult(pulledFileCount);
}
}
/**
 * Finds the storage location that already holds the given segment on local disk.
 *
 * @param segment the segment to look for
 * @return the first (in sorted order) location whose cache directory for this segment
 *         exists, or {@code null} if the segment is not cached in any location
 */
private StorageLocation findStorageLocationIfLoaded(final DataSegment segment)
{
  // The relative storage path depends only on the segment, so compute it once
  // instead of once per location (it was previously recomputed inside the loop).
  final String storageDir = DataSegmentPusher.getDefaultStorageDir(segment, false);
  for (StorageLocation location : getSortedList(locations)) {
    if (new File(location.getPath(), storageDir).exists()) {
      return location;
    }
  }
  return null;
}
/**
 * Asserts that the location reports {@code maxSize} bytes available and can handle
 * segments of every size from 0 up to and including {@code maxSize}.
 *
 * @param maxSize expected available capacity of the location, in bytes
 * @param loc     the storage location under test
 */
private void verifyLoc(long maxSize, StorageLocation loc)
{
  Assert.assertEquals(maxSize, loc.available());
  // Use a long counter: maxSize is a long, and the previous int counter would
  // overflow and loop forever if maxSize ever exceeded Integer.MAX_VALUE.
  for (long i = 0; i <= maxSize; ++i) {
    Assert.assertTrue(String.valueOf(i), loc.canHandle(makeSegment("2013/2014", i)));
  }
}
/**
 * Returns the relative storage directory for the given segment.
 *
 * @param dataSegment   the segment whose storage path is requested
 * @param useUniquePath whether to append a unique suffix to the path
 * @return the relative storage directory string
 */
default String getStorageDir(DataSegment dataSegment, boolean useUniquePath)
{
  return getDefaultStorageDir(dataSegment, useUniquePath);
}
/**
 * Returns a copy of this config with the given storage locations, preserving the
 * other settings ({@code deleteOnRemove}, {@code infoDir}) from this instance.
 *
 * @param locations the storage locations for the new config
 * @return a new SegmentLoaderConfig; this instance is not modified
 */
public SegmentLoaderConfig withLocations(List<StorageLocationConfig> locations)
{
  final SegmentLoaderConfig copy = new SegmentLoaderConfig();
  copy.locations = Lists.newArrayList(locations);
  copy.deleteOnRemove = this.deleteOnRemove;
  copy.infoDir = this.infoDir;
  return copy;
}
/**
 * Restores a previously archived segment by delegating to the archiver selected for it.
 *
 * @param segment the segment to restore
 * @return the segment as reported back by the archiver
 * @throws SegmentLoadingException if the archiver lookup or the restore operation fails
 */
@Override
public DataSegment restore(DataSegment segment) throws SegmentLoadingException
{
  return getArchiver(segment).restore(segment);
}
/**
 * Returns a new cache manager that shares this manager's IndexIO and ObjectMapper
 * but uses the supplied loader config.
 *
 * @param config the config for the new cache manager
 * @return a new SegmentLoaderLocalCacheManager; this instance is not modified
 */
public SegmentLoaderLocalCacheManager withConfig(SegmentLoaderConfig config)
{
  return new SegmentLoaderLocalCacheManager(indexIO, config, jsonMapper);
}
/**
 * Pushes the segment via the parent pusher and records the RETURNED (pushed)
 * segment in {@code segments} for later inspection.
 */
@Override
public DataSegment push(final File dataSegmentFile, final DataSegment segment, final boolean useUniquePath)
    throws IOException
{
  final DataSegment pushed = super.push(dataSegmentFile, segment, useUniquePath);
  segments.add(pushed);
  return pushed;
}
};
/**
 * @deprecated backwards-compatibility shim that should be removed on the next major release;
 * use {@link #getStorageDir(DataSegment, boolean)} instead.
 */
@Deprecated default String getStorageDir(DataSegment dataSegment) { return getStorageDir(dataSegment, false); }
/**
 * Reports whether the segment is already cached in any local storage location.
 *
 * @param segment the segment to check
 * @return true if some location's cache directory for this segment exists on disk
 */
@Override
public boolean isSegmentLoaded(final DataSegment segment)
{
  final StorageLocation cachedLocation = findStorageLocationIfLoaded(segment);
  return cachedLocation != null;
}
/**
 * Reports whether the segment is present in the local segment cache, delegating
 * to the underlying segment loader.
 *
 * @param segment the segment to check
 * @return true if the loader reports the segment as loaded
 */
public boolean isSegmentCached(final DataSegment segment)
{
  return segmentLoader.isSegmentLoaded(segment);
}
/**
 * Opens an input stream for the object identified by the given URI.
 *
 * @param uri the location to read from
 * @return an open input stream; the caller is responsible for closing it
 * @throws IOException if the stream cannot be opened
 */
@Override
public InputStream getInputStream(URI uri) throws IOException
{
  return buildFileObject(uri).openInputStream();
}
/**
 * Records the INCOMING segment (as passed by the caller, before any changes the
 * parent pusher may make) in {@code segments}, then delegates the actual push.
 */
@Override
public DataSegment push(File file, DataSegment segment, boolean useUniquePath) throws IOException
{
  // Note: intentionally records the pre-push segment, then pushes.
  segments.add(segment);
  return super.push(file, segment, useUniquePath);
}
};
default String makeIndexPathName(DataSegment dataSegment, String indexName) { // This is only called from Hadoop batch which doesn't require unique segment paths so set useUniquePath=false return StringUtils.format("./%s/%s", getStorageDir(dataSegment, false), indexName); }