/**
 * Uploads the zipped segment data and its descriptor file to Azure blob storage, then returns
 * the segment metadata updated with the uploaded size, an Azure load spec, and the binary
 * version.
 *
 * @param segment               segment to annotate with the upload results
 * @param version               binary version to record on the returned segment
 * @param compressedSegmentData local zip of the segment data; uploaded to azurePaths.get("index")
 * @param descriptorFile        local descriptor JSON; uploaded to azurePaths.get("descriptor")
 * @param azurePaths            blob paths keyed by "index" and "descriptor"
 * @return the input segment with size, load spec, and binary version filled in
 * @throws StorageException   on Azure storage failures
 * @throws IOException        on local I/O failures
 * @throws URISyntaxException if a blob URI cannot be formed
 */
public DataSegment uploadDataSegment(
    DataSegment segment,
    final int version,
    final File compressedSegmentData,
    final File descriptorFile,
    final Map<String, String> azurePaths
) throws StorageException, IOException, URISyntaxException
{
  azureStorage.uploadBlob(compressedSegmentData, config.getContainer(), azurePaths.get("index"));
  azureStorage.uploadBlob(descriptorFile, config.getContainer(), azurePaths.get("descriptor"));

  final DataSegment outSegment = segment
      .withSize(compressedSegmentData.length())
      .withLoadSpec(
          ImmutableMap.<String, Object>of(
              "type", AzureStorageDruidModule.SCHEME,
              "containerName", config.getContainer(),
              "blobPath", azurePaths.get("index")
          )
      )
      .withBinaryVersion(version);

  // Fix: File.delete() results were previously ignored, so leaked temp files were invisible.
  // Deletion failure is not fatal (the upload already succeeded), but it is now logged.
  log.info("Deleting file [%s]", compressedSegmentData);
  if (!compressedSegmentData.delete()) {
    log.info("Failed to delete file [%s]", compressedSegmentData);
  }
  log.info("Deleting file [%s]", descriptorFile);
  if (!descriptorFile.delete()) {
    log.info("Failed to delete file [%s]", descriptorFile);
  }

  return outSegment;
}
// NOTE(review): this fragment appears garbled — the leading ".withSize(size)
// .withBinaryVersion(...), outDir" duplicates the argument chain of the createDescriptorFile(...)
// call that follows and does not parse as Java; it looks like a merge/extraction artifact.
// The intended code is presumably only the createDescriptorFile(...) call writing the
// descriptor to tmpDescriptorFile — confirm against the original file before relying on this.
.withSize(size) .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)), outDir DataSegment dataSegment = createDescriptorFile( segment.withLoadSpec(makeLoadSpec(new File(outDir, INDEX_FILENAME).toURI())) .withSize(size) .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)), tmpDescriptorFile
/**
 * Pushes a segment directory into local deep storage. If the segment is already at its final
 * location, only the descriptor is (re)written; otherwise the directory is zipped into
 * {@code index.zip} under the storage directory.
 *
 * @param dataSegmentFile directory containing the segment files to push
 * @param segment         segment metadata to annotate and persist in the descriptor
 * @return the segment with load spec, size, and binary version filled in
 * @throws IOException on any local I/O failure
 */
@Override
public DataSegment push(File dataSegmentFile, DataSegment segment) throws IOException
{
  File outDir = new File(config.getStorageDirectory(), DataSegmentPusherUtil.getStorageDir(segment));

  if (dataSegmentFile.equals(outDir)) {
    // Segment already lives at its final location: sum the on-disk size and write the descriptor.
    // Fix: listFiles() returns null on I/O error or when the path is not a directory, which
    // previously caused an NPE in the for-loop; fail with a clear IOException instead.
    File[] segmentFiles = dataSegmentFile.listFiles();
    if (segmentFiles == null) {
      throw new IOException("Cannot list files in [" + dataSegmentFile + "]");
    }
    long size = 0;
    for (File file : segmentFiles) {
      size += file.length();
    }
    return createDescriptorFile(
        segment.withLoadSpec(makeLoadSpec(outDir))
               .withSize(size)
               .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
        outDir
    );
  }

  // Fix: mkdirs() result was ignored. It returns false both on failure and when the directory
  // already exists, so only treat it as an error when the directory is still absent.
  if (!outDir.mkdirs() && !outDir.isDirectory()) {
    throw new IOException("Cannot create directory [" + outDir + "]");
  }
  File outFile = new File(outDir, "index.zip");
  log.info("Compressing files from[%s] to [%s]", dataSegmentFile, outFile);
  long size = CompressionUtils.zip(dataSegmentFile, outFile);

  return createDescriptorFile(
      segment.withLoadSpec(makeLoadSpec(outFile))
             .withSize(size)
             .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
      outDir
  );
}
@Override
public DataSegment call() throws Exception
{
  // Upload the zipped segment to S3 under the computed segment path.
  S3Object toPush = new S3Object(zipOutFile);
  putObject(config.getBucket(), s3Path, toPush);

  // Rebuild the segment metadata to point at the uploaded object.
  final DataSegment outSegment = inSegment.withSize(indexSize)
                                          .withLoadSpec(makeLoadSpec(config.getBucket(), toPush.getKey()))
                                          .withBinaryVersion(SegmentUtils.getVersionFromDir(indexFilesDir));

  // Serialize the descriptor to a local temp file, then upload it next to the segment.
  File descriptorFile = File.createTempFile("druid", "descriptor.json");
  // Avoid using Guava in DataSegmentPushers because they might be used with very diverse Guava versions in
  // runtime, and because Guava deletes methods over time, that causes incompatibilities.
  Files.write(descriptorFile.toPath(), jsonMapper.writeValueAsBytes(outSegment));
  S3Object descriptorObject = new S3Object(descriptorFile);
  putObject(
      config.getBucket(),
      S3Utils.descriptorPathForSegmentPath(s3Path),
      descriptorObject
  );

  // Best-effort cleanup of the local temp artifacts.
  // NOTE(review): delete() results are ignored, and if an upload above throws, neither file is
  // deleted here — presumably the caller (e.g. a retry wrapper) owns cleanup in that case;
  // confirm before changing.
  log.info("Deleting zipped index File[%s]", zipOutFile);
  zipOutFile.delete();
  log.info("Deleting descriptor file[%s]", descriptorFile);
  descriptorFile.delete();

  return outSegment;
}
}
// Build the final segment metadata: a load spec pointing at the pushed index location, the
// accumulated size, and the binary version read from the merged segment directory.
final DataSegment finalSegment = segmentTemplate
    .withLoadSpec(dataSegmentPusher.makeLoadSpec(indexOutURI))
    .withSize(size.get())
    .withBinaryVersion(SegmentUtils.getVersionFromDir(mergedBase));
// Annotate the segment with the uploaded index size and a load spec for this storage scheme.
// NOTE(review): "c*" is presumably this pusher's scheme type identifier and "key" the storage
// key for the uploaded index — confirm against the matching segment puller. This fragment ends
// mid-expression (the call is not closed in the visible source).
segment = segment.withSize(indexSize)
                 .withLoadSpec(
                     ImmutableMap.<String, Object> of("type", "c*", "key", key)