@Override
public DataSegment apply(DataSegment input)
{
  DataSegment rv = input;
  if (config.isSkipDimensionsAndMetrics()) {
    rv = rv.withDimensions(null).withMetrics(null);
  }
  if (config.isSkipLoadSpec()) {
    rv = rv.withLoadSpec(null);
  }
  return rv;
}
};
public DataSegment uploadDataSegment(
    DataSegment segment,
    final int version,
    final File compressedSegmentData,
    final File descriptorFile,
    final Map<String, String> azurePaths
) throws StorageException, IOException, URISyntaxException
{
  // Upload the zipped segment and its descriptor, then rewrite the load
  // spec to point at the uploaded Azure blob.
  azureStorage.uploadBlob(compressedSegmentData, config.getContainer(), azurePaths.get("index"));
  azureStorage.uploadBlob(descriptorFile, config.getContainer(), azurePaths.get("descriptor"));

  final DataSegment outSegment = segment
      .withSize(compressedSegmentData.length())
      .withLoadSpec(
          ImmutableMap.<String, Object>of(
              "type",
              AzureStorageDruidModule.SCHEME,
              "containerName",
              config.getContainer(),
              "blobPath",
              azurePaths.get("index")
          )
      )
      .withBinaryVersion(version);

  // Clean up the local temporary files now that both blobs are uploaded.
  log.info("Deleting file [%s]", compressedSegmentData);
  compressedSegmentData.delete();

  log.info("Deleting file [%s]", descriptorFile);
  descriptorFile.delete();

  return outSegment;
}
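// For reference, a minimal sketch of how the azurePaths map consumed above
// could be built. The "index" and "descriptor" keys are taken from the method
// body; the blob path layout itself is an assumption, not necessarily this
// module's real convention.
import java.util.HashMap;
import java.util.Map;

final class AzurePathsSketch
{
  static Map<String, String> makeAzurePaths(String segmentStorageDir)
  {
    Map<String, String> azurePaths = new HashMap<>();
    azurePaths.put("index", segmentStorageDir + "/index.zip");             // illustrative layout
    azurePaths.put("descriptor", segmentStorageDir + "/descriptor.json");  // illustrative layout
    return azurePaths;
  }
}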
segment.withLoadSpec(makeLoadSpec(outDir.toURI()))
       .withSize(size)
       .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),

segment.withLoadSpec(makeLoadSpec(new File(outDir, INDEX_FILENAME).toURI()))
       .withSize(size)
       .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
@Override
public DataSegment push(File dataSegmentFile, DataSegment segment) throws IOException
{
  File outDir = new File(config.getStorageDirectory(), DataSegmentPusherUtil.getStorageDir(segment));

  // If the segment is already in the target directory, just total up the
  // file sizes and write a descriptor alongside it.
  if (dataSegmentFile.equals(outDir)) {
    long size = 0;
    for (File file : dataSegmentFile.listFiles()) {
      size += file.length();
    }

    return createDescriptorFile(
        segment.withLoadSpec(makeLoadSpec(outDir))
               .withSize(size)
               .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
        outDir
    );
  }

  // Otherwise zip the segment files into the target directory first.
  outDir.mkdirs();
  File outFile = new File(outDir, "index.zip");
  log.info("Compressing files from [%s] to [%s]", dataSegmentFile, outFile);
  long size = CompressionUtils.zip(dataSegmentFile, outFile);

  return createDescriptorFile(
      segment.withLoadSpec(makeLoadSpec(outFile))
             .withSize(size)
             .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
      outDir
  );
}
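// The makeLoadSpec helper is not shown above. A minimal sketch of what it
// could return for this local pusher, assuming the conventional "local"
// load-spec type with a "path" entry; both key names are assumptions here,
// not confirmed by the snippet.
import java.io.File;
import java.util.LinkedHashMap;
import java.util.Map;

final class LocalLoadSpecSketch
{
  // Assumed shape of a local load spec: a type tag plus the absolute
  // path of the pushed file or directory.
  static Map<String, Object> makeLoadSpec(File pushed)
  {
    Map<String, Object> loadSpec = new LinkedHashMap<>();
    loadSpec.put("type", "local");  // assumed type tag
    loadSpec.put("path", pushed.getAbsolutePath());
    return loadSpec;
  }
}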
.withLoadSpec(dataSegmentPusher.makeLoadSpec(indexOutURI))
.withSize(size.get())
.withBinaryVersion(SegmentUtils.getVersionFromDir(mergedBase));
@Override
public DataSegment call() throws Exception
{
  // Push the zipped segment to S3, then rewrite the load spec to point at it.
  S3Object toPush = new S3Object(zipOutFile);
  putObject(config.getBucket(), s3Path, toPush);

  final DataSegment outSegment = inSegment.withSize(indexSize)
                                          .withLoadSpec(makeLoadSpec(config.getBucket(), toPush.getKey()))
                                          .withBinaryVersion(SegmentUtils.getVersionFromDir(indexFilesDir));

  File descriptorFile = File.createTempFile("druid", "descriptor.json");
  // Avoid using Guava in DataSegmentPushers because they might be used with very
  // diverse Guava versions at runtime, and because Guava deletes methods over
  // time, which causes incompatibilities.
  Files.write(descriptorFile.toPath(), jsonMapper.writeValueAsBytes(outSegment));
  S3Object descriptorObject = new S3Object(descriptorFile);

  putObject(
      config.getBucket(),
      S3Utils.descriptorPathForSegmentPath(s3Path),
      descriptorObject
  );

  // Clean up the local temporary files now that both objects are uploaded.
  log.info("Deleting zipped index File[%s]", zipOutFile);
  zipOutFile.delete();

  log.info("Deleting descriptor file[%s]", descriptorFile);
  descriptorFile.delete();

  return outSegment;
}
}
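// Similarly, a hedged sketch of the two-argument makeLoadSpec(bucket, key)
// used above, assuming an S3-style load spec keyed by bucket and key; the
// "s3_zip" type tag is an assumption, not confirmed by the snippet.
import java.util.LinkedHashMap;
import java.util.Map;

final class S3LoadSpecSketch
{
  // Assumed shape of the S3 load spec produced for a pushed segment.
  static Map<String, Object> makeLoadSpec(String bucket, String key)
  {
    Map<String, Object> loadSpec = new LinkedHashMap<>();
    loadSpec.put("type", "s3_zip");  // assumed type tag
    loadSpec.put("bucket", bucket);
    loadSpec.put("key", key);
    return loadSpec;
  }
}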
.withLoadSpec(
    ImmutableMap.<String, Object>of("type", "c*", "key", key)
)
safeMove(s3Bucket, s3DescriptorPath, targetS3Bucket, targetS3DescriptorPath);

return segment.withLoadSpec(
    ImmutableMap.<String, Object>builder()
                .putAll(
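// The builder above is cut off mid-expression. As a hedged sketch of the move
// pattern it appears to follow: copy the entries of the existing load spec,
// then override the bucket and key so the spec points at the moved object.
// The variable names mirror the snippet's context; the copy-then-override
// logic is an assumption.
import java.util.LinkedHashMap;
import java.util.Map;

final class MovedLoadSpecSketch
{
  // Rebuild a load spec after a move: keep every existing entry, but
  // point "bucket" and "key" at the object's new location.
  static Map<String, Object> moveLoadSpec(
      Map<String, Object> loadSpec,
      String targetS3Bucket,
      String targetS3Path
  )
  {
    Map<String, Object> moved = new LinkedHashMap<>(loadSpec);
    moved.put("bucket", targetS3Bucket);
    moved.put("key", targetS3Path);
    return moved;
  }
}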