/**
 * Returns the Blob name relativized by {@link BlobUtils#relativize}
 * (presumably strips a leading path separator, mirroring how paths are
 * relativized elsewhere in this store — TODO confirm against BlobUtils).
 *
 * @return the relativized blob name
 */
public String getName() { return BlobUtils.relativize(name); }
/**
 * Returns the name component of this blob's URI.
 *
 * @return blob name taken from the underlying {@code BlobURI}
 */
public String getName() {
    final String blobName = blobUri.getName();
    return blobName;
}
@Override public BlobURI write(final UUID messageId, final Mailbox mailbox, final String profileName, final InputStream in, final Long size) throws IOException, GeneralSecurityException { // get blob name String blobName = new BlobNameBuilder().setMailbox(mailbox) .setMessageId(messageId).setMessageSize(size).build(); InputStream in1; Long updatedSize = size; // prepare URI BlobURI blobUri = new BlobURI() .setProfile(profileName) .setName(blobName); // encrypt stream if (encryptionHandler != null) { byte[] iv = getCipherIVFromBlobName(blobName); InputStream encryptedInputStream = this.encryptionHandler.encrypt(in, Configurator.getBlobStoreDefaultEncryptionKey(), iv); FileBackedOutputStream fbout = new FileBackedOutputStream(MAX_MEMORY_FILE_SIZE, true); updatedSize = ByteStreams.copy(encryptedInputStream, fbout); in1 = fbout.getSupplier().getInput(); blobUri.setEncryptionKey(Configurator.getBlobStoreDefaultEncryptionKeyAlias()); } else { in1 = in; } CloudStoreProxy.write(blobName, profileName, in1, updatedSize); return blobUri; }
public BlobDataSource read(final URI uri) throws IOException { // check if blob was stored for the message Assert.notNull(uri, "URI cannot be null"); BlobDataSource blobDS; BlobURI blobUri = new BlobURI().fromURI(uri); if (blobUri.getProfile().equals(DatabaseConstants.DATABASE_PROFILE)) { blobDS = dbBlobStorage.read(uri); } else { blobDS = cloudBlobStorage.read(uri); } // if compressed, add compression handler to data source if ((blobUri.getCompression() != null && blobUri.getCompression() .equals(DeflateCompressionHandler.COMPRESSION_TYPE_DEFLATE)) || // TODO: deprecated suffix based compression detection // kept for backward compatibility with 0.3 blobUri.getName().endsWith(BlobStoreConstants.COMPRESS_SUFFIX)) { CompressionHandler ch = new DeflateCompressionHandler(); return new BlobDataSource(uri, blobDS.getInputStream(), ch); } else { return blobDS; } }
@Override public BlobDataSource read(final URI uri) throws IOException { InputStream in; BlobURI blobUri = new BlobURI().fromURI(uri); String keyAlias = blobUri.getEncryptionKey(); if (keyAlias != null) { // currently we only support AES encryption, use by default EncryptionHandler eh = new AESEncryptionHandler(); try { logger.debug("Decrypting object {} with key {}", uri, keyAlias); byte[] iv = getCipherIVFromBlobName(BlobUtils.relativize(uri.getPath())); in = eh.decrypt(CloudStoreProxy.read(uri), Configurator.getEncryptionKey(keyAlias), iv); } catch (GeneralSecurityException gse) { throw new IOException("Unable to decrypt message blob: ", gse); } } else { in = CloudStoreProxy.read(uri); } return new BlobDataSource(uri, in); }
/**
 * Reads a blob from Cassandra storage.
 * <p>
 * Only database-profile URIs are accepted; the blob name is the message UUID
 * and the payload is read from the single default block.
 *
 * @param uri database-profile blob URI
 * @return data source over the stored block
 * @throws IOException on read failure
 */
@Override
public BlobDataSource read(final URI uri) throws IOException {
    logger.debug("Reading blob {} from Cassandra", uri);

    BlobURI blobUri = new BlobURI().fromURI(uri);
    Assert.isTrue(blobUri.getProfile().equals(DATABASE_PROFILE),
            "Blob store profile does not match database.");

    // blob name carries the message UUID for database-profile URIs
    UUID messageId = UUID.fromString(blobUri.getName());
    byte[] messageBlock =
            BlobPersistence.readBlock(messageId, DATABASE_DEFAULT_BLOCK_ID);

    // FIX: Guava's ByteStreams.newInputStreamSupplier is deprecated and only
    // built a throwaway supplier; ByteArrayInputStream is the direct equivalent
    // (fully qualified to avoid changing the file's import block)
    InputStream in = new java.io.ByteArrayInputStream(messageBlock);

    return new BlobDataSource(uri, in);
}
&& blobDS.isCompressed()) .ok(blobDS.getInputStream(), MediaType.TEXT_PLAIN) .header(HttpHeaders.CONTENT_ENCODING, "deflate").build(); } else { response = Response.ok(blobDS.getUncompressedInputStream(), MediaType.TEXT_PLAIN).build();
public void delete(final URI uri) throws IOException { // check if blob was stored for the message, silently skip otherwise if (uri == null) { return; } boolean isDbProfile = new BlobURI().fromURI(uri).getProfile() .equals(DatabaseConstants.DATABASE_PROFILE); if (isDbProfile) { dbBlobStorage.delete(uri); } else { cloudBlobStorage.delete(uri); } } }
@Override public BlobURI write(final UUID messageId, final Mailbox mailbox, final String profileName, final InputStream in, final Long size) throws IOException, GeneralSecurityException { Assert.isTrue(size <= MAX_BLOB_SIZE, "Blob larger than " + MAX_BLOB_SIZE + " bytes can't be stored in Cassandra. Provided blob size: " + size + " bytes"); logger.debug("Storing blob {} in Cassandra", messageId); // prepare URI BlobURI blobUri = new BlobURI() .setProfile(DATABASE_PROFILE) .setName(messageId.toString()).setBlockCount(1); // store blob // TODO: currently we allow only single block writes (blockid=0). in future we can split blobs to multiple blocks BlobPersistence.writeBlock(messageId, DATABASE_DEFAULT_BLOCK_ID, ByteStreams.toByteArray(in)); return blobUri; }
InputStream compressedInputStream = compressionHandler.compress(in); FileBackedOutputStream fbout = new FileBackedOutputStream(MAX_MEMORY_FILE_SIZE, true); updatedSize = ByteStreams.copy(compressedInputStream, fbout); "Storing Blob in the database because size ({}KB) was less than database threshold {}KB", updatedSize, Configurator.getDatabaseBlobMaxSize()); blobUri = dbBlobStorage.write(messageId, mailbox, null, in1, updatedSize); } else { logger.debug( "Storing Blob in the cloud because size ({}KB) was greater than database threshold {}KB", updatedSize, Configurator.getDatabaseBlobMaxSize()); blobUri = cloudBlobStorage.write(messageId, mailbox, Configurator.getBlobStoreWriteProfileName(), in1, updatedSize); blobUri.setCompression(compressionHandler.getType());
/**
 * Returns Blob data, guaranteeing uncompressed content: compressed blobs are
 * run through the attached compression handler, otherwise the original
 * stream is returned as-is.
 * <p>
 * Use this method when the caller must always receive uncompressed data.
 *
 * @return uncompressed blob input stream
 * @throws IOException if uncompression fails
 */
public InputStream getUncompressedInputStream() throws IOException {
    return this.isCompressed() ? this.compressionHandler.uncompress(in) : in;
}
/**
 * Returns the raw POP3 message stream for the given Base64-encoded UID,
 * uncompressed and normalized to CRLF line endings.
 *
 * @param uid Base64-encoded message UUID
 * @return CRLF-normalized message stream
 * @throws IOException when the message cannot be read
 */
@Override
public InputStream getMessage(String uid) throws IOException {
    final UUID uuid = Base64UUIDUtils.decode(uid);
    logger.debug("POP3: Get message {}/{} [{}]", mailbox, uuid, uid);

    try {
        return new CRLFInputStream(
                dao.getRaw(mailbox, uuid).getUncompressedInputStream());
    } catch (Exception e) {
        logger.error("Error occured while retreiving POP3 message " + mailbox
                + "/" + uuid + " :", e);
        throw new IOException("Unable to read message");
    }
}
/** * Read Blob contents * * @param uri * @return */ public static InputStream read(URI uri) { // check if blob was stored for the message Assert.notNull(uri, "URI cannot be null"); logger.debug("Reading blob {}", uri); String profileName = uri.getHost(); String container = Configurator.getBlobStoreProfile(profileName).getContainer(); BlobStoreContext context = getBlobStoreContext(profileName); String path = BlobUtils.relativize(uri.getPath()); InputStream in = context.getBlobStore() .getBlob(container, path) .getPayload().getInput(); return in; }
/**
 * Deletes a blob's block from Cassandra.
 * <p>
 * Only database-profile URIs are accepted; the blob name is the message UUID.
 *
 * @param uri database-profile blob URI
 * @throws IOException on delete failure
 */
@Override
public void delete(final URI uri) throws IOException {
    logger.debug("Deleting blob {}", uri);

    final BlobURI blobUri = new BlobURI().fromURI(uri);
    Assert.isTrue(blobUri.getProfile().equals(DATABASE_PROFILE),
            "Blob store profile does not match database.");

    final UUID messageId = UUID.fromString(blobUri.getName());
    BlobPersistence.deleteBlock(messageId,
            BlobStoreConstants.DATABASE_DEFAULT_BLOCK_ID);
}
rawIn = messageDAO.getRaw(mailbox, messageId).getUncompressedInputStream(); MimeParser mimeParser = new MimeParser(); mimeParser.parse(rawIn);
String path = BlobUtils.relativize(uri.getPath()); } else { String container = profile.getContainer(); BlobStoreContext context = getBlobStoreContext(profileName); context.getBlobStore().removeBlob(container, path);
rawIn = messageDAO.getRaw(mailbox, messageId).getUncompressedInputStream(); MimeParser mimeParser = new MimeParser(); mimeParser.parse(rawIn);