/** Example of reading the blob's content through a reader. */ // [TARGET reader(BlobSourceOption...)] public void reader() throws IOException { // [START reader] try (ReadChannel reader = blob.reader()) { ByteBuffer bytes = ByteBuffer.allocate(64 * 1024); while (reader.read(bytes) > 0) { bytes.flip(); // do something with bytes bytes.clear(); } } // [END reader] }
/** Example of reading just a portion of the blob's content. */ // [TARGET reader(BlobSourceOption...)] // [VARIABLE 1] // [VARIABLE 8] public byte[] readContentRange(int start, int end) throws IOException { // [START readContentRange] try (ReadChannel reader = blob.reader()) { reader.seek(start); ByteBuffer bytes = ByteBuffer.allocate(end - start); reader.read(bytes); return bytes.array(); } // [END readContentRange] }
/** A one-byte read delegates to the GCS channel and advances the reported position by one. */
@Test
public void testRead() throws IOException {
  ByteBuffer oneByte = ByteBuffer.allocate(1);
  when(gcsChannel.read(eq(oneByte))).thenReturn(1);
  assertThat(chan.position()).isEqualTo(0L);
  assertThat(chan.read(oneByte)).isEqualTo(1);
  assertThat(chan.position()).isEqualTo(1L);
  // The wrapper must have forwarded exactly one read and checked openness three times.
  verify(gcsChannel).read(any(ByteBuffer.class));
  verify(gcsChannel, times(3)).isOpen();
}
/**
 * Writes a blob in two steps across a captured/restored WriteChannel, then reads
 * it back in two steps across a captured/restored ReadChannel, verifying both halves.
 */
@Test
public void testReadAndWriteCaptureChannels() throws IOException {
  String blobName = "test-read-and-write-capture-channels-blob";
  BlobInfo blob = BlobInfo.newBuilder(BUCKET, blobName).build();

  // Write the byte content, capture the writer state, and finish the write
  // with the string content through a restored writer.
  WriteChannel writer = storage.writer(blob);
  byte[] stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
  writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT));
  RestorableState<WriteChannel> writerState = writer.capture();
  WriteChannel secondWriter = writerState.restore();
  secondWriter.write(ByteBuffer.wrap(stringBytes));
  secondWriter.close();

  // Mirror the same capture/restore dance on the read side.
  ReadChannel reader = storage.reader(blob.getBlobId());
  reader.setChunkSize(BLOB_BYTE_CONTENT.length);
  ByteBuffer readBytes = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length);
  reader.read(readBytes);
  RestorableState<ReadChannel> readerState = reader.capture();
  ReadChannel secondReader = readerState.restore();
  ByteBuffer readStringBytes = ByteBuffer.allocate(stringBytes.length);
  secondReader.read(readStringBytes);
  reader.close();
  secondReader.close();

  assertArrayEquals(BLOB_BYTE_CONTENT, readBytes.array());
  assertEquals(BLOB_STRING_CONTENT, new String(readStringBytes.array(), UTF_8));
  assertTrue(storage.delete(BUCKET, blobName));
}
/** Reports whether the underlying channel is open; guarded by this object's monitor. */
@Override
public synchronized boolean isOpen() {
  return channel.isOpen();
}
// Verifies that position(long) seeks the delegate channel and updates the
// reported position without affecting the reported size.
@Test
public void testSetPosition() throws IOException {
  assertThat(chan.position()).isEqualTo(0L);
  assertThat(chan.size()).isEqualTo(42L);
  chan.position(1L);
  assertThat(chan.position()).isEqualTo(1L);
  assertThat(chan.size()).isEqualTo(42L);
  // Exactly one seek is forwarded; openness is checked once per channel call above.
  verify(gcsChannel).seek(1);
  verify(gcsChannel, times(5)).isOpen();
}
/**
 * Uploads gzip-encoded content and verifies that a ReadChannel returns the raw
 * (still-compressed) bytes, while readAllBytes returns the decoded text, and
 * that the raw bytes gunzip back to the original string.
 */
@Test
public void testReadCompressedBlob() throws IOException {
  String blobName = "test-read-compressed-blob";
  BlobInfo blobInfo =
      BlobInfo.newBuilder(BlobId.of(BUCKET, blobName))
          .setContentType("text/plain")
          .setContentEncoding("gzip")
          .build();
  Blob blob = storage.create(blobInfo, COMPRESSED_CONTENT);
  try (ByteArrayOutputStream rawBytes = new ByteArrayOutputStream()) {
    // Drain the channel in deliberately small 8-byte chunks.
    try (ReadChannel channel = storage.reader(BlobId.of(BUCKET, blobName))) {
      channel.setChunkSize(8);
      ByteBuffer chunk = ByteBuffer.allocate(8);
      while (channel.read(chunk) != -1) {
        chunk.flip();
        rawBytes.write(chunk.array(), 0, chunk.limit());
        chunk.clear();
      }
    }
    assertArrayEquals(
        BLOB_STRING_CONTENT.getBytes(UTF_8), storage.readAllBytes(BUCKET, blobName));
    assertArrayEquals(COMPRESSED_CONTENT, rawBytes.toByteArray());
    try (GZIPInputStream gunzip =
        new GZIPInputStream(new ByteArrayInputStream(rawBytes.toByteArray()))) {
      assertArrayEquals(BLOB_STRING_CONTENT.getBytes(UTF_8), ByteStreams.toByteArray(gunzip));
    }
  }
}
// Opens a fresh ReadChannel for the blob and, when resuming mid-stream
// (position > 0), seeks to the previously recorded position.
private void innerOpen() throws IOException {
  this.channel = gcsStorage.reader(file, blobSourceOptions);
  if (position > 0) {
    channel.seek(position);
  }
}
/** Closes the underlying channel; guarded by this object's monitor like the other channel ops. */
@Override
public synchronized void close() throws IOException {
  channel.close();
}
@Test public void testStateEquals() { replay(storageRpcMock); reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); @SuppressWarnings("resource") // avoid closing when you don't want partial writes to GCS ReadChannel secondReader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); RestorableState<ReadChannel> state = reader.capture(); RestorableState<ReadChannel> secondState = secondReader.capture(); assertEquals(state, secondState); assertEquals(state.hashCode(), secondState.hashCode()); assertEquals(state.toString(), secondState.toString()); }
/** reader() with generation/metageneration options returns an open channel backed by the stubbed RPC. */
@Test
public void testReaderWithOptions() throws IOException {
  byte[] payload = new byte[DEFAULT_CHUNK_SIZE];
  EasyMock.expect(
          storageRpcMock.read(BLOB_INFO2.toPb(), BLOB_SOURCE_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
      .andReturn(Tuple.of("etag", payload));
  EasyMock.replay(storageRpcMock);
  initializeService();
  ReadChannel reader =
      storage.reader(BUCKET_NAME1, BLOB_NAME2, BLOB_SOURCE_GENERATION, BLOB_SOURCE_METAGENERATION);
  assertNotNull(reader);
  assertTrue(reader.isOpen());
  // Trigger the stubbed RPC read.
  reader.read(ByteBuffer.allocate(42));
}
/**
 * Ensures the delegate channel is still open.
 *
 * @throws ClosedChannelException if the channel has been closed
 */
private void checkOpen() throws ClosedChannelException {
  if (channel.isOpen()) {
    return;
  }
  throw new ClosedChannelException();
}
// NOTE(review): fragment of a larger test — the enclosing method and the body of
// the catch block are outside this view. It reads one chunk, verifies it, then
// issues a second read that is apparently expected to raise StorageException
// (presumably the remote blob changed/was deleted — confirm against the full test).
assertEquals(blobSize, (long) remoteBlob.getSize());
try (ReadChannel reader = storage.reader(blob.getBlobId())) {
  reader.setChunkSize(chunkSize);
  ByteBuffer readBytes = ByteBuffer.allocate(chunkSize);
  int numReadBytes = reader.read(readBytes);
  assertEquals(chunkSize, numReadBytes);
  assertArrayEquals(Arrays.copyOf(content, chunkSize), readBytes.array());
  reader.read(readBytes);
  fail("StorageException was expected");
} catch (StorageException ex) {
/**
 * Sets this channel's position, seeking the delegate only when the position
 * actually changes.
 *
 * @param newPosition the new position; must be non-negative
 * @return this channel
 * @throws IOException if the underlying seek fails
 */
@Override
public SeekableByteChannel position(long newPosition) throws IOException {
  checkArgument(newPosition >= 0);
  synchronized (this) {
    checkOpen();
    if (newPosition != position) {
      channel.seek(newPosition);
      position = newPosition;
    }
    return this;
  }
}
// Delegates close() to the wrapped reader, releasing its resources.
// (The trailing "};" ends an enclosing anonymous class declared outside this view.)
@Override
public void close() throws IOException {
  reader.close();
}
};
/** Example of reading a blob's content through a reader. */ // [TARGET reader(String, String, BlobSourceOption...)] // [VARIABLE "my_unique_bucket"] // [VARIABLE "my_blob_name"] public void readerFromStrings(String bucketName, String blobName) throws IOException { // [START readerFromStrings] try (ReadChannel reader = storage.reader(bucketName, blobName)) { ByteBuffer bytes = ByteBuffer.allocate(64 * 1024); while (reader.read(bytes) > 0) { bytes.flip(); // do something with bytes bytes.clear(); } } // [END readerFromStrings] }
/** reader() accepts a decryption key given either as a Key object or as its base64 form. */
@Test
public void testReaderWithDecryptionKey() throws IOException {
  byte[] payload = new byte[DEFAULT_CHUNK_SIZE];
  // The stubbed RPC must answer once for each of the two readers below.
  EasyMock.expect(
          storageRpcMock.read(BLOB_INFO2.toPb(), ENCRYPTION_KEY_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
      .andReturn(Tuple.of("etag", payload))
      .times(2);
  EasyMock.replay(storageRpcMock);
  initializeService();
  ReadChannel keyReader =
      storage.reader(BUCKET_NAME1, BLOB_NAME2, BlobSourceOption.decryptionKey(KEY));
  assertNotNull(keyReader);
  assertTrue(keyReader.isOpen());
  keyReader.read(ByteBuffer.allocate(42));
  ReadChannel base64Reader =
      storage.reader(BUCKET_NAME1, BLOB_NAME2, BlobSourceOption.decryptionKey(BASE64_KEY));
  assertNotNull(base64Reader);
  assertTrue(base64Reader.isOpen());
  base64Reader.read(ByteBuffer.allocate(42));
}
@Override public byte[] load(final Value v) throws IOException { final BlobId blobId = GcsBlob.of(v._key).getBlobId(); final byte[] contentBytes = MemoryManager.malloc1(v._max); final ByteBuffer wrappingBuffer = ByteBuffer.wrap(contentBytes); final Key k = v._key; long offset = 0; // Skip offset based on chunk number if (k._kb[0] == Key.CHK) { offset = FileVec.chunkOffset(k); // The offset } final ReadChannel reader = storageProvider.getStorage().reader(blobId); reader.seek(offset); reader.read(wrappingBuffer); return contentBytes; }
// A read attempted after the channel reports closed must surface ClosedChannelException.
@Test
public void testRead_whenClosed_throwsCce() throws IOException {
  when(gcsChannel.isOpen()).thenReturn(false);
  // ExpectedException rule: everything after this line is expected to throw.
  thrown.expect(ClosedChannelException.class);
  chan.read(ByteBuffer.allocate(1));
}