/**
 * Creates a new {@link RDBBlobStore} on the configured data source and
 * remembers it so {@code dispose()} can release it later.
 *
 * @return the newly created blob store
 */
@Override
public RDBBlobStore createRDBBlobStore() {
    RDBBlobStore store = new RDBBlobStore(dataSource, options);
    bs = store;
    return store;
}
/**
 * Checks whether the repository's blobs live outside the RDB blob store:
 * if the blob store's table contains no chunks at all, the binaries must be
 * referenced externally.
 *
 * @return {@code true} when the RDB blob store holds no chunks
 * @throws IOException if the data source cannot be obtained or queried
 */
@Override
public boolean hasExternalBlobReferences() throws IOException {
    Closer closer = Closer.create();
    try {
        DataSource ds = getDataSource(closer);
        RDBBlobStore blobStore = new RDBBlobStore(ds);
        // fix: the temporary blob store was previously leaked; make sure it is
        // shut down (before the data source, thanks to LIFO close order)
        closer.register(blobStore::close);
        // no stored chunks => blobs must be referenced externally
        return !blobStore.getAllChunkIds(0).hasNext();
    } catch (Throwable e) {
        throw closer.rethrow(e);
    } finally {
        closer.close();
    }
}
/**
 * Releases the blob store created by this fixture, if one exists.
 */
@Override
public void dispose() {
    RDBBlobStore store = this.bs;
    if (store != null) {
        store.close();
        this.bs = null;
    }
}
}
/**
 * Removes every chunk currently stored in the given blob store.
 *
 * @param blobStore the store to clear
 * @throws Exception if listing or deleting the chunks fails
 */
private static void empty(RDBBlobStore blobStore) throws Exception {
    List<String> ids = Lists.newArrayList();
    // collect all known chunk ids, then delete them in one call
    blobStore.getAllChunkIds(0).forEachRemaining(ids::add);
    blobStore.deleteChunks(ids, 0);
}
/**
 * Test hook: exposes {@code RDBBlobStore.deleteChunks} so tests outside the
 * store's package can delete chunks by id.
 *
 * @param ds the blob store to delete from
 * @param chunkIds the chunk ids to remove
 * @param maxLastModifiedTime only delete chunks not modified after this time
 */
public static void deleteChunks(RDBBlobStore ds, List<String> chunkIds, long maxLastModifiedTime) throws Exception { ds.deleteChunks(chunkIds, maxLastModifiedTime); } }
getStatsCollector().downloaded(id, System.nanoTime() - start, TimeUnit.NANOSECONDS, data.length); cache.put(id, data); } finally {
@Test public void testBigBlob() throws Exception { int min = 0; int max = 8 * 1024 * 1024; int test = 0; while (max - min > 256) { if (test == 0) { test = max; // try largest first } else { test = (max + min) / 2; } byte[] data = new byte[test]; Random r = new Random(0); r.nextBytes(data); byte[] digest = getDigest(data); try { RDBBlobStoreFriend.storeBlock(blobStore, digest, 0, data); byte[] data2 = RDBBlobStoreFriend.readBlockFromBackend(blobStore, digest); if (!Arrays.equals(data, data2)) { throw new Exception("data mismatch for length " + data.length); } min = test; } catch (Exception ex) { max = test; } } LOG.info("max blob length for " + blobStoreName + " was " + test); int expected = Math.max(blobStore.getBlockSize(), 2 * 1024 * 1024); assertTrue(blobStoreName + ": expected supported block size is " + expected + ", but measured: " + test, test >= expected); }
/**
 * Stores a chunk, waits until the clock advances, stores a second chunk, then
 * verifies that deleting chunks older than {@code now} removes only the first
 * one's meta and data entries.
 */
@Test
public void testDeleteChunks() throws Exception {
    byte[] data1 = new byte[256];
    Random r = new Random(0);
    r.nextBytes(data1);
    byte[] digest1 = getDigest(data1);
    RDBBlobStoreFriend.storeBlock(blobStore, digest1, 0, data1);
    String id1 = StringUtils.convertBytesToHex(digest1);
    long now = System.currentTimeMillis();
    // wait ~10ms so the second chunk gets a strictly later modification time
    long until = System.currentTimeMillis() + 10;
    while (System.currentTimeMillis() < until) {
        try {
            Thread.sleep(5);
        } catch (InterruptedException e) {
            // fix: restore the interrupt status instead of swallowing it;
            // keep looping so the required time still elapses
            Thread.currentThread().interrupt();
        }
    }
    byte[] data2 = new byte[256];
    r.nextBytes(data2);
    byte[] digest2 = getDigest(data2);
    RDBBlobStoreFriend.storeBlock(blobStore, digest2, 0, data2);
    Assert.assertEquals("meta entry was not removed", 1, blobStore.countDeleteChunks(ImmutableList.of(id1), now));
    Assert.assertFalse("data entry was not removed", RDBBlobStoreFriend.isDataEntryPresent(blobStore, digest1));
}
@Test public void testUpdateAndDelete() throws Exception { byte[] data = new byte[256]; Random r = new Random(0); r.nextBytes(data); byte[] digest = getDigest(data); RDBBlobStoreFriend.storeBlock(blobStore, digest, 0, data); String id = StringUtils.convertBytesToHex(digest); long until = System.currentTimeMillis() + 1000; while (System.currentTimeMillis() < until) { try { Thread.sleep(100); } catch (InterruptedException e) { } } // Force update to update timestamp long beforeUpdateTs = System.currentTimeMillis() - 100; RDBBlobStoreFriend.storeBlock(blobStore, digest, 0, data); // Metadata row should not have been touched Assert.assertFalse("entry was cleaned although it shouldn't have", blobStore.deleteChunks(ImmutableList.of(id), beforeUpdateTs)); // Actual data row should still be present Assert.assertNotNull(RDBBlobStoreFriend.readBlockFromBackend(blobStore, digest)); }
getStatsCollector().downloaded(id, System.nanoTime() - start, TimeUnit.NANOSECONDS, data.length); cache.put(id, data); } finally {
/**
 * Sets the {@link DataSource} used for the RDB document store and — unless a
 * blob store has already been configured — for the RDB blob store as well,
 * applying the given {@link RDBOptions} to both.
 *
 * @return this
 */
public RDBDocumentNodeStoreBuilder setRDBConnection(DataSource ds, RDBOptions options) {
    this.documentStoreSupplier = ofInstance(new RDBDocumentStore(ds, this, options));
    if (blobStore == null) {
        setGCBlobStore(new RDBBlobStore(ds, options));
    }
    return thisBuilder();
}
/**
 * Checks whether the repository's blobs live outside the RDB blob store:
 * if the blob store's table contains no chunks at all, the binaries must be
 * referenced externally.
 *
 * @return {@code true} when the RDB blob store holds no chunks
 * @throws IOException if the data source cannot be obtained or queried
 */
@Override
public boolean hasExternalBlobReferences() throws IOException {
    Closer closer = Closer.create();
    try {
        DataSource ds = getDataSource(closer);
        RDBBlobStore blobStore = new RDBBlobStore(ds);
        // fix: the temporary blob store was previously leaked; make sure it is
        // shut down (before the data source, thanks to LIFO close order)
        closer.register(blobStore::close);
        // no stored chunks => blobs must be referenced externally
        return !blobStore.getAllChunkIds(0).hasNext();
    } catch (Throwable e) {
        throw closer.rethrow(e);
    } finally {
        closer.close();
    }
}
/**
 * Per-test cleanup: run the superclass teardown, then empty and close the
 * blob store created for this test, if any.
 */
@After
@Override
public void tearDown() throws Exception {
    super.tearDown();
    RDBBlobStore store = blobStore;
    if (store == null) {
        return;
    }
    empty(store);
    store.close();
}
/**
 * Sets separate {@link DataSource}s: one for the RDB document store and —
 * unless a blob store has already been configured — one for the RDB blob
 * store.
 *
 * @return this
 */
public RDBDocumentNodeStoreBuilder setRDBConnection(DataSource documentStoreDataSource, DataSource blobStoreDataSource) {
    this.documentStoreSupplier = ofInstance(new RDBDocumentStore(documentStoreDataSource, this));
    if (blobStore == null) {
        setGCBlobStore(new RDBBlobStore(blobStoreDataSource));
    }
    return thisBuilder();
}
/**
 * Sets the {@link DataSource} used for the RDB document store and — unless a
 * blob store has already been configured — for the RDB blob store as well,
 * applying the given {@link RDBOptions} to both.
 *
 * @return this
 */
public RDBDocumentNodeStoreBuilder setRDBConnection(DataSource ds, RDBOptions options) {
    this.documentStoreSupplier = ofInstance(new RDBDocumentStore(ds, this, options));
    if (blobStore == null) {
        setGCBlobStore(new RDBBlobStore(ds, options));
    }
    return thisBuilder();
}
/**
 * Sets separate {@link DataSource}s: one for the RDB document store and —
 * unless a blob store has already been configured — one for the RDB blob
 * store.
 *
 * @return this
 */
public RDBDocumentNodeStoreBuilder setRDBConnection(DataSource documentStoreDataSource, DataSource blobStoreDataSource) {
    this.documentStoreSupplier = ofInstance(new RDBDocumentStore(documentStoreDataSource, this));
    if (blobStore == null) {
        setGCBlobStore(new RDBBlobStore(blobStoreDataSource));
    }
    return thisBuilder();
}
/**
 * Returns the blob store to exercise: the configured data-store fixture when
 * {@code useDataStore} is set, otherwise a fresh {@link RDBBlobStore} on the
 * JDBC connection parameters.
 */
private BlobStore getBlobStore(StatisticsProvider statsProvider) {
    try {
        if (!useDataStore) {
            DataSource ds = RDBDataSourceFactory.forJdbcUrl(jdbcuri, jdbcuser, jdbcpasswd);
            return new RDBBlobStore(ds, getOptions(dropDBAfterTest, tablePrefix));
        }
        initializeBlobStoreFixture(statsProvider);
        return blobStoreFixture.setUp();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Returns the blob store to exercise: the configured data-store fixture when
 * {@code useDataStore} is set, otherwise a fresh {@link RDBBlobStore} on the
 * JDBC connection parameters.
 */
private BlobStore getBlobStore(StatisticsProvider statsProvider) {
    try {
        if (!useDataStore) {
            DataSource ds = RDBDataSourceFactory.forJdbcUrl(jdbcuri, jdbcuser, jdbcpasswd);
            return new RDBBlobStore(ds, getOptions(dropDBAfterTest, tablePrefix));
        }
        initializeBlobStoreFixture(statsProvider);
        return blobStoreFixture.setUp();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Sets the {@link DataSource} used for the RDB document store and — unless a
 * blob store has already been configured — for the RDB blob store as well,
 * applying the given {@link RDBOptions} to both.
 *
 * @return this
 */
public Builder setRDBConnection(DataSource ds, RDBOptions options) {
    this.documentStoreSupplier = ofInstance(new RDBDocumentStore(ds, this, options));
    if (blobStore == null) {
        setGCBlobStore(new RDBBlobStore(ds, options));
    }
    return this;
}
/**
 * Sets separate {@link DataSource}s: one for the RDB document store and —
 * unless a blob store has already been configured — one for the RDB blob
 * store.
 *
 * @return this
 */
public Builder setRDBConnection(DataSource documentStoreDataSource, DataSource blobStoreDataSource) {
    this.documentStoreSupplier = ofInstance(new RDBDocumentStore(documentStoreDataSource, this));
    if (blobStore == null) {
        setGCBlobStore(new RDBBlobStore(blobStoreDataSource));
    }
    return this;
}