Refine search
.withConcurrencyLevel(CONCURRENCY).call(); byte[] json = jsonMapper.writeValueAsBytes(segment); MutationBatch mutation = this.keyspace.prepareMutationBatch(); mutation.withRow(descriptorStorage, key) .putColumn("lastmodified", System.currentTimeMillis(), null) .putColumn("descriptor", json, null); mutation.execute(); log.info("Wrote index to C* in [%s] ms", System.currentTimeMillis() - start);
@Override public void mutateMany(Map<String, Map<StaticBuffer, KCVMutation>> batch, StoreTransaction txh) throws BackendException { MutationBatch m = keyspaceContext.getClient().prepareMutationBatch().withAtomicBatch(atomicBatch) .setConsistencyLevel(getTx(txh).getWriteConsistencyLevel().getAstyanax()) .withRetryPolicy(retryPolicy.duplicate()); ColumnListMutation<ByteBuffer> dels = m.withRow(columnFamily, key); dels.setTimestamp(commitTime.getDeletionTime(times)); dels.deleteColumn(b.as(StaticBuffer.BB_FACTORY)); ColumnListMutation<ByteBuffer> upds = m.withRow(columnFamily, key); upds.setTimestamp(commitTime.getAdditionTime(times)); m.execute(); } catch (ConnectionException e) { throw new TemporaryBackendException(e);
allVersions.getMigrationRelationship( currentVersion ); final Observable<List<Edge>> observable = migrationDataProvider.getData().flatMap( graphNode -> { final GraphManager gm = graphManagerFactory.createEdgeManager( graphNode.applicationScope ); return edgesFromSourceObservable.edgesFromSourceDescending( gm, graphNode.entryNode, true).buffer( 1000 ) .doOnNext( edges -> { final MutationBatch batch = keyspace.prepareMutationBatch(); final MutationBatch edgeBatch = migration.to.writeEdge( graphNode.applicationScope, edge ); batch.mergeShallow( edgeBatch ); batch.execute();
/** * Remove the edge * * @param scope The scope * @param rowKeyId The id to use in the row key * @param edgeType The edge type * @param version The version of the edge * @param cf The column family */ private MutationBatch removeEdgeType( final ApplicationScope scope, final Id rowKeyId, final String edgeType, final long version, final MultiTenantColumnFamily<ScopedRowKey<Id>, String> cf ) { //write target<--source edge type meta data final ScopedRowKey< Id> rowKey = new ScopedRowKey< Id>( scope.getApplication(), rowKeyId ); final MutationBatch batch = keyspace.prepareMutationBatch().withTimestamp( version ); batch.withRow( cf, rowKey ).deleteColumn( edgeType ); return batch; }
/**
 * Prepare (but do not execute) the batch that deletes the marker column for
 * the given node from the GRAPH_DELETE row.
 *
 * @param scope the application scope; validated before use
 * @param node the node whose row is targeted; validated before use
 * @param timestamp the timestamp applied to the column deletion
 * @return the prepared MutationBatch for the caller to execute
 */
@Override
public MutationBatch delete( final ApplicationScope scope, final Id node, final long timestamp ) {

    ValidationUtils.validateApplicationScope( scope );
    ValidationUtils.verifyIdentity( node );
    GraphValidation.validateTimestamp( timestamp, "timestamp" );

    final MutationBatch deleteBatch =
            keyspace.prepareMutationBatch().withConsistencyLevel( fig.getWriteCL() );

    deleteBatch.withRow( GRAPH_DELETE, ScopedRowKey.fromKey( scope.getApplication(), node ) )
               .setTimestamp( timestamp )
               .deleteColumn( COLUMN_NAME );

    return deleteBatch;
}
/**
 * Prepare (but do not execute) the batch that writes the marker column for
 * the given node into the GRAPH_DELETE row, recording the timestamp as the
 * column value.
 *
 * @param scope the application scope; validated before use
 * @param node the node whose row is targeted; validated before use
 * @param timestamp the timestamp written as both the column timestamp and value
 * @return the prepared MutationBatch for the caller to execute
 */
@Override
public MutationBatch mark( final ApplicationScope scope, final Id node, final long timestamp ) {

    ValidationUtils.validateApplicationScope( scope );
    ValidationUtils.verifyIdentity( node );
    GraphValidation.validateTimestamp( timestamp, "timestamp" );

    final MutationBatch markBatch =
            keyspace.prepareMutationBatch().withConsistencyLevel( fig.getWriteCL() );

    markBatch.withRow( GRAPH_DELETE, ScopedRowKey.fromKey( scope.getApplication(), node ) )
             .setTimestamp( timestamp )
             .putColumn( COLUMN_NAME, timestamp );

    return markBatch;
}
return Observable.just( node ) .flatMap( id -> { logger.trace("Node with id {} has max version of {}", node, maxVersion.orNull()); if ( !maxVersion.isPresent() ) { return Observable.empty(); doDeletes( node, scope, maxVersion.get(), timestamp ).doOnCompleted( () -> { try { nodeSerialization.delete( scope, node, maxVersion.get()).execute(); } catch ( ConnectionException e ) { throw new RuntimeException( "Unable to connect to cassandra", e );
@Test public void writeLoadClearDelete() throws ConnectionException { final Id applicationId = new SimpleId( "application" ); ApplicationScope context = new ApplicationScopeImpl( applicationId ); final UUID version = UUIDGenerator.newTimeUUID(); final Id entityId = new SimpleId( "test" ); Entity entity = new Entity( entityId ); EntityUtils.setVersion( entity, version ); MvccEntity saved = new MvccEntityImpl( entityId, version, MvccEntity.Status.COMPLETE, Optional.of( entity ) ); //persist the entity serializationStrategy.write( context, saved ).execute(); //now load it back MvccEntity returned = serializationStrategy.load( context, Collections.singleton( entityId ), version ).getEntity( entityId ); assertEquals( "Mvcc entities are the same", saved, returned ); assertEquals( entityId, returned.getId() ); //check the target entity has the right id assertEquals( entityId, returned.getEntity().get().getId() ); //now mark it serializationStrategy.mark( context, entityId, version ).execute(); returned = serializationStrategy.load( context, Collections.singleton( entityId ), version ).getEntity( entityId ); assertEquals( entityId, returned.getId() ); assertEquals( version, returned.getVersion() ); assertFalse( returned.getEntity().isPresent() ); assertEquals( MvccEntity.Status.DELETED, returned.getStatus()); //now delete it serializationStrategy.delete( context, entityId, version ).execute(); //now get it, should be gone returned = serializationStrategy.load( context, Collections.singleton( entityId ), version ).getEntity( entityId ); assertNull( returned ); }
/**
 * Happy path: mark a node, read back its max version, delete it, then
 * verify the version is gone.
 */
@Test
public void writeReadDelete() throws ConnectionException {

    final Id nodeId = IdGenerator.createId( "test" );
    final long version = System.currentTimeMillis();

    // mark the node at this version
    serialization.mark( scope, nodeId, version ).execute();

    Optional<Long> maxVersion = serialization.getMaxVersion( scope, nodeId );
    assertEquals( version, maxVersion.get().longValue() );

    // remove the mark we just wrote
    serialization.delete( scope, nodeId, maxVersion.get() ).execute();

    // verifies that it is deleted
    maxVersion = serialization.getMaxVersion( scope, nodeId );
    assertFalse( maxVersion.isPresent() );
}
private void confirmUniqueFields( MvccEntity mvccEntity, UUID version, ApplicationScope scope, MutationBatch logMutation) { final Entity entity = mvccEntity.getEntity().get(); // re-write the unique values but this time with no TTL final BatchStatement uniqueBatch = new BatchStatement(); for ( Field field : EntityUtils.getUniqueFields(mvccEntity.getEntity().get()) ) { UniqueValue written = new UniqueValueImpl( field, entity.getId(), version); uniqueBatch.add(uniqueValueStrat.writeCQL(scope, written, -1 )); logger.debug("Finalizing {} unique value {}", field.getName(), field.getValue().toString()); } try { logMutation.execute(); session.execute(uniqueBatch); } catch ( ConnectionException e ) { logger.error( "Failed to execute write asynchronously ", e ); throw new WriteCommitException( mvccEntity, scope, "Failed to execute write asynchronously ", e ); } }
@Test public void saveReturn() throws ConnectionException { final Id now = IdGenerator.createId( "test" ); final long timestamp = 10000l; final Shard shard1 = new Shard( 1000l, timestamp, false ); final Shard shard2 = new Shard( shard1.getShardIndex() * 2, timestamp, true ); final Shard shard3 = new Shard( shard2.getShardIndex() * 2, timestamp, false ); final DirectedEdgeMeta sourceEdgeMeta = DirectedEdgeMeta.fromSourceNodeTargetType( now, "edgeType", "subType" ); MutationBatch batch = edgeShardSerialization.writeShardMeta( scope, shard1, sourceEdgeMeta ); batch.mergeShallow( edgeShardSerialization.writeShardMeta( scope, shard2, sourceEdgeMeta ) ); batch.mergeShallow( edgeShardSerialization.writeShardMeta( scope, shard3, sourceEdgeMeta ) ); batch.execute(); Iterator<Shard> results = edgeShardSerialization.getShardMetaData( scope, Optional.<Shard>absent(), sourceEdgeMeta ); assertEquals( shard3, results.next() ); assertEquals( shard2, results.next() ); assertEquals( shard1, results.next() ); assertFalse( results.hasNext() ); final DirectedEdgeMeta targetEdgeMeta = DirectedEdgeMeta.fromTargetNodeSourceType( now, "edgeType", "subType" ); //test we get nothing with the other node type results = edgeShardSerialization.getShardMetaData( scope, Optional.<Shard>absent(), targetEdgeMeta ); assertFalse( results.hasNext() ); //test paging and size results = edgeShardSerialization.getShardMetaData( scope, Optional.of( shard2 ), sourceEdgeMeta ); assertEquals( shard2, results.next() ); assertEquals( shard1, results.next() ); assertFalse( results.hasNext() ); }
v1Impl.write( scope, entity1 ).execute(); v1Impl.write( scope, entity2 ).execute(); MvccEntity returned1 = v1Impl.load( scope, entity1.getId() ).get(); MvccEntity returned2 = v1Impl.load( scope, entity2.getId() ).get(); Observable.just( new EntityIdScope( scope, entity1.getId() ), new EntityIdScope( scope, entity2.getId() ) ); returned1 = v3Impl.load( scope, entity1.getId() ).get(); returned2 = v3Impl.load( scope, entity2.getId() ).get();
@Test public void loadAscendingHistory() throws ConnectionException { final Id applicationId = new SimpleId( "application" ); ApplicationScope context = new ApplicationScopeImpl( applicationId ); final UUID entityId = UUIDGenerator.newTimeUUID(); final UUID version1 = UUIDGenerator.newTimeUUID(); final String type = "test"; final Id id = new SimpleId(entityId, type); Entity entityv1 = new Entity(id); EntityUtils.setVersion(entityv1, version1); MvccEntity saved = new MvccEntityImpl(id, version1, MvccEntity.Status.COMPLETE, Optional.of(entityv1)); //persist the entity serializationStrategy.write(context, saved).execute(); //now write a new version of it Entity entityv2 = new Entity(id); UUID version2 = UUIDGenerator.newTimeUUID(); EntityUtils.setVersion(entityv1, version2); MvccEntity savedV2 = new MvccEntityImpl(id, version2, MvccEntity.Status.COMPLETE, Optional.of(entityv2)); serializationStrategy.write(context, savedV2).execute(); Iterator<MvccEntity> entities = serializationStrategy.loadAscendingHistory( context, id, savedV2.getVersion(), 20 ); assertTrue(entities.hasNext()); assertEquals(saved.getVersion(), entities.next().getVersion()); assertEquals(savedV2.getVersion(), entities.next().getVersion()); assertFalse(entities.hasNext()); }
Entity createReturned = observable.toBlocking().lastOrDefault( null ); .toBlocking().last(); fieldResults.getEntity( expectedInteger ).getEntity().get() ); //loadReturned ); entitySerializationStrategy.delete( context, createReturned.getId(), createReturned.getVersion() ).execute(); .toBlocking().last();
/** * Commit log tests */ @Test public void commitLogTest() throws ConnectionException { final Id sourceId = IdGenerator.createId( "source" ); final Id targetId = IdGenerator.createId( "target" ); final String edgeType = "edge"; final MarkedEdge edge1 = createEdge( sourceId, edgeType, targetId, System.currentTimeMillis(), true ); //write it as non deleted to storage storageEdgeSerialization.writeEdge( scope, edge1, UUIDGenerator.newTimeUUID() ).execute(); final MarkedEdge edge2 = createEdge( sourceId, edgeType, targetId ); storageEdgeSerialization.writeEdge( scope, edge2, UUIDGenerator.newTimeUUID() ).execute(); //now repair delete the first edge Iterator<MarkedEdge> itr = storageEdgeSerialization.getEdgeVersions( scope, new SimpleSearchByEdge( sourceId, edgeType, targetId, System.currentTimeMillis(), SearchByEdgeType.Order.DESCENDING, Optional.<Edge>absent() ) ); assertEquals( edge2, itr.next() ); assertEquals( edge1, itr.next() ); assertFalse( itr.hasNext() ); MarkedEdge deleted = edgeDeleteRepair.repair( scope, edge1, UUIDGenerator.newTimeUUID() ).toBlocking().single(); assertEquals( edge1, deleted ); itr = storageEdgeSerialization.getEdgeVersions( scope, new SimpleSearchByEdge( sourceId, edgeType, targetId, System.currentTimeMillis(), SearchByEdgeType.Order.DESCENDING, Optional.<Edge>absent() ) ); assertEquals( edge2, itr.next() ); assertFalse( itr.hasNext() ); }
/**
 * Writes three shards, then removes shard3's meta data and verifies what the
 * subsequent read returns.
 */
@Test
public void testShardDelete() throws ConnectionException {

    final Id now = IdGenerator.createId( "test" );

    final long timestamp = 2000L;

    // shard2 reuses shard1's index with a later timestamp; shard3 has a new index
    final Shard shard1 = new Shard( 1000L, timestamp, false );
    final Shard shard2 = new Shard( shard1.getShardIndex(), timestamp * 2, true );
    final Shard shard3 = new Shard( shard2.getShardIndex() * 2, timestamp * 3, true );

    final DirectedEdgeMeta sourceEdgeMeta = DirectedEdgeMeta.fromSourceNodeTargetType( now, "edgeType", "subType" );

    // merge all three writes into one batch and execute once
    MutationBatch batch = edgeShardSerialization.writeShardMeta( scope, shard1, sourceEdgeMeta );

    batch.mergeShallow( edgeShardSerialization.writeShardMeta( scope, shard2, sourceEdgeMeta ) );

    batch.mergeShallow( edgeShardSerialization.writeShardMeta( scope, shard3, sourceEdgeMeta ) );

    batch.execute();

    Iterator<Shard> results =
            edgeShardSerialization.getShardMetaData( scope, Optional.<Shard>absent(), sourceEdgeMeta );

    // Latest timestamp comes first
    assertEquals( shard3, results.next() );

    // This should now not remove anything
    // NOTE(review): the next read returns shard2 first, which implies shard3 WAS
    // removed here — the "should now not remove anything" wording looks stale or
    // copied from another variant of this test; confirm the intended semantics of
    // removeShardMeta before relying on this comment.
    edgeShardSerialization.removeShardMeta( scope, shard3, sourceEdgeMeta ).execute();

    // Get iterator again
    results = edgeShardSerialization.getShardMetaData( scope, Optional.<Shard>absent(), sourceEdgeMeta );

    // We should still have shard2 stored
    assertEquals( shard2, results.next() );
}
@Override public Observable<Id> markNode( final Id node, final long timestamp ) { final Observable<Id> idObservable = Observable.just( node ).map( id -> { //mark the node as deleted final MutationBatch nodeMutation = nodeSerialization.mark( scope, id, timestamp ); if (logger.isTraceEnabled()) { logger.trace("Marking node {} as deleted to node mark", node); } try { nodeMutation.execute(); } catch ( ConnectionException e ) { throw new RuntimeException( "Unable to execute mutation", e ); } return id; } ); return ObservableTimer.time( idObservable, markNodeTimer ); }
collectionIoEventObservable.buffer( serializationFig.getBufferSize() ).flatMap( buffer -> Observable.from( buffer ).collect( () -> keyspace.prepareMutationBatch(), ( ( mutationBatch, mvccLogEntryCollectionIoEvent ) -> { mutationBatch.mergeShallow( logDelete ); mutationBatch.mergeShallow( entityDelete ); .doOnNext( mutationBatch -> { try { mutationBatch.execute();
/**
 * Writes the given edge (wrapped as an unmarked MarkedEdge) by merging the
 * edge meta-data mutation and the storage mutation into a single batch and
 * executing it. The operation is timed via writeEdgeTimer.
 *
 * @param edge the edge to write; validated before use
 * @return a timed Observable emitting the written MarkedEdge
 */
@Override
public Observable<MarkedEdge> writeEdge( final Edge edge ) {

    GraphValidation.validateEdge( edge );

    final MarkedEdge markedEdge = new SimpleMarkedEdge( edge, false );

    final Observable<MarkedEdge> write = Observable.just( markedEdge ).map( toWrite -> {

        final UUID timestamp = UUIDGenerator.newTimeUUID();

        // fold the storage write into the meta-data write so both go out in one batch
        final MutationBatch combined = edgeMetadataSerialization.writeEdge( scope, toWrite );
        combined.mergeShallow( storageEdgeSerialization.writeEdge( scope, toWrite, timestamp ) );

        try {
            combined.execute();
        }
        catch ( ConnectionException e ) {
            throw new RuntimeException( "Unable to execute mutation", e );
        }

        return toWrite;
    } );

    return ObservableTimer.time( write, writeEdgeTimer );
}