Refine search
/**
 * Returns whether the given consumer still has at least one pending transaction
 * recorded for the given queue in the consumer-queue-timeouts column family.
 *
 * @param queueId    the queue being inspected
 * @param consumerId the consumer whose transactions are checked
 * @return true if any timeout column exists for this queue/consumer pair
 */
public boolean hasOutstandingTransactions( UUID queueId, UUID consumerId ) {
    // Fetching a single column is enough to answer the existence question.
    SliceQuery<ByteBuffer, UUID, UUID> timeoutQuery = createSliceQuery( ko, be, ue, ue );
    timeoutQuery.setColumnFamily( CONSUMER_QUEUE_TIMEOUTS.getColumnFamily() );
    timeoutQuery.setKey( getQueueClientTransactionKey( queueId, consumerId ) );
    timeoutQuery.setRange( null, null, false, 1 );

    return !timeoutQuery.execute().get().getColumns().isEmpty();
}
/**
 * Get the bounds for the queue.
 *
 * @param queueId the queue to inspect
 * @return the bounds for the queue, or null when either bound column is
 *         missing or the read fails
 */
public QueueBounds getQueueBounds( UUID queueId ) {
    try {
        ColumnSlice<String, UUID> slice = HFactory.createSliceQuery( ko, ue, se, ue )
                .setKey( queueId )
                .setColumnNames( QUEUE_NEWEST, QUEUE_OLDEST )
                .setColumnFamily( QUEUE_PROPERTIES.getColumnFamily() )
                .execute()
                .get();

        if ( slice != null ) {
            HColumn<String, UUID> oldest = slice.getColumnByName( QUEUE_OLDEST );
            HColumn<String, UUID> newest = slice.getColumnByName( QUEUE_NEWEST );
            // Both bounds must be present for the result to be meaningful.
            if ( oldest != null && newest != null ) {
                return new QueueBounds( oldest.getValue(), newest.getValue() );
            }
        }
    }
    catch ( Exception e ) {
        logger.error( "Error getting oldest queue message ID", e );
    }
    return null;
}
// NOTE(review): this block appears to be a truncated excerpt — several statements are
// cut mid-expression (dangling argument lists such as "DynamicCompositeSerializer.get(), ..."
// and "ComponentEquality.GREATER_THAN_EQUAL);", and `composite`, `sliceQuery`, `rowKey`,
// `start`, `end` have no visible declarations). It will not compile as shown; the full
// method must be recovered from the original file before any change is made here.
public void testCompositeOrdering() { Mutator<ByteBuffer> mutator = HFactory.createMutator( CassandraTestBase.keyspace, ByteBufferSerializer.get()); composite.addComponent("jeans", StringSerializer.get(), StringSerializer .get().getComparatorType().getTypeName()); DynamicCompositeSerializer.get(), ByteBufferSerializer.get()); sliceQuery.setColumnFamily(AbstractIndexOperation.CF_NAME); sliceQuery.setKey(rowKey); ComponentEquality.GREATER_THAN_EQUAL); sliceQuery.setRange(start, end, false, 1000); List<HColumn<DynamicComposite, ByteBuffer>> cols = sliceQuery.execute() .get().getColumns(); System.out.println(ByteBufferUtil.bytesToHex(cols.get(0).getNameBytes())); composite = cols.get(0).getName(); composite = cols.get(1).getName();
/**
 * Collapses a single-row slice query result into a column name -> value map.
 *
 * @param queryResult a slice query result holding string-named, string-valued columns
 * @return a mutable map of column name to column value
 */
private Map<String, String> getResults(QueryResult<ColumnSlice<String, String>> queryResult) {
    Map<String, String> columnsByName = Maps.newHashMap();
    for (HColumn<String, String> column : queryResult.get().getColumns()) {
        columnsByName.put(column.getName(), column.getValue());
    }
    return columnsByName;
}
// NOTE(review): mid-method fragment — the enclosing method signature is not visible and
// the text is garbled (there is no closing brace before the `else if`, and the final
// `else if` body is cut off). Do not edit until the surrounding method is recovered.
// What the visible code shows: when `results` is non-null, each column is read; the
// column name is decoded either as a plain string (when the entity has a dictionary)
// or as component 0 of a DynamicComposite; basic dictionary values are deserialized
// via object(dictionaryCoType, ...).
if ( results != null ) { values = new HashMap<String, Object>(); for ( HColumn<ByteBuffer, ByteBuffer> result : results.getColumns() ) { String name = entityHasDictionary ? string( result.getName() ) : DynamicComposite.fromByteBuffer( result.getName() ).get( 0, se ); if ( entityHasDictionary && coTypeIsBasic ) { values.put( name, object( dictionaryCoType, result.getValue() ) ); else if ( result.getValue().remaining() > 0 ) {
// NOTE(review): statement run from inside a column-read helper — the enclosing method
// signature (declaring N, V, key, columnNames, nameSerializer, valueSerializer) is not
// visible in this excerpt.
// Builds a slice query over an explicit set of column names and materializes the results.
SliceQuery<ByteBuffer, N, V> q = createSliceQuery( ko, be, nameSerializer, valueSerializer );
q.setColumnFamily( columnFamily.toString() );
q.setKey( bytebuffer( key ) );
// Round-trips the requested string names through bytes to convert them to the
// column-name type N. The unchecked cast to N[] relies on the serializers matching.
q.setColumnNames( ( N[] ) nameSerializer.fromBytesSet( se.toBytesSet( new ArrayList<String>( columnNames ) ) ) .toArray() );
QueryResult<ColumnSlice<N, V>> r = q.execute();
ColumnSlice<N, V> slice = r.get();
List<HColumn<N, V>> results = slice.getColumns();
/**
 * Re-runs the underlying slice query from the current lower bound and resets the
 * iteration state for the next page of columns.
 */
private void refresh() {
    // finish.function() recomputes the upper bound for each page.
    query.setRange(start, finish.function(), reversed, count);
    // Reset the column counter before handing out this page's results
    // (presumably tracks columns consumed from the current page — confirm against next()).
    columns = 0;
    List<HColumn<N, V>> list = query.execute().get().getColumns();
    iterator = Iterators.peekingIterator(list.iterator());
    if (iterator.hasNext()) {
        // The lower bound column may have been removed prior to the query executing,
        // so check to see if the first column returned by the current query is the same
        // as the lower bound column. If both columns are the same, skip the column
        N first = list.get(0).getName();
        if (first.equals(start)) {
            next();
        }
    }
}
/**
 * Get the value of a single named column across multiple rows.
 *
 * @param columnName the column to read from each row
 * @param keys the row keys to fetch
 * @return map of row key to column value; keys whose column is absent or
 *         has a null value are omitted
 */
public Map<String, String> getMulti(String columnName, String... keys) {
    MultigetSliceQuery<String, String, String> query =
            createMultigetSliceQuery(keyspace, serializer, serializer, serializer);
    query.setColumnFamily(columnFamilyName);
    query.setKeys(keys);
    query.setColumnNames(columnName);

    Rows<String, String, String> rows = query.execute().get();

    Map<String, String> valuesByKey = new HashMap<String, String>(keys.length);
    for (String key : keys) {
        HColumn<String, String> column =
                rows.getByKey(key).getColumnSlice().getColumnByName(columnName);
        if (column != null && column.getValue() != null) {
            valuesByKey.put(key, column.getValue());
        }
    }
    return valuesByKey;
}
/**
 * Inserts columns one at a time into the working column family and, after each
 * insert, reads back the newest column (reversed slice, count 1) to verify the
 * last write is visible. Stops on the first mismatch and signals completion.
 */
@Override
public Void call() throws Exception {
    log.debug("Starting VerifyLastInsertCommand");
    String key = "test";
    sliceQuery.setColumnFamily(commandArgs.workingColumnFamily);
    log.info("StartKey: {} for thread {}", key, Thread.currentThread().getId());
    String colValue;
    for (int col = 0; col < commandArgs.columnCount; col++) {
        colValue = String.format(COLUMN_VAL_FORMAT, col);
        mutator.addInsertion(key, commandArgs.workingColumnFamily,
                HFactory.createStringColumn(String.format(COLUMN_NAME_FORMAT, col), colValue));
        executeMutator(col);
        // Let's verify
        sliceQuery.setKey(key);
        // reversed=true with count 1 returns only the newest column for the row.
        sliceQuery.setRange(null, null, true, 1);
        QueryResult<ColumnSlice<String,String>> result = sliceQuery.execute();
        String actualValue = result.get().getColumns().get(0).getValue();
        if (!actualValue.equals(colValue)) {
            log.error("Column values don't match. Expected: " + colValue + " - Actual: " + actualValue);
            break;
        }
    }
    // Signal the runner even when the loop breaks early on a mismatch.
    commandRunner.doneSignal.countDown();
    log.debug("VerifyLastInsertCommand complete");
    return null;
}
/** * This method intentionally swallows ordered execution issues. For some reason, our Time UUID ordering does * not agree with the cassandra comparator as our micros get very close * @param query * @param <K> * @param <UUID> * @param <V> * @return */ protected static <K, UUID, V> List<HColumn<UUID, V>> swallowOrderedExecution( final SliceQuery<K, UUID, V> query ) { try { return query.execute().get().getColumns(); } catch ( HInvalidRequestException e ) { //invalid request. Occasionally we get order issues when there shouldn't be, disregard them. final Throwable invalidRequestException = e.getCause(); if ( invalidRequestException instanceof InvalidRequestException //we had a range error && ( ( InvalidRequestException ) invalidRequestException ).getWhy().contains( "range finish must come after start in the order of traversal" )) { return Collections.emptyList(); } throw e; } }
// NOTE(review): garbled excerpt — the line starts mid-expression (".createRangeSlicesQuery")
// and interleaves live code with commented-out statements ("// if (...)",
// "// .addInsertion(...)"). In this collapsed form everything after the first "//" is
// dead text; the original line breaks must be recovered before this can be edited.
.createRangeSlicesQuery( keyspace, StringSerializer.get(), StringSerializer.get(), StringSerializer.get() ) // if ( result.get().getCount() < 1 ) MutationResult mutationResult = HFactory.createMutator( keyspace, StringSerializer.get() ) // .addInsertion( repositoryId, cf, result.get().getList().get( 0 ).getColumnSlice().getColumnByName( REPOSITORY_NAME.toString() ).getValue() );
// NOTE(review): truncated test fragment — `slice` and `row2` are referenced before any
// visible declaration, and the trailing for-loop body is cut off, so statements between
// the query setup and the assertions are missing. Recover the full test before editing.
// Visible intent: a multiget slice query against `cf` asserting that columns 1 and 2
// are returned with expected values and column 3 is absent.
MultigetSliceQuery<String, String, String> q = createMultigetSliceQuery(ko, se, se, se); q.setColumnFamily(cf); slice.getColumnByName("testMultigetSliceQueryColumn1").getValue()); assertEquals("value12", slice.getColumnByName("testMultigetSliceQueryColumn2").getValue()); assertNull(slice.getColumnByName("testMultigetSliceQueryColumn3")); List<HColumn<String, String>> columns = slice.getColumns(); assertNotNull(columns); assertEquals(2, columns.size()); q = createMultigetSliceQuery(ko, se, se, se); q.setColumnFamily(cf); q.setKeys("testMultigetSliceQuery3"); slice = row2.getColumnSlice(); assertNotNull(slice); for (HColumn<String, String> column : slice.getColumns()) { if (!column.getName().equals("testMultigetSliceQueryColumn1") && !column.getName().equals("testMultigetSliceQueryColumn2") && !column.getName().equals("testMultigetSliceQueryColumn3")) {
@Override public String getStoredType(ByteBuffer rowKey, String cfName, Keyspace keyspace) { SliceQuery<ByteBuffer, String, ByteBuffer> query = MappingUtils .buildSliceQuery(rowKey, columns, cfName, keyspace); QueryResult<ColumnSlice<String, ByteBuffer>> result = query.execute(); // only need to check > 0. If the entity wasn't tombstoned then we would // have loaded the static jpa marker column HColumn<String, ByteBuffer> descrimValue = result.get().getColumnByName( DISCRIMINAATOR_COL); if (descrimValue == null) { return null; } return StringSerializer.get().fromByteBuffer(descrimValue.getValue()); }
/**
 * Read the field from the query result into the object within the state
 * manager.
 *
 * @param stateManager the state manager receiving the value
 * @param result the slice expected to contain this field's column
 * @return true if the field was loaded, false otherwise
 */
public boolean readField(OpenJPAStateManager stateManager,
        QueryResult<ColumnSlice<String, ByteBuffer>> result) {
    HColumn<String, ByteBuffer> column = result.get().getColumnByName(name);
    if (column == null) {
        // Absent column: store an explicit null so the field is still marked loaded.
        stateManager.store(fieldId, null);
        return false;
    }
    stateManager.store(fieldId, serializer.fromByteBuffer(column.getValue()));
    return true;
}
/** * Loads a lazy property's value * * @param metadata the entity metadata * @param self the entity instance * @param proceed the method being intercepted * @param m * @param args the method arguments */ @Override protected <T> void loadLazyPropertyIfNecessary(ClassMetadata<T> metadata, Object self, Method proceed, Method m, Object[] args) throws Exception { Object value = proceed.invoke(self, args); String key = getKey(self); if (key != null) { //key may be null if this is just a regular access to the property before the entity has been persisted and no key has been assigned SliceQuery<String, String, Object> query = getSliceQuery(metadata); query.setColumnFamily(metadata.getColumnFamily()); query.setKey(key); String column = metadata.getLazyProperty(m); query.setColumnNames(column); List<HColumn<String, Object>> columns = query.execute().get().getColumns(); HColumn<String, Object> mappedColumnValue = columns.size() == 1 ? columns.get(0) : null; if (mappedColumnValue != null && isEmptyContainerValue(value)) { //todo once a load attempt has been made we should not attempt again but we have no sessions...perhaps a weakreference map? Object propertyValue = loadProperty(metadata, column, mappedColumnValue); PropertyUtils.setProperty(self, column, propertyValue); } } }
// NOTE(review): truncated test fragment — the first query executes without a visible
// setKey/setSuperColumn, the fail(...) branch is not closed before `slice = r.get()`
// resumes, and the final assertions reference an `r` whose query re-execution is not
// shown. Recover the full test body before editing.
// Visible intent: sub-slice queries on subcolumns asserting expected values for
// "c000"/"c110" and, at the end, an empty slice.
SubSliceQuery<String, String, String, String> q = createSubSliceQuery(ko, se, se, se, se); q.setColumnFamily(cf); QueryResult<ColumnSlice<String, String>> r = q.execute(); assertNotNull(r); ColumnSlice<String, String> slice = r.get(); assertNotNull(slice); assertEquals(2, slice.getColumns().size()); assertEquals("v000", slice.getColumnByName("c000").getValue()); q = createSubSliceQuery(ko, se, se, se, se); q.setColumnFamily(cf); q.setKey("testSliceQueryOnSubcolumns0"); r = q.execute(); assertNotNull(r); slice = r.get(); assertNotNull(slice); for (HColumn<String, String> column : slice.getColumns()) { if (!column.getName().equals("c000") && !column.getName().equals("c110")) { fail("A columns with unexpected column name returned: " + column.getName()); slice = r.get(); assertNotNull(slice); assertTrue(slice.getColumns().isEmpty());
// NOTE(review): truncated test fragment — `slice` and `row2` are referenced before any
// visible declaration and the final fail(...) call is cut off mid-argument. Recover the
// full test body before editing.
// Visible intent: multiget sub-slice queries asserting subcolumns "c000" and "c110"
// are returned with values "v000"/"v100" and nothing unexpected appears.
MultigetSubSliceQuery<String, String, String, String> q = createMultigetSubSliceQuery( ko, se, se, se, se); q.setColumnFamily(cf); assertNotNull(slice); assertEquals("v000", slice.getColumnByName("c000").getValue()); assertEquals("v100", slice.getColumnByName("c110").getValue()); List<HColumn<String, String>> columns = slice.getColumns(); assertNotNull(columns); assertEquals(2, columns.size()); q = createMultigetSubSliceQuery(ko, se, se, se, se); q.setColumnFamily(cf); q.setKeys("testMultigetSubSliceQuery0"); slice = row2.getColumnSlice(); assertNotNull(slice); assertEquals(2, slice.getColumns().size()); for (HColumn<String, String> column : slice.getColumns()) { if (!column.getName().equals("c000") && !column.getName().equals("c110")) { fail("A columns with unexpected column name returned: "
/**
 * Fetch a map of columns and their values.
 *
 * @param query a cql query
 * @return the resulting columns and their raw byte values, in the order the
 *         rows and columns were returned
 */
protected Map<String, ByteBuffer> getColumns(String query) {
    CqlQuery<String, String, Object> cqlQuery = new CqlQuery<String, String, Object>(
            getDefaultKeyspace(), StringSerializer.get(), StringSerializer.get(),
            new TypeConverterSerializer<Object>());
    cqlQuery.setQuery(query);
    cqlQuery.setSuppressKeyInColumns(true);

    // LinkedHashMap preserves result order across rows and columns.
    Map<String, ByteBuffer> columnsByName = new LinkedHashMap<String, ByteBuffer>();
    for (Row<String, String, Object> row : cqlQuery.execute().get()) {
        for (HColumn<String, Object> column : row.getColumnSlice().getColumns()) {
            columnsByName.put(column.getName(), column.getValueBytes());
        }
    }
    return columnsByName;
}
/**
 * Load the messages for the given ids into an array list, ordered to match
 * the requested id order.
 *
 * NOTE(review): the {@code reversed} parameter is not used — the slice range
 * hard-codes {@code false}. Preserved as-is; confirm intent before changing.
 *
 * @param messageIds the message ids to load
 * @param reversed currently ignored (see note above)
 * @return the deserialized messages, sorted by requested order
 */
protected List<Message> loadMessages( Collection<UUID> messageIds, boolean reversed ) {
    Rows<UUID, String, ByteBuffer> rows = createMultigetSliceQuery( ko, ue, se, be )
            .setColumnFamily( MESSAGE_PROPERTIES.getColumnFamily() )
            .setKeys( messageIds )
            .setRange( null, null, false, ALL_COUNT )
            .execute()
            .get();

    List<Message> messages = new ArrayList<Message>( messageIds.size() );
    for ( Row<UUID, String, ByteBuffer> row : rows ) {
        Message message = deserializeMessage( row.getColumnSlice().getColumns() );
        // Rows that fail to deserialize (or are empty) are skipped.
        if ( message != null ) {
            messages.add( message );
        }
    }

    Collections.sort( messages, new RequestedOrderComparator( messageIds ) );
    return messages;
}
/**
 * Iterates all columns of 100 pre-populated rows via MultigetSliceIterator while
 * deleting each column as it is visited, then asserts that every column
 * (100 rows x 1000 columns) was seen despite the concurrent modification.
 */
@Test
public void testModificationIterator() {
    LinkedList<String> KEYS= new LinkedList<String>();
    for (int j = 0; j < 100; j++) {
        KEYS.add(KEY+j);
    }
    Mutator mutator = HFactory.createMutator(keyspace, se);
    MultigetSliceIterator<String, UUID, String> it= new MultigetSliceIterator<String, UUID, String>(false,keyspace, se,us,se, CF, KEYS, null,null,1,10,1000);// Retrieve all columns from row -> 1000
    Map<UUID, String> results = new HashMap<UUID, String>();
    while (it.hasNext()) {
        Row<String,UUID, String> r = it.next();
        for (HColumn<UUID, String> c : r.getColumnSlice().getColumns()) {
            results.put(c.getName(), c.getValue());
            mutator.addDeletion(r.getKey(), CF, c.getName(), us);
            // NOTE(review): execute() inside the inner loop flushes one deletion at a
            // time, defeating mutator batching — possibly deliberate here to force the
            // modification-during-iteration scenario; confirm before "optimizing".
            mutator.execute();
        }
    }
    assertEquals(100*1000, results.size());
}
// NOTE(review): the annotation below belongs to the next test method, which is
// outside this excerpt.
@Test