@Override public Map<String, String> buildResultsCQL( final ResultSet resultSet ) { final Map<String, String> results = new HashMap<>(); resultSet.all().forEach( row -> { @SuppressWarnings("unchecked") List<Object> keys = (List) deserializeMapEntryKey(row.getBytes("key")); String value = (String)DataType.text().deserialize( row.getBytes("value"), ProtocolVersion.NEWEST_SUPPORTED ); // the actual string key value is the last element results.put((String)keys.get(keys.size() -1), value); }); return results; } }
ByteBuffer partitionKey = unique.getBytes("key"); ByteBuffer column = unique.getBytesUnsafe("column1");
/**
 * Loads the stored body for a queue message.
 *
 * @param messageId id of the message whose data row should be fetched
 * @return the message body (bytes + content type), or {@code null} when no row exists
 */
@Override
public DatabaseQueueMessageBody loadMessageData( final UUID messageId ) {

    logger.trace("loadMessageData {}", messageId);

    // Single-row lookup keyed by the message id.
    final Clause byMessageId = QueryBuilder.eq( COLUMN_MESSAGE_ID, messageId );
    final Statement query = QueryBuilder.select().from( TABLE_MESSAGE_DATA ).where( byMessageId );

    final Row row = cassandraClient.getApplicationSession().execute( query ).one();

    // No data was ever written (or it has expired).
    if ( row == null ) {
        return null;
    }

    return new DatabaseQueueMessageBody(
        row.getBytes( COLUMN_MESSAGE_DATA ),
        row.getString( COLUMN_CONTENT_TYPE ) );
}
/**
 * Fetches all known token properties for the given token id in a single query.
 *
 * @param tokenUUID id of the token; must not be null
 * @return property name -> deserialized value for every row returned
 * @throws RuntimeException when a property value cannot be deserialized
 */
@Override
public Map<String, Object> getTokenInfo( UUID tokenUUID ) {

    Preconditions.checkNotNull(tokenUUID, "token UUID is required");

    // Serialize each property name so it can be matched against the column1 clustering key.
    final List<ByteBuffer> propertyColumns = new ArrayList<>();
    TOKEN_PROPERTIES.forEach( property ->
        propertyColumns.add( DataType.serializeValue( property, ProtocolVersion.NEWEST_SUPPORTED ) ) );

    final ByteBuffer rowKey = DataType.uuid().serialize( tokenUUID, ProtocolVersion.NEWEST_SUPPORTED );

    final Statement query = QueryBuilder.select().all().from( TOKENS_TABLE )
        .where( QueryBuilder.eq( "key", rowKey ) )
        .and( QueryBuilder.in( "column1", propertyColumns ) )
        .setConsistencyLevel( cassandraConfig.getDataStaxReadCl() );

    final Map<String, Object> tokenInfo = new HashMap<>();

    for ( Row row : session.execute( query ).all() ) {

        final String name = (String) DataType.text()
            .deserialize( row.getBytes("column1"), ProtocolVersion.NEWEST_SUPPORTED );

        final Object value = deserializeColumnValue( name, row.getBytes("value") );
        if ( value == null ) {
            throw new RuntimeException("error deserializing token info for property: " + name);
        }

        tokenInfo.put( name, value );
    }

    logger.trace("getTokenInfo, info: {}", tokenInfo);

    return tokenInfo;
}
rowKey = CassandraDatastore.DATA_POINTS_ROW_KEY_SERIALIZER.fromByteBuffer(record.getBytes(0), m_clusterName);
private V readValueCQL(CacheScope scope, K key, TypeReference typeRef){ Preconditions.checkNotNull(scope, "scope is required"); Preconditions.checkNotNull(key, "key is required"); final String rowKeyString = scope.getApplication().getUuid().toString(); final int bucket = BUCKET_LOCATOR.getCurrentBucket(rowKeyString); // determine column name based on K key to string final String columnName = key.toString(); final Clause inKey = QueryBuilder.eq("key", getPartitionKey(scope, rowKeyString, bucket) ); final Clause inColumn = QueryBuilder.eq("column1", DataType.text().serialize(columnName, ProtocolVersion.NEWEST_SUPPORTED) ); final Statement statement = QueryBuilder.select().all().from(SCOPED_CACHE_TABLE) .where(inKey) .and(inColumn) .setConsistencyLevel(cassandraConfig.getDataStaxReadCl()); final ResultSet resultSet = session.execute(statement); final com.datastax.driver.core.Row row = resultSet.one(); if (row == null){ if(logger.isDebugEnabled()){ logger.debug("Cache value not found for key {}", key ); } return null; } try { return MAPPER.readValue(row.getBytes("value").array(), typeRef); } catch (IOException ioe) { logger.error("Unable to read cached value", ioe); throw new RuntimeException("Unable to read cached value", ioe); } }
/**
 * Reads the raw value blob for a single map entry.
 *
 * @param scope            map scope that owns the entry
 * @param key              entry key used to build the partition key
 * @param consistencyLevel consistency level for the read
 * @return the value column's bytes, or {@code null} when the entry does not exist
 */
private ByteBuffer getValueCQL( MapScope scope, String key, final ConsistencyLevel consistencyLevel ) {

    // Single-partition lookup for this entry's row.
    final Statement query = QueryBuilder.select().all().from(MAP_ENTRIES_TABLE)
        .where( QueryBuilder.in("key", getMapEntryPartitionKey(scope, key) ) )
        .setConsistencyLevel(consistencyLevel);

    final com.datastax.driver.core.Row row = session.execute(query).one();

    if ( row == null ) {
        return null;
    }
    return row.getBytes("value");
}
protected static Object getCassandraObject(Row row, int i, DataType dataType) { if (dataType.equals(DataType.blob())) { return row.getBytes(i);
return settings.getSerializer().deserialize(row.getBytes(col));
/**
 * Pages through all keys of a map, reading every bucket partition in one IN query.
 *
 * @param scope  map scope whose keys should be listed
 * @param cursor opaque paging cursor from a previous call, or blank for the first page
 * @param limit  maximum number of keys to return in this page
 * @return the keys for this page plus a cursor for the next one (null when exhausted)
 */
@Override
public MapKeyResults getAllKeys(final MapScope scope, final String cursor, final int limit ){

    // One partition key per bucket the map's keys are spread across.
    final int[] buckets = BUCKET_LOCATOR.getAllBuckets( scope.getName() );
    final List<ByteBuffer> partitionKeys = new ArrayList<>(NUM_BUCKETS.length);
    for (int bucket : buckets) {
        partitionKeys.add(getMapKeyPartitionKey(scope, bucket));
    }

    Statement statement = QueryBuilder.select().all().from(MAP_KEYS_TABLE)
        .where( QueryBuilder.in("key", partitionKeys) )
        .setFetchSize(limit);

    // Resume from the previous page when a cursor was supplied.
    if( !isBlank(cursor) ){
        statement = statement.setPagingState(PagingState.fromString(cursor));
    }

    final ResultSet resultSet = session.execute(statement);
    final PagingState pagingState = resultSet.getExecutionInfo().getPagingState();

    // Drain at most `limit` rows from the iterator; the rest stay on later pages.
    final List<String> keys = new ArrayList<>();
    final Iterator<Row> rows = resultSet.iterator();
    for (int count = 0; count < limit && rows.hasNext(); count++) {
        keys.add((String) DataType.text()
            .deserialize(rows.next().getBytes("column1"), ProtocolVersion.NEWEST_SUPPORTED));
    }

    final String nextCursor = pagingState != null ? pagingState.toString() : null;
    return new MapKeyResults(nextCursor, keys);
}
retVal = row.getBytes(columnName); if (member != null && retVal != null && entity != null)
/** Delegates to the underlying {@code row}'s {@code getBytes(String)}. */
@Override
public ByteBuffer getBytes(String name) {
    final ByteBuffer bytes = row.getBytes(name);
    return bytes;
}
/** Delegates to the underlying {@code row}'s index-based {@code getBytes(int)}. */
@Override
public ByteBuffer getBytes(int i) {
    final ByteBuffer bytes = row.getBytes(i);
    return bytes;
}
/** Forwards the named-column byte lookup to the wrapped {@code row}. */
@Override
public ByteBuffer getBytes(String name) {
    final ByteBuffer result = row.getBytes(name);
    return result;
}
private void testByteRows(int key) throws Throwable { // Build small ByteBuffer sample ByteBuffer bb = ByteBuffer.allocate(58); bb.putShort((short) 0xCAFE); bb.flip(); // Write data for (int i = 0; i < 1000000; ++i) { session() .execute( insertInto("wide_byte_rows") .value("k", key) .value("i", bb) .setConsistencyLevel(ConsistencyLevel.QUORUM)); } // Read data ResultSet rs = session().execute(select("i").from("wide_byte_rows").where(eq("k", key))); // Verify data for (Row row : rs) { assertEquals(row.getBytes("i"), bb); } }
ByteBuffer rawInitCond = row.getBytes("initcond"); if (rawInitCond == null) { initCond = null;
/**
 * Accumulator that thrift-decodes the "span" column of each row as a V1 span and
 * appends its V2 conversion to the result list. A fresh reader and converter are
 * created for every row.
 */
@Override
protected BiConsumer<Row, List<Span>> accumulator() {
    return (row, result) -> {
        V1Span decoded = V1ThriftSpanReader.create().read(row.getBytes("span"));
        V1SpanConverter.create().convert(decoded, result);
    };
}
/** {@inheritDoc} */ @Override public FijiCell<T> apply(final Row row) { try { final DecodedCell<T> decodedCell = mCellDecoder.decodeCell(ByteUtils.toBytes(row.getBytes(CQLUtils.VALUE_COL))); return FijiCell.create(mColumnName, row.getLong(CQLUtils.VERSION_COL), decodedCell); } catch (IOException e) { throw new FijiIOException(e); } } }