/**
 * Converts a collection of term-vector columns into an array of
 * {@code LucandraTermInfo} entries: the doc id is decoded from each column
 * name via {@code CassandraUtils.readVInt} and the payload is the raw value.
 *
 * @param docs columns read from the TermInfo column family
 * @return one {@code LucandraTermInfo} per column, in iteration order
 * @throws IllegalStateException if the data uses the legacy SuperColumn
 *         layout, or a column is missing its name or value
 */
public static LucandraTermInfo[] convertTermInfo(Collection<IColumn> docs)
{
    LucandraTermInfo[] result = new LucandraTermInfo[docs.size()];

    int idx = 0;
    for (IColumn column : docs)
    {
        // Defensive: never write past the array if the collection grew.
        if (idx >= result.length)
            break;

        // The legacy super-column layout is rejected up front (checked on the
        // first element only, as in the original).
        if (idx == 0 && column instanceof SuperColumn)
            throw new IllegalStateException(
                    "TermInfo ColumnFamily is a of type Super: This is no longer supported, please see NEWS.txt");

        if (column == null || column.name() == null || column.value() == null)
            throw new IllegalStateException("Encountered missing column: " + column);

        result[idx++] = new LucandraTermInfo(CassandraUtils.readVInt(column.name()), column.value());
    }

    return result;
}
// NOTE(review): fragment — the tail of a larger expression that reads the value
// of the sub-column keyed by resourceNameBytes; the receiver and enclosing
// statement are outside this view, so nothing can be safely changed here.
.getSubColumn(resourceNameBytes).value();
// NOTE(review): garbled fragment of shard-name parsing — statements appear out
// of order (a bare "continue;" outside any visible loop, a brace-less for-header,
// and an orphaned ".value())));" tail), so the original control flow cannot be
// reconstructed from this view. Appears to log and skip invalid shard names,
// then read per-token offsets — TODO confirm against the original source file.
String shardStr = ByteBufferUtil.string(c.name()); Integer shardNum = null; logger.warn("invalid shard name encountered: "+shardStr+" "+c.getSubColumns().size()); continue; for (IColumn subCol : c.getSubColumns()) String token = ByteBufferUtil.string(subCol.name()); .value()))); int startSeqOffset = getRandomSequenceOffset(offset.get());
// NOTE(review): fragment — a bare condition comparing the timestamp of the
// cached sub-column of the first row against lastCheck; the enclosing
// if-statement (and any null guards on cf/getColumn) are outside this view.
rows.get(0).cf.getColumn(CassandraUtils.cachedColBytes).getSubColumn(CassandraUtils.cachedColBytes).timestamp() >= lastCheck)
/**
 * Checks whether a document with the given key is already indexed under
 * {@code indexName}, returning its assigned doc id if so.
 *
 * Reads the "keys" row for this (index, key) pair at QUORUM and, if exactly
 * one row with a matching super-column exists, parses the first sub-column's
 * name as the document's id.
 *
 * @param indexName the logical index name
 * @param key       the document key
 * @return the existing doc id, or {@code null} when no record is found
 * @throws IOException on encoding or read failures
 */
public Long checkForUpdate(String indexName, String key) throws IOException
{
    ByteBuffer keyCol = ByteBuffer.wrap(key.getBytes("UTF-8"));
    ByteBuffer keyKey = CassandraUtils.hashKeyBytes((indexName + "~" + key).getBytes("UTF-8"),
            CassandraUtils.delimeterBytes, "keys".getBytes("UTF-8"));

    List<Row> rows = CassandraUtils.robustRead(keyKey, new QueryPath(CassandraUtils.schemaInfoColumnFamily),
            Arrays.asList(keyCol), ConsistencyLevel.QUORUM);

    if (rows.size() == 1)
    {
        Row row = rows.get(0);
        if (row.cf != null)
        {
            IColumn col = row.cf.getColumn(keyCol);
            if (col != null)
            {
                Collection<IColumn> subCols = col.getSubColumns();
                if (subCols != null && !subCols.isEmpty())
                {
                    // FIX: reuse the sub-column collection fetched above instead
                    // of calling col.getSubColumns() a second time (the original
                    // re-fetched it, doing the work twice for no benefit).
                    ByteBuffer idVal = subCols.iterator().next().name();
                    return Long.valueOf(ByteBufferUtil.string(idVal));
                }
            }
        }
    }

    return null;
}
// NOTE(review): garbled fragment of token tie-breaking logic — the if/for
// statements are flattened without braces, so it is impossible to tell from
// this view which statements are guarded by which condition (e.g. whether the
// first "winningToken = c.name()" belongs inside the timestamp check). It
// appears to pick the winner by minimum timestamp, breaking ties by smallest
// token — TODO confirm against the original source before touching.
for (IColumn c : supercol.getSubColumns()) logger.debug(offset + " was taken by " + ByteBufferUtil.string(c.name())); if (c.isMarkedForDelete()) if (c.timestamp() < minTtl) minTtl = c.timestamp(); winningToken = c.name(); if (c.timestamp() == minTtl && winningToken.compareTo(c.name()) <= 0) winningToken = c.name();
public void deleteDocuments(String indexName, Term term, boolean autoCommit) throws CorruptIndexException, IOException { ColumnParent cp = new ColumnParent(CassandraUtils.termVecColumnFamily); ByteBuffer key = CassandraUtils.hashKeyBytes(indexName.getBytes("UTF-8"), CassandraUtils.delimeterBytes, term .field().getBytes("UTF-8"), CassandraUtils.delimeterBytes, term.text().getBytes("UTF-8")); ReadCommand rc = new SliceFromReadCommand(CassandraUtils.keySpace, key, cp, ByteBufferUtil.EMPTY_BYTE_BUFFER, ByteBufferUtil.EMPTY_BYTE_BUFFER, false, Integer.MAX_VALUE); List<Row> rows = CassandraUtils.robustRead(CassandraUtils.consistency, rc); // delete by documentId for (Row row : rows) { if (row.cf != null) { Collection<IColumn> columns = row.cf.getSortedColumns(); for (IColumn col : columns) { deleteLucandraDocument(indexName, CassandraUtils.readVInt(col.name()), autoCommit); } } } }
// NOTE(review): fragment — a bare "return;" (whose guard condition is outside
// this view) followed by Thrift deserialization of the metadata column's value;
// left untouched because the enclosing method is not visible.
return; DocumentMetadata terms = fromBytesUsingThrift(metaCol.value());
// NOTE(review): fragment — parses the first sub-column's name as the doc id and
// maps it to a shard. Assumes getSubColumns() is non-empty (iterator().next()
// would otherwise throw) — TODO confirm the enclosing code guarantees this.
ByteBuffer idCol = col.getSubColumns().iterator().next().name(); Long id = Long.valueOf(ByteBufferUtil.string(idCol)); int shard = CassandraIndexManager.getShardFromDocId(id);
/**
 * Builds a log entry for a mutation against the given column family,
 * recording one {@code ColumnOperation} (name + delete flag) per column.
 *
 * @param keyspace         the keyspace the mutation targets
 * @param columnFamily     the mutated column family (its cfName is recorded)
 * @param rowKey           the mutated row key
 * @param consistencyLevel consistency level of the originating write
 * @param host             originating host
 * @param timestamp        mutation timestamp
 * @param columnNames      names forwarded to the delegated constructor
 * @throws Throwable propagated from the delegated constructor
 */
public LogEntry(String keyspace, ColumnFamily columnFamily, ByteBuffer rowKey, ConsistencyLevel consistencyLevel,
        String host, long timestamp, Collection<String> columnNames) throws Throwable
{
    this(keyspace, rowKey, consistencyLevel, host, timestamp, columnNames);
    this.columnFamily = columnFamily.metadata().cfName;
    for (IColumn column : columnFamily.getSortedColumns())
    {
        ColumnOperation operation = new ColumnOperation();
        operation.setName(column.name());
        // FIX: record the delete flag of the individual column rather than the
        // whole column family. The original checked columnFamily.isMarkedForDelete()
        // inside the per-column loop, which flags every operation as a delete when
        // the CF is tombstoned and never flags individual column tombstones.
        operation.setDelete(column.isMarkedForDelete());
        operations.add(operation);
    }
}
/**
 * Builds the per-reader field cache: a dense array indexed by doc id whose
 * entries are the string values stored for {@code entryKey.field}, read from
 * the Cassandra-backed field-cache columns. Tombstoned columns are skipped,
 * leaving {@code null} at those slots.
 */
@Override
protected Object createValue(IndexReader reader, Entry entryKey) throws IOException
{
    String field = StringHelper.intern(entryKey.field);
    final String[] values = new String[reader.maxDoc()];

    // Column name encodes the doc id (vint); column value is the cached string.
    for (IColumn column : getFieldCacheEntries(reader, field))
    {
        if (column instanceof DeletedColumn)
            continue;

        values[CassandraUtils.readVInt(column.name())] = ByteBufferUtil.string(column.value());
    }

    return values;
}
} // closing brace of the enclosing scope, carried over from the original span
// NOTE(review): "xxx"/"xxxSerializer" look like template placeholders to be
// substituted with the real value type and its serializer — left as-is.
// FIX: the iterator declaration was missing its variable name
// ("Iterator<ByteBuffer> = ..."), which does not compile; named it "iter" to
// match the existing iter.hasNext()/iter.next() usages below.
Iterator<ByteBuffer> iter = columns.keySet().iterator();
while (iter.hasNext())
{
    IColumn col = columns.get(iter.next());
    // Deserialize the right-sized value buffer into the placeholder type.
    xxx colVal = xxxSerializer.get().fromByteBuffer(TBaseHelper.rightSize(col.value()));
}
/**
 * Word-count mapper: reads the configured source column for this row,
 * tokenizes its string value on whitespace, and emits a (token, one) pair per
 * token. Rows without the source column are skipped.
 *
 * @param key     the row key (logged only)
 * @param columns the row's columns keyed by column name
 * @param context Hadoop context used to emit (word, one) pairs
 */
public void map(ByteBuffer key, SortedMap<ByteBuffer, IColumn> columns, Context context) throws IOException,
        InterruptedException
{
    IColumn column = columns.get(sourceColumn);
    if (column == null)
        return;

    String value = ByteBufferUtil.string(column.value());

    // FIX: guard the debug log so the string concatenation and the
    // context.getInputSplit() call run only when debug logging is enabled —
    // the original paid that cost on every row regardless of log level.
    if (logger.isDebugEnabled())
        logger.debug("read " + key + ":" + value + " from " + context.getInputSplit());

    StringTokenizer itr = new StringTokenizer(value);
    while (itr.hasMoreTokens())
    {
        word.set(itr.nextToken());
        context.write(word, one);
    }
}
} // closing brace of the enclosing class, carried over from the original span
/**
 * Mapper callback: tokenizes the value of the configured source column for
 * this row and emits one (token, one) pair per whitespace-separated token;
 * rows lacking the source column are skipped.
 */
public void map(ByteBuffer key, SortedMap<ByteBuffer, IColumn> columns, Context context) throws IOException,
        InterruptedException
{
    IColumn sourceCol = columns.get(sourceColumn);
    if (sourceCol == null)
        return;

    String text = ByteBufferUtil.string(sourceCol.value());
    logger.debug("read " + key + ":" + text + " from " + context.getInputSplit());

    // Emit (token, one) for every whitespace-separated token.
    for (StringTokenizer tokens = new StringTokenizer(text); tokens.hasMoreTokens();)
    {
        word.set(tokens.nextToken());
        context.write(word, one);
    }
}
} // closing brace of the enclosing class, carried over from the original span