// Serializes a counter-cache key for the saved-cache file: the table identifier
// bytes followed by the length-prefixed partition key and cell name.
public void serialize(CounterCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
{
    assert(cfs.metadata.isCounter()); // counter cache entries only exist for counter tables
    out.write(cfs.metadata.ksAndCFBytes);
    ByteBufferUtil.writeWithLength(key.partitionKey, out);
    ByteBufferUtil.writeWithLength(key.cellName, out);
}
// Serializes a counter-cache key for the saved-cache file: the table identifier
// bytes followed by the length-prefixed partition key and cell name.
public void serialize(CounterCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
{
    assert(cfs.metadata.isCounter()); // counter cache entries only exist for counter tables
    out.write(cfs.metadata.ksAndCFBytes);
    ByteBufferUtil.writeWithLength(key.partitionKey, out);
    ByteBufferUtil.writeWithLength(key.cellName, out);
}
// Serializes a counter-cache key for the saved-cache file: the table identifier
// bytes followed by the length-prefixed partition key and cell name.
public void serialize(CounterCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
{
    assert(cfs.metadata.isCounter()); // counter cache entries only exist for counter tables
    out.write(cfs.metadata.ksAndCFBytes);
    ByteBufferUtil.writeWithLength(key.partitionKey, out);
    ByteBufferUtil.writeWithLength(key.cellName, out);
}
// Serializes a counter-cache key for the saved-cache file: the table identifier
// bytes followed by the length-prefixed partition key and cell name.
public void serialize(CounterCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
{
    assert(cfs.metadata.isCounter()); // counter cache entries only exist for counter tables
    out.write(cfs.metadata.ksAndCFBytes);
    ByteBufferUtil.writeWithLength(key.partitionKey, out);
    ByteBufferUtil.writeWithLength(key.cellName, out);
}
/**
 * Serializes a collection of tokens as a sequence of length-prefixed byte
 * blobs (one per token, in the partitioner's byte format), terminated by a
 * zero length.
 *
 * @param partitioner partitioner whose token factory defines the byte format
 * @param tokens      tokens to write, in iteration order
 * @param out         destination stream
 * @throws IOException if the underlying stream fails
 */
public static void serialize(IPartitioner partitioner, Collection<Token> tokens, DataOutput out) throws IOException
{
    for (Token token : tokens)
    {
        ByteBuffer tokenBuffer = partitioner.getTokenFactory().toByteArray(token);
        // Write the buffer's readable contents directly instead of dumping
        // tokenBuffer.array(): the old code asserted arrayOffset() == 0 (a
        // no-op when assertions are disabled) and ignored position/limit, so
        // a sliced or offset buffer would serialize the wrong bytes.
        ByteBufferUtil.writeWithLength(tokenBuffer, out);
    }
    out.writeInt(0); // zero-length entry marks the end of the token list
}
/**
 * Serializes a collection of tokens as a sequence of length-prefixed byte
 * blobs (one per token, in the partitioner's byte format), terminated by a
 * zero length.
 *
 * @param partitioner partitioner whose token factory defines the byte format
 * @param tokens      tokens to write, in iteration order
 * @param out         destination stream
 * @throws IOException if the underlying stream fails
 */
public static void serialize(IPartitioner partitioner, Collection<Token> tokens, DataOutput out) throws IOException
{
    for (Token token : tokens)
    {
        ByteBuffer tokenBuffer = partitioner.getTokenFactory().toByteArray(token);
        // Write the buffer's readable contents directly instead of dumping
        // tokenBuffer.array(): the old code asserted arrayOffset() == 0 (a
        // no-op when assertions are disabled) and ignored position/limit, so
        // a sliced or offset buffer would serialize the wrong bytes.
        ByteBufferUtil.writeWithLength(tokenBuffer, out);
    }
    out.writeInt(0); // zero-length entry marks the end of the token list
}
/**
 * Serializes a collection of tokens as a sequence of length-prefixed byte
 * blobs (one per token, in the partitioner's byte format), terminated by a
 * zero length.
 *
 * @param partitioner partitioner whose token factory defines the byte format
 * @param tokens      tokens to write, in iteration order
 * @param out         destination stream
 * @throws IOException if the underlying stream fails
 */
public static void serialize(IPartitioner partitioner, Collection<Token> tokens, DataOutput out) throws IOException
{
    for (Token token : tokens)
    {
        ByteBuffer tokenBuffer = partitioner.getTokenFactory().toByteArray(token);
        // Write the buffer's readable contents directly instead of dumping
        // tokenBuffer.array(): the old code asserted arrayOffset() == 0 (a
        // no-op when assertions are disabled) and ignored position/limit, so
        // a sliced or offset buffer would serialize the wrong bytes.
        ByteBufferUtil.writeWithLength(tokenBuffer, out);
    }
    out.writeInt(0); // zero-length entry marks the end of the token list
}
// Serializes a row-cache key: the table identifier bytes followed by the
// length-prefixed partition key.
public void serialize(RowCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
{
    assert(!cfs.isIndex()); // shouldn't have row cache entries for indexes
    out.write(cfs.metadata.ksAndCFBytes);
    ByteBufferUtil.writeWithLength(key.key, out);
}
// Serializes a row-cache key: the table identifier bytes followed by the
// length-prefixed partition key.
public void serialize(RowCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
{
    assert(!cfs.isIndex()); // shouldn't have row cache entries for indexes
    out.write(cfs.metadata.ksAndCFBytes);
    ByteBufferUtil.writeWithLength(key.key, out);
}
// Serializes a row-cache key: the table identifier bytes followed by the
// length-prefixed partition key.
public void serialize(RowCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
{
    assert(!cfs.isIndex()); // shouldn't have row cache entries for indexes
    out.write(cfs.metadata.ksAndCFBytes);
    ByteBufferUtil.writeWithLength(key.key, out);
}
// Serializes a row-cache key: the table identifier bytes followed by the
// length-prefixed partition key.
public void serialize(RowCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
{
    assert(!cfs.isIndex()); // shouldn't have row cache entries for indexes
    out.write(cfs.metadata.ksAndCFBytes);
    ByteBufferUtil.writeWithLength(key.key, out);
}
// Serializes compaction metadata: the count of ancestor sstable generations,
// the generation numbers themselves, then the length-prefixed serialized
// cardinality estimator.
public void serialize(CompactionMetadata component, DataOutputPlus out) throws IOException
{
    out.writeInt(component.ancestors.size());
    for (int g : component.ancestors)
        out.writeInt(g); // one generation number per ancestor sstable
    ByteBufferUtil.writeWithLength(component.cardinalityEstimator.getBytes(), out);
}
/**
 * Writes {@code token} length-prefixed, converted to bytes by the token
 * factory of the token's own partitioner.
 */
public void serialize(Token token, DataOutputPlus out, int version) throws IOException
{
    ByteBufferUtil.writeWithLength(token.getPartitioner().getTokenFactory().toByteArray(token), out);
}
// Serializes compaction metadata for the given sstable format version. Older
// versions carried an ancestor-generation list, so for those a zero count is
// written to keep the on-disk layout compatible; the cardinality estimator is
// always written length-prefixed.
public void serialize(Version version, CompactionMetadata component, DataOutputPlus out) throws IOException
{
    if (version.hasCompactionAncestors())
    {
        // write empty ancestor marker
        out.writeInt(0);
    }
    ByteBufferUtil.writeWithLength(component.cardinalityEstimator.getBytes(), out);
}
// Serializes compaction metadata for the given sstable format version. Older
// versions carried an ancestor-generation list, so for those a zero count is
// written to keep the on-disk layout compatible; the cardinality estimator is
// always written length-prefixed.
public void serialize(Version version, CompactionMetadata component, DataOutputPlus out) throws IOException
{
    if (version.hasCompactionAncestors())
    {
        // write empty ancestor marker
        out.writeInt(0);
    }
    ByteBufferUtil.writeWithLength(component.cardinalityEstimator.getBytes(), out);
}
// Writes the token, length-prefixed, using the byte format of the token's
// own partitioner.
public void serialize(Token token, DataOutputPlus out, int version) throws IOException
{
    IPartitioner p = token.getPartitioner();
    ByteBuffer b = p.getTokenFactory().toByteArray(token);
    // NOTE(review): 'version' is unused here — the byte format appears to be
    // version-independent; confirm against the matching deserialize path.
    ByteBufferUtil.writeWithLength(b, out);
}
/**
 * Writes {@code token} length-prefixed, converted to bytes by the token
 * factory of the globally configured partitioner.
 */
public void serialize(Token token, DataOutputPlus out) throws IOException
{
    ByteBufferUtil.writeWithLength(StorageService.getPartitioner().getTokenFactory().toByteArray(token), out);
}
/**
 * Serializes the token: its byte form (per the token's own partitioner) is
 * written with a length prefix.
 */
public void serialize(Token token, DataOutputPlus out, int version) throws IOException
{
    ByteBuffer serialized = token.getPartitioner().getTokenFactory().toByteArray(token);
    ByteBufferUtil.writeWithLength(serialized, out);
}
// Serializes compaction metadata for the given sstable format version. Older
// versions carried an ancestor-generation list, so for those a zero count is
// written to keep the on-disk layout compatible; the cardinality estimator is
// always written length-prefixed.
public void serialize(Version version, CompactionMetadata component, DataOutputPlus out) throws IOException
{
    if (version.hasCompactionAncestors())
    {
        // write empty ancestor marker
        out.writeInt(0);
    }
    ByteBufferUtil.writeWithLength(component.cardinalityEstimator.getBytes(), out);
}
// Serializes one key-cache entry: table identifier bytes, length-prefixed key,
// sstable generation, a presence flag, then the row index entry itself.
public void serialize(KeyCacheKey key, DataOutputPlus out, ColumnFamilyStore cfs) throws IOException
{
    RowIndexEntry entry = CacheService.instance.keyCache.getInternal(key);
    // The entry may have been evicted between key enumeration and this call;
    // in that case nothing at all is written for this key.
    // NOTE(review): presumably the reader tolerates a record simply being
    // absent — confirm against the matching deserialize/load path.
    if (entry == null)
        return;
    out.write(cfs.metadata.ksAndCFBytes);
    ByteBufferUtil.writeWithLength(key.key, out);
    out.writeInt(key.desc.generation);
    out.writeBoolean(true); // flag indicating an index entry follows
    cfs.metadata.comparator.rowIndexEntrySerializer().serialize(entry, out);
}