public byte[] toBytes(Tuple tuple) { nullFlags.clear(); int size = estimateTupleDataSize(tuple); ByteBuffer bb = ByteBuffer.allocate(size + headerSize); bb.position(headerSize);
public byte[] toBytes(Tuple tuple) { nullFlags.clear(); int size = estimateTupleDataSize(tuple); ByteBuffer bb = ByteBuffer.allocate(size + headerSize); bb.position(headerSize);
public byte[] toBytes(Tuple tuple) { nullFlags.clear(); int size = estimateTupleDataSize(tuple); ByteBuffer bb = ByteBuffer.allocate(size + headerSize); bb.position(headerSize);
public byte[] toBytes(Tuple tuple) { nullFlags.clear(); int size = estimateTupleDataSize(tuple); ByteBuffer bb = ByteBuffer.allocate(size + headerSize); bb.position(headerSize);
public byte [] toBytes(Tuple tuple) { nullFlags.clear(); int size = estimateTupleDataSize(tuple); ByteBuffer bb = ByteBuffer.allocate(size + headerSize); bb.position(headerSize);
byte [] bytes = encoder.toBytes(tuple); Tuple tuple2 = decoder.toTuple(bytes);
/**
 * Fetches up to {@code fetchRowNum} rows from the scan executor, encoding each
 * tuple into a {@link ByteString} via {@code rowEncoder}.
 *
 * Stops early when: the executor is exhausted (sets {@code eof}), the per-call
 * quota {@code fetchRowNum} is met, or the running total {@code currentNumRows}
 * reaches {@code maxRow} (sets {@code eof}). Once {@code eof} is set, the
 * resource is closed before returning.
 *
 * NOTE(review): with fetchRowNum <= 0 this still fetches one row before the
 * quota check fires — presumably callers always pass a positive value; verify.
 *
 * @param fetchRowNum maximum number of rows to return from this call
 * @return encoded rows; empty list if no scan executor is set
 * @throws IOException if the scan or close fails
 */
public List<ByteString> getNextRows(int fetchRowNum) throws IOException {
  List<ByteString> rows = new ArrayList<>();
  // No executor configured: nothing to scan.
  if (scanExec == null) {
    return rows;
  }
  int rowCount = 0; // rows produced by this call only
  while (!eof) {
    Tuple tuple = scanExec.next();
    if (tuple == null) {
      // Underlying scan is exhausted.
      eof = true;
      break;
    }
    rows.add(ByteString.copyFrom((rowEncoder.toBytes(tuple))));
    rowCount++;
    currentNumRows++; // total across all calls on this instance
    // Per-call quota reached: stop, but do NOT mark eof — more rows may follow.
    if (rowCount >= fetchRowNum) {
      break;
    }
    // Global row limit reached: treat as end of stream.
    if (currentNumRows >= maxRow) {
      eof = true;
      break;
    }
  }
  // Release resources as soon as the stream is known to be finished.
  if(eof) {
    close();
  }
  return rows;
}
rows.add(ByteString.copyFrom(encoder.toBytes(currentTuple)));
/**
 * Returns the next batch of encoded rows, at most {@code fetchRowNum} of them.
 *
 * The batch ends when the executor runs dry, the per-call quota is met, or the
 * overall {@code maxRow} cap is hit; the first and last of these mark the
 * stream as finished ({@code eof}) and trigger {@code close()}.
 *
 * @param fetchRowNum upper bound on rows returned by this invocation
 * @return encoded rows, possibly empty
 * @throws IOException on scan or close failure
 */
public List<ByteString> getNextRows(int fetchRowNum) throws IOException {
  List<ByteString> batch = new ArrayList<>();
  if (scanExec == null) {
    return batch;
  }

  int producedThisCall = 0;
  while (true) {
    if (eof) {
      break;
    }
    Tuple next = scanExec.next();
    if (next == null) {
      eof = true; // executor exhausted
      break;
    }

    batch.add(ByteString.copyFrom(rowEncoder.toBytes(next)));
    producedThisCall++;
    currentNumRows++;

    if (producedThisCall >= fetchRowNum) {
      // Quota for this call satisfied; stream may still have rows.
      break;
    }
    if (currentNumRows >= maxRow) {
      eof = true; // overall cap reached
      break;
    }
  }

  if (eof) {
    close();
  }
  return batch;
}
rows.add(ByteString.copyFrom(encoder.toBytes(currentTuple)));
/**
 * Appends one index entry to {@code byteBuf}:
 * [key length:int][key bytes][offset count:int][offset:long]*.
 *
 * Flushes the buffer to the output file once it grows past BUFFER_SIZE,
 * advancing {@code filePos} by the number of bytes flushed.
 *
 * NOTE(review): a null element in {@code offsets} would NPE on unboxing in the
 * for-each loop — presumably callers never pass null offsets; verify.
 *
 * @param tuple   index key; encoded via {@code rowStoreEncoder}
 * @param offsets file offsets of the rows sharing this key
 * @throws IOException if flushing to the file fails
 */
private void writeIndex(ByteBuf byteBuf, Tuple tuple, Long... offsets) throws IOException {
  byte[] buf = rowStoreEncoder.toBytes(tuple);
  // 8 = 4-byte key length + 4-byte offset count; each offset is 8 bytes.
  int size = buf.length + 8 + (offsets.length * 8);
  if (!byteBuf.isWritable(size)) {
    byteBuf.ensureWritable(size);
  }
  // key writing
  byteBuf.writeInt(buf.length);
  byteBuf.writeBytes(buf);
  //offset num writing
  byteBuf.writeInt(offsets.length);
  /* offset writing */
  for (long offset : offsets) {
    byteBuf.writeLong(offset);
  }
  entrySize++;
  // flush to file and reset buffer
  if (byteBuf.writerIndex() >= BUFFER_SIZE) {
    filePos += flushBuffer(byteBuf, outChannel, out);
  }
}
public void writeHeader(int entryNum) throws IOException { // schema byte [] schemaBytes = keySchema.getProto().toByteArray(); out.writeInt(schemaBytes.length); out.write(schemaBytes); // comparator byte [] comparatorBytes = compartor.getProto().toByteArray(); out.writeInt(comparatorBytes.length); out.write(comparatorBytes); // level out.writeInt(this.level); // entry out.writeInt(entryNum); if (entryNum > 0) { byte [] minBytes = rowStoreEncoder.toBytes(firstKey); out.writeInt(minBytes.length); out.write(minBytes); byte [] maxBytes = rowStoreEncoder.toBytes(lastKey); out.writeInt(maxBytes.length); out.write(maxBytes); } out.flush(); }
private void writeIndex(ByteBuf byteBuf, Tuple tuple, Long... offsets) throws IOException { byte[] buf = rowStoreEncoder.toBytes(tuple); int size = buf.length + 8 + (offsets.length * 8); if (!byteBuf.isWritable(size)) { byteBuf.ensureWritable(size); } // key writing byteBuf.writeInt(buf.length); byteBuf.writeBytes(buf); //offset num writing byteBuf.writeInt(offsets.length); /* offset writing */ for (long offset : offsets) { byteBuf.writeLong(offset); } entrySize++; // flush to file and reset buffer if (byteBuf.writerIndex() >= BUFFER_SIZE) { filePos += flushBuffer(byteBuf, outChannel, out); } }
private void writeRootIndex(ByteBuf byteBuf, Tuple tuple, long offset) throws IOException { byte[] buf = rowStoreEncoder.toBytes(tuple); int size = buf.length + 12; if (!byteBuf.isWritable(size)) { byteBuf.ensureWritable(size); } // key writing byteBuf.writeInt(buf.length); byteBuf.writeBytes(buf); // leaf offset writing byteBuf.writeLong(offset); rootEntrySize++; // flush to file and reset buffer if (byteBuf.writerIndex() >= BUFFER_SIZE) { flushBuffer(byteBuf, rootOutChannel, rootOut); } }
/**
 * Writes one root-index entry to {@code byteBuf}:
 * [key length:int][key bytes][leaf offset:long].
 *
 * Flushes the buffer to the root output file when it exceeds BUFFER_SIZE.
 *
 * @param tuple  key tuple, encoded via {@code rowStoreEncoder}
 * @param offset position of the leaf index block for this key
 * @throws IOException if the flush fails
 */
private void writeRootIndex(ByteBuf byteBuf, Tuple tuple, long offset) throws IOException {
  byte[] buf = rowStoreEncoder.toBytes(tuple);
  // 12 = 4-byte key length + 8-byte leaf offset.
  int size = buf.length + 12;
  if (!byteBuf.isWritable(size)) {
    byteBuf.ensureWritable(size);
  }
  // key writing
  byteBuf.writeInt(buf.length);
  byteBuf.writeBytes(buf);
  // leaf offset writing
  byteBuf.writeLong(offset);
  rootEntrySize++;
  // flush to file and reset buffer
  if (byteBuf.writerIndex() >= BUFFER_SIZE) {
    flushBuffer(byteBuf, rootOutChannel, rootOut);
  }
}
/**
 * Static factory for {@link RowStoreEncoder} instances bound to the given schema.
 *
 * @param schema schema describing the tuples the encoder will serialize
 * @return a new encoder for {@code schema}
 */
public static RowStoreEncoder createEncoder(Schema schema) {
  RowStoreEncoder encoder = new RowStoreEncoder(schema);
  return encoder;
}
/**
 * Creates a {@link RowStoreEncoder} for the given schema.
 *
 * @param schema schema of the tuples to be encoded
 * @return a new encoder instance
 */
public static RowStoreEncoder createEncoder(Schema schema) {
  return new RowStoreEncoder(schema);
}