/**
 * Reports whether the byte-encoded form of an index id is longer than
 * the backend limit {@code BytesBuffer.INDEX_ID_MAX_LENGTH}.
 */
protected static boolean indexIdLengthExceedLimit(Id id) {
    int length = id.asBytes().length;
    return length > BytesBuffer.INDEX_ID_MAX_LENGTH;
}
/**
 * An element matches when its id is after the configured start bound
 * (inclusive or exclusive, per {@code inclusiveStart}) AND its id bytes
 * begin with the configured prefix.
 */
@Override
public boolean test(HugeElement element) {
    byte[] elementId = element.id().asBytes();
    int startCmp = Bytes.compare(elementId, this.start.asBytes());
    boolean afterStart = this.inclusiveStart ? startCmp >= 0 : startCmp > 0;
    if (!afterStart) {
        return false;
    }
    return Bytes.prefixWith(elementId, this.prefix.asBytes());
}
/**
 * Validates that this entry's id fits the backend id-length limit,
 * raising an argument error (via {@code E.checkArgument}) otherwise.
 */
protected void checkIdLength() {
    assert this.id != null;
    byte[] idBytes = this.id.asBytes();
    E.checkArgument(idBytes.length <= BytesBuffer.ID_MAX_LEN,
                    "The max length of vertex id is %s, but got %s {%s}",
                    BytesBuffer.ID_MAX_LEN, idBytes.length, this.id);
}
/**
 * Orders ids by unsigned lexicographic comparison of their byte forms.
 */
@Override
public int compareTo(Id other) {
    byte[] otherBytes = other.asBytes();
    return Bytes.compare(this.bytes, otherBytes);
}
/**
 * Point lookup of a single row by id; passes null for the column-family
 * filter so all families are returned.
 */
protected RowIterator queryById(Session session, Id id) {
    byte[] rowKey = id.asBytes();
    return session.get(this.table(), null, rowKey);
}
protected BackendColumnIterator queryById(Session session, Id id) { // TODO: change to get() after vertex and schema don't use id prefix return session.scan(this.table(), id.asBytes()); }
/**
 * A column belongs to this entry when its name starts with the
 * entry's id bytes.
 */
public default boolean belongToMe(BackendColumn column) {
    byte[] idPrefix = id().asBytes();
    return Bytes.prefixWith(column.name, idPrefix);
}
/**
 * Fetches a single value by exact id and wraps it as a one-column
 * iterator; yields an empty iterator when the id is absent.
 */
@Override
protected BackendColumnIterator queryById(Session session, Id id) {
    byte[] idBytes = id.asBytes();
    byte[] value = session.get(this.table(), idBytes);
    if (value == null) {
        // Missing row: return empty rather than null
        return BackendColumnIterator.empty();
    }
    BackendColumn column = BackendColumn.of(idBytes, value);
    return new BackendColumnIteratorWrapper(column);
}
}
/**
 * Scans rows starting at {@code query.start()} (inclusive or exclusive
 * per the query) and bounded by prefix-match against
 * {@code query.prefix()}.
 */
protected BackendColumnIterator queryByPrefix(Session session,
                                              IdPrefixQuery query) {
    int scanType = query.inclusiveStart() ? Session.SCAN_GTE_BEGIN
                                          : Session.SCAN_GT_BEGIN;
    scanType |= Session.SCAN_PREFIX_WITH_END;
    byte[] begin = query.start().asBytes();
    byte[] prefix = query.prefix().asBytes();
    return session.scan(this.table(), begin, prefix, scanType);
}
/**
 * Writes all columns of the entry under its id as the row key.
 * An entry to insert must carry at least one column.
 */
@Override
public void insert(Session session, BackendEntry entry) {
    assert !entry.columns().isEmpty();
    byte[] rowKey = entry.id().asBytes();
    session.put(this.table(), CF, rowKey, entry.columns());
}
/**
 * Range scan between the query's start and (optional) end ids, with the
 * inclusive/exclusive flags taken from the query. A null end means an
 * open-ended scan.
 */
protected BackendColumnIterator queryByRange(Session session,
                                             IdRangeQuery query) {
    byte[] begin = query.start().asBytes();
    int scanType = query.inclusiveStart() ? Session.SCAN_GTE_BEGIN
                                          : Session.SCAN_GT_BEGIN;
    byte[] end = null;
    if (query.end() != null) {
        end = query.end().asBytes();
        scanType |= query.inclusiveEnd() ? Session.SCAN_LTE_END
                                         : Session.SCAN_LT_END;
    }
    return session.scan(this.table(), begin, end, scanType);
}
/**
 * Deletes the whole row when the entry carries no columns; otherwise
 * removes only the listed columns.
 */
@Override
public void delete(Session session, BackendEntry entry) {
    if (entry.columns().isEmpty()) {
        session.delete(this.table(), entry.id().asBytes());
        return;
    }
    for (BackendColumn column : entry.columns()) {
        assert entry.belongToMe(column) : entry;
        session.remove(this.table(), column.name);
    }
}
private BackendEntryIterator newEntryIterator(RowIterator rows, Query query) { return new BinaryEntryIterator<>(rows, query, (entry, row) -> { E.checkState(!row.isEmpty(), "Can't parse empty HBase result"); byte[] id = row.getRow(); if (entry == null || !Bytes.prefixWith(id, entry.id().asBytes())) { HugeType type = query.resultType(); // NOTE: only support BinaryBackendEntry currently entry = new BinaryBackendEntry(type, id); } try { this.parseRowColumns(row, entry, query); } catch (IOException e) { throw new BackendException("Failed to read HBase columns", e); } return entry; }); }
// NOTE(review): fragment of a larger method not fully visible here —
// writes the encoded id bytes into the buffer, then stores the buffer's
// contents as a column name with a null value (presumably the id itself
// is the payload; confirm against the enclosing method).
buffer.write(IdGenerator.of(idString).asBytes()); entry.column(buffer.bytes(), null);
public BytesBuffer writeId(Id id, boolean big) { boolean number = id.number(); if (number) { long value = id.asLong(); this.writeNumber(value); } else { byte[] bytes = id.asBytes(); int len = bytes.length; E.checkArgument(len > 0, "Can't write empty id"); if (!big) { E.checkArgument(len <= ID_MAX_LEN, "Id max length is %s, but got %s {%s}", ID_MAX_LEN, len, id); len -= 1; // mapping [1, 128] to [0, 127] this.writeUInt8(len | 0x80); } else { E.checkArgument(len <= BIG_ID_MAX_LEN, "Big id max length is %s, but got %s", BIG_ID_MAX_LEN, len); len -= 1; int high = len >> 8; int low = len & 0xff; this.writeUInt8(high | 0x80); this.writeUInt8(low); } this.write(bytes); } return this; }