private static boolean isSweepSentinel(Value value) {
    return value.getTimestamp() == Value.INVALID_VALUE_TIMESTAMP;
}
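// Illustration only (assumed usage, not part of the original listing): a sweep sentinel is a
// Value whose timestamp is Value.INVALID_VALUE_TIMESTAMP; the empty contents here are an assumption.
Value sentinel = Value.create(new byte[0], Value.INVALID_VALUE_TIMESTAMP);
boolean swept = isSweepSentinel(sentinel); // true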
private Set<Long> getStartTimestampsForValues(Iterable<Value> values) {
    Set<Long> results = Sets.newHashSet();
    for (Value v : values) {
        results.add(v.getTimestamp());
    }
    return results;
}
private Value getValueForEntry(TableReference tableRef, Map.Entry<Cell, Value> entry) {
    return kvs.get()
            .get(tableRef, ImmutableMap.of(entry.getKey(), entry.getValue().getTimestamp()))
            .get(entry.getKey());
}
static Column createColumn(Cell cell, Value value) {
    return createColumnAtSpecificCassandraTimestamp(cell, value, value.getTimestamp());
}
public long numSweptAtlasDeletes(TableReference tableRef) {
    // A delete counts as swept once the value read back for it is the sweep sentinel.
    return getAllAtlasDeletes(tableRef).entrySet().stream()
            .filter(entry -> getValueForEntry(tableRef, entry).getTimestamp() == Value.INVALID_VALUE_TIMESTAMP)
            .count();
}
@Override
public Collection<Row3<byte[], byte[], Long>> getRowsForSelect() {
    return Collections2.transform(data.entries(), entry -> DSL.row(
            entry.getKey().getRowName(),
            entry.getKey().getColumnName(),
            entry.getValue().getTimestamp()));
}
@Override
public InsertValuesStep4<Record, byte[], byte[], Long, byte[]> addValuesForInsert(
        InsertValuesStep4<Record, byte[], byte[], Long, byte[]> query) {
    for (Entry<Cell, Value> entry : data.entries()) {
        query = query.values(
                entry.getKey().getRowName(),
                entry.getKey().getColumnName(),
                entry.getValue().getTimestamp(),
                entry.getValue().getContents());
    }
    return query;
}
@Override
public void put(Collection<Map.Entry<Cell, Value>> data) {
    List<Object[]> args = Lists.newArrayListWithCapacity(data.size());
    for (Entry<Cell, Value> entry : data) {
        Cell cell = entry.getKey();
        Value val = entry.getValue();
        args.add(new Object[] {
            cell.getRowName(), cell.getColumnName(), val.getTimestamp(), val.getContents()
        });
    }
    put(args);
}
private void fillOverflowValues(
        ConnectionSupplier conns,
        TableReference tableRef,
        Map<Cell, OverflowValue> overflowValues,
        @Output Map<Cell, Value> values) {
    // Drop overflow entries that are already superseded by a newer inline value.
    Iterator<Entry<Cell, OverflowValue>> overflowIterator = overflowValues.entrySet().iterator();
    while (overflowIterator.hasNext()) {
        Entry<Cell, OverflowValue> entry = overflowIterator.next();
        Value value = values.get(entry.getKey());
        if (value != null && value.getTimestamp() > entry.getValue().ts()) {
            overflowIterator.remove();
        }
    }

    Map<Long, byte[]> resolvedOverflowValues = overflowValueLoader.loadOverflowValues(
            conns, tableRef, Collections2.transform(overflowValues.values(), OverflowValue::id));

    for (Entry<Cell, OverflowValue> entry : overflowValues.entrySet()) {
        Cell cell = entry.getKey();
        OverflowValue ov = entry.getValue();
        byte[] val = resolvedOverflowValues.get(ov.id());
        Preconditions.checkNotNull(val, "Failed to load overflow data: cell=%s, overflowId=%s", cell, ov.id());
        values.put(cell, Value.create(val, ov.ts()));
    }
}
private void putIfNotUpdate(
        DbReadTable readTable,
        DbWriteTable writeTable,
        TableReference tableRef,
        List<Entry<Cell, Value>> batch,
        KeyAlreadyExistsException ex) {
    // Re-read at timestamp + 1 so the write at exactly this timestamp is visible.
    Map<Cell, Long> timestampByCell = Maps.newHashMap();
    for (Entry<Cell, Value> entry : batch) {
        timestampByCell.put(entry.getKey(), entry.getValue().getTimestamp() + 1);
    }
    Map<Cell, Value> results = extractResults(readTable, tableRef, readTable.getLatestCells(timestampByCell, true));

    ListIterator<Entry<Cell, Value>> iter = batch.listIterator();
    while (iter.hasNext()) {
        Entry<Cell, Value> entry = iter.next();
        Cell key = entry.getKey();
        Value value = entry.getValue();
        if (results.containsKey(key)) {
            if (results.get(key).equals(value)) {
                // The existing write is identical, so drop it rather than fail.
                iter.remove();
            } else {
                throw new KeyAlreadyExistsException(
                        "primary key violation for key " + key + " with value " + value, ex);
            }
        }
    }
    writeTable.put(batch);
}
private void putInternal(
        TableReference tableRef, Collection<Map.Entry<Cell, Value>> values, boolean doNotOverwriteWithSameValue) {
    Table table = getTableMap(tableRef);
    for (Map.Entry<Cell, Value> e : values) {
        byte[] contents = e.getValue().getContents();
        long timestamp = e.getValue().getTimestamp();
        Key key = getKey(table, e.getKey(), timestamp);
        byte[] oldContents = putIfAbsent(table, key, contents);
        if (oldContents != null && (doNotOverwriteWithSameValue || !Arrays.equals(oldContents, contents))) {
            throw new KeyAlreadyExistsException("We already have a value for this timestamp");
        }
    }
}
private SortedMap<Long, Multimap<TableReference, Cell>> transformRows(List<RowResult<Value>> input) {
    SortedMap<Long, Multimap<TableReference, Cell>> scrubTimestampToTableNameToCell = Maps.newTreeMap();
    for (RowResult<Value> rowResult : input) {
        byte[] row = rowResult.getRowName();
        for (Entry<byte[], Value> entry : rowResult.getColumns().entrySet()) {
            // The stored column name is a var-string-encoded table name followed by the original column.
            byte[] fullCol = entry.getKey();
            String table = EncodingUtils.decodeVarString(fullCol);
            byte[] col = Arrays.copyOfRange(fullCol, EncodingUtils.sizeOfVarString(table), fullCol.length);
            TableReference tableRef = TableReference.fromString(table);
            Cell cell = Cell.create(row, col);
            long timestamp = entry.getValue().getTimestamp();
            Multimap<TableReference, Cell> cells = scrubTimestampToTableNameToCell.get(timestamp);
            if (cells == null) {
                cells = ArrayListMultimap.create();
                scrubTimestampToTableNameToCell.put(timestamp, cells);
            }
            cells.put(tableRef, cell);
        }
    }
    return scrubTimestampToTableNameToCell;
}
@Override
public void put(Collection<Map.Entry<Cell, Value>> data) {
    List<Object[]> args = Lists.newArrayListWithCapacity(data.size());
    List<Object[]> overflowArgs = Lists.newArrayList();
    for (Entry<Cell, Value> entry : data) {
        Cell cell = entry.getKey();
        Value val = entry.getValue();
        if (val.getContents().length <= AtlasDbConstants.ORACLE_OVERFLOW_THRESHOLD) {
            args.add(new Object[] {
                cell.getRowName(), cell.getColumnName(), val.getTimestamp(), val.getContents(), null
            });
        } else {
            // Values above the threshold go to the overflow table and are referenced by id.
            long overflowId = config.overflowIds().orElse(overflowSequenceSupplier).get();
            overflowArgs.add(new Object[] {overflowId, val.getContents()});
            args.add(new Object[] {
                cell.getRowName(), cell.getColumnName(), val.getTimestamp(), null, overflowId
            });
        }
    }
    put(args, overflowArgs);
}
Pair<String, Long> getDirect(TableReference tableRef, String rowName, String columnName, long timestamp) {
    Cell cell = createCell(rowName, columnName);
    Value valueBytes = keyValueService.get(tableRef, ImmutableMap.of(cell, timestamp)).get(cell);
    return valueBytes != null
            ? Pair.create(PtBytes.toString(valueBytes.getContents()), valueBytes.getTimestamp())
            : null;
}
@Test
public void testGetAtDifferentVersions() {
    putDirect("row1", "col1", "v1", 1);
    putDirect("row1", "col1", "v2", 5);
    putDirect("row2", "col1", "v3", 3);
    putDirect("row2", "col1", "v4", 8);
    Cell cell1 = Cell.create("row1".getBytes(), "col1".getBytes());
    Cell cell2 = Cell.create("row2".getBytes(), "col1".getBytes());
    Map<Cell, Value> results = keyValueService.get(TEST_TABLE, ImmutableMap.of(cell1, 5L, cell2, 8L));

    // Reads return the latest value strictly before the requested timestamp, so the writes at
    // timestamps 5 and 8 are not visible here.
    Value v = results.get(cell1);
    assertEquals(1L, v.getTimestamp());
    assertEquals("v1", new String(v.getContents()));
    v = results.get(cell2);
    assertEquals(3L, v.getTimestamp());
    assertEquals("v3", new String(v.getContents()));
}
@Override
@Nullable
public PutBatch getNextBatch(Result<? extends Record> existingRecords) {
    Map<CellTimestamp, byte[]> existing = Maps.newHashMapWithExpectedSize(existingRecords.size());
    for (Record record : existingRecords) {
        existing.put(
                new CellTimestamp(
                        record.getValue(JdbcConstants.A_ROW_NAME),
                        record.getValue(JdbcConstants.A_COL_NAME),
                        record.getValue(JdbcConstants.A_TIMESTAMP)),
                record.getValue(JdbcConstants.A_VALUE));
    }

    Multimap<Cell, Value> nextBatch = ArrayListMultimap.create();
    for (Entry<Cell, Value> entry : data.entries()) {
        Cell cell = entry.getKey();
        Value newValue = entry.getValue();
        byte[] oldValue = existing.get(
                new CellTimestamp(cell.getRowName(), cell.getColumnName(), newValue.getTimestamp()));
        if (oldValue == null) {
            // Not written yet: include it in the next batch.
            nextBatch.put(cell, newValue);
        } else if (!Arrays.equals(oldValue, newValue.getContents())) {
            // A different value already exists at this cell and timestamp: abort.
            return null;
        }
    }
    return new MultiTimestampPutBatch(nextBatch);
}
private void assertReadAtTimestampReturnsTombstoneAtTimestamp(TableReference tableRef, long readTs, long tombTs) {
    Value readValue = Iterables.getOnlyElement(readFromDefaultCell(tableRef, readTs).values());
    assertThat(readValue.getTimestamp()).isEqualTo(tombTs);
    assertThat(readValue.getContents()).isEmpty();
}
private static void runOnce(Context context, ClosableIterator<RowResult<Value>> iter) {
    Multimap<Cell, Value> batchToCreate = ArrayListMultimap.create();
    Multimap<Cell, Long> batchToDelete = ArrayListMultimap.create();
    while (iter.hasNext()) {
        RowResult<Value> rowResult = iter.next();
        byte[] row = rowResult.getRowName();
        for (Entry<byte[], Value> entry : rowResult.getColumns().entrySet()) {
            byte[] col = entry.getKey();
            Value value = entry.getValue();
            long timestamp = value.getTimestamp();
            // The old format packed multiple table names into the cell contents; split them out,
            // write one new-format cell per table, and schedule the old cell for deletion.
            String[] tableNames = StringUtils.split(
                    PtBytes.toString(value.getContents()), AtlasDbConstants.OLD_SCRUB_TABLE_SEPARATOR_CHAR);
            for (String tableName : tableNames) {
                byte[] tableBytes = EncodingUtils.encodeVarString(tableName);
                byte[] newCol = EncodingUtils.add(tableBytes, col);
                Cell newCell = Cell.create(row, newCol);
                batchToCreate.put(newCell, Value.create(DUMMY_CONTENTS, timestamp));
            }
            batchToDelete.put(Cell.create(row, col), timestamp);
            if (batchToDelete.size() >= context.batchSize) {
                flush(context, batchToCreate, batchToDelete);
            }
        }
    }
    if (!batchToDelete.isEmpty()) {
        flush(context, batchToCreate, batchToDelete);
    }
}
Value value = Value.create(row.getBytes(VAL), row.getLong(TIMESTAMP));
Value oldValue = results.put(cell, value);
if (oldValue != null && oldValue.getTimestamp() > value.getTimestamp()) {
    // Keep whichever value has the newer timestamp.
    results.put(cell, oldValue);
}
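// A minimal equivalent sketch (hypothetical helper, not from the original source): the same
// "keep the newest value per cell" rule expressed with Map.merge instead of put-then-restore.
private static void putIfNewer(Map<Cell, Value> results, Cell cell, Value value) {
    results.merge(cell, value, (existing, candidate) ->
            candidate.getTimestamp() >= existing.getTimestamp() ? candidate : existing);
}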
private static Column createColumnAtSpecificCassandraTimestamp(Cell cell, Value value, long cassandraTimestamp) {
    byte[] contents = value.getContents();
    long atlasTimestamp = value.getTimestamp();
    // The Atlas timestamp is encoded into the column name; the Cassandra write timestamp is set separately.
    ByteBuffer colName = makeCompositeBuffer(cell.getColumnName(), atlasTimestamp);
    Column col = new Column();
    col.setName(colName);
    col.setValue(contents);
    col.setTimestamp(cassandraTimestamp);
    return col;
}