/**
 * Renders the timer state: "Initialized" before start was recorded, the start
 * point while running, and the measured native-time delta once stopped.
 */
@Override
public String toString() {
    final TimeUnit unit = times.getUnit();
    if (start == null) {
        return "Initialized";
    }
    if (stop == null) {
        return String.format("Started at %d %s", start.getNativeTimestamp(), unit);
    }
    final long elapsed = stop.getNativeTimestamp() - start.getNativeTimestamp();
    return String.format("%d %s", elapsed, unit);
}
}
public long getNativeDeletionTime() { assert t.getProvider().equals(times); return t.getNativeTimestamp() & 0xFFFFFFFFFFFFFFFEL; // zero the LSB }
public long getNativeAdditionTime() { assert t.getProvider().equals(times); return (t.getNativeTimestamp() & 0xFFFFFFFFFFFFFFFEL) | 1L; // force the LSB to 1 }
/**
 * Returns the time elapsed since {@code start}: a zero duration if the timer
 * was never started, the span up to {@code stop} if it was stopped, otherwise
 * the span up to the provider's current time.
 */
public Duration elapsed() {
    if (start == null) {
        return ZeroDuration.INSTANCE;
    }
    final Timepoint end = (stop != null) ? stop : times.getTime();
    final long delta = end.getNativeTimestamp() - start.getNativeTimestamp();
    return new StandardDuration(delta, times.getUnit());
}
/**
 * Stamps the given schema vertex with the current native timestamp as its
 * last-update marker ({@code BaseKey.SchemaUpdateTime}).
 */
public void updateSchemaVertex(TitanSchemaVertex schemaVertex) {
    final long updateTime = times.getTime().getNativeTimestamp();
    setProperty(schemaVertex, BaseKey.SchemaUpdateTime, updateTime);
}
/**
 * Best-effort persistence of the read marker for this (partition, bucket)
 * pair. Only identified markers are written; any failure is logged and
 * swallowed so that marker persistence can never interrupt message pulling.
 */
private void setReadMarker() {
    if (!readMarker.hasIdentifier()) {
        return; // unidentified markers are not persisted
    }
    try {
        log.debug("Attempting to persist read marker with identifier {}", readMarker.getIdentifier());
        writeSetting(readMarker.getIdentifier(), getMarkerColumn(partitionId, bucketId),
                messageTimeStart.getNativeTimestamp());
        // FIX: corrected "buckedId" -> "bucketId" typo in the log message.
        log.debug("Persisted read marker: identifier={} partitionId={} bucketId={} nextTimepoint={}",
                readMarker.getIdentifier(), partitionId, bucketId, messageTimeStart);
    } catch (Throwable e) {
        // Deliberately broad catch: persistence is best-effort by design.
        // FIX: the old message contained a stray concatenation artifact
        // ("] + partition [") — rewritten with SLF4J parameterized logging;
        // the Throwable as last argument is logged with its stack trace.
        log.error("Could not persist read marker [{}] on bucket [{}], partition [{}]",
                readMarker.getIdentifier(), bucketId, partitionId, e);
    }
}
/**
 * Resolves the initial read timepoint exactly once. Unidentified markers use
 * the marker's own start time; identified markers prefer a previously
 * persisted timestamp, falling back to the marker's start time when none was
 * saved.
 */
private void initializeTimepoint() {
    Preconditions.checkState(null == this.messageTimeStart);
    if (!readMarker.hasIdentifier()) {
        this.messageTimeStart = readMarker.getStartTime(times);
        log.info("Loaded unidentified ReadMarker start time {} into {}", messageTimeStart, this);
    } else {
        // The persisted setting wins; the marker's start time is only the default.
        long savedTimestamp = readSetting(readMarker.getIdentifier(),
                getMarkerColumn(partitionId, bucketId),
                readMarker.getStartTime(times).getNativeTimestamp());
        this.messageTimeStart = new StandardTimepoint(savedTimestamp, times);
        // FIX: corrected "indentified" -> "identified" typo in the log message.
        log.info("Loaded identified ReadMarker start time {} into {}", messageTimeStart, this);
    }
}
/**
 * Deletes the specified table with all its columns.
 * ATTENTION: invoking this method will delete the table if it exists and
 * therefore causes data loss.
 *
 * @throws BackendException wrapping any I/O failure from the admin interface
 */
@Override
public void clearStorage() throws BackendException {
    try (AdminMask adm = getAdminInterface()) {
        final long cutoff = times.getTime().getNativeTimestamp();
        adm.clearTable(tableName, cutoff);
    } catch (IOException e) {
        throw new TemporaryBackendException(e);
    }
}
/**
 * Deletes the specified table with all its columns.
 * ATTENTION: invoking this method will delete the table if it exists and
 * therefore causes data loss.
 *
 * @throws BackendException if clearing the table fails with an I/O error
 */
@Override
public void clearStorage() throws BackendException {
    try (AdminMask admin = getAdminInterface()) {
        final long timestamp = times.getTime().getNativeTimestamp();
        admin.clearTable(tableName, timestamp);
    } catch (IOException e) {
        throw new TemporaryBackendException(e);
    }
}
/**
 * Writes the common transaction-log header into a fresh {@link DataOutput}:
 * transaction timestamp, transaction id, status, then the meta-data map as a
 * count byte followed by (ordinal byte, value) pairs.
 *
 * <p>The write order is part of the on-disk format and must not change.</p>
 */
private DataOutput serializeHeader(Serializer serializer, int capacity, LogTxStatus status,
                                   EnumMap<LogTxMeta, Object> meta) {
    Preconditions.checkArgument(status != null && meta != null, "Invalid status or meta");
    DataOutput output = serializer.getDataOutput(capacity);
    output.putLong(txTimestamp.getNativeTimestamp());
    VariableLong.writePositive(output, transactionId);
    output.writeObjectNotNull(status);
    Preconditions.checkArgument(meta.size() < Byte.MAX_VALUE, "Too much meta data: %s", meta.size());
    output.putByte(VariableLong.unsignedByte(meta.size()));
    for (Map.Entry<LogTxMeta, Object> entry : meta.entrySet()) {
        assert entry.getValue() != null;
        output.putByte(VariableLong.unsignedByte(entry.getKey().ordinal()));
        output.writeObjectNotNull(entry.getValue());
    }
    return output;
}
/**
 * Builds the column value recording a block application: the negated block
 * value, the application timestamp, and this instance's uid bytes.
 */
private final StaticBuffer getBlockApplication(long blockValue, Timepoint timestamp) {
    WriteByteBuffer bb = new WriteByteBuffer(
            8 // counter long
            + 8 // native timestamp long (unit comes from the time provider — not necessarily ms)
            + uidBytes.length);
    // blockValue is negated before writing — NOTE(review): presumably to
    // control sort order of these entries; confirm against the reader side.
    bb.putLong(-blockValue).putLong(timestamp.getNativeTimestamp());
    WriteBufferUtil.put(bb, uidBytes);
    return bb.getStaticBuffer();
}
// Fragment of a message-pulling routine (enclosing method not visible here).
// NOTE(review): delta is computed as start - end, which is non-positive when
// the window runs forward, and it is not used in the visible code — confirm
// intent against the full method.
final long delta = messageTimeStart.getNativeTimestamp() - messageTimeEnd.getNativeTimestamp();
// First query: the full [messageTimeStart, messageTimeEnd) column window on
// this log key, capped at maxReadMsg entries.
KeySliceQuery query = new KeySliceQuery(logKey, BufferUtil.getLongBuffer(messageTimeStart.getNativeTimestamp()), BufferUtil.getLongBuffer(messageTimeEnd.getNativeTimestamp()));
query.setLimit(maxReadMsg);
log.trace("Converted MessagePuller time window to {}", query);
log.debug("Extended time window to {}", messageTimeEnd);
// Second query: resume just past the last entry already read (strictly bigger
// column) up to the same end of the window.
query = new KeySliceQuery(logKey, BufferUtil.nextBiggerBuffer(lastEntry.getColumn()), BufferUtil.getLongBuffer(messageTimeEnd.getNativeTimestamp()));
log.debug("Converted extended MessagePuller time window to {}", query);
List<Entry> extraEntries = BackendOperation.execute(getOperation(query),KCVSLog.this,times,maxReadTime);
// Fragment of a schema-vertex creation routine (enclosing method and the rest
// of the for-loop body are not visible here).
addProperty(schemaVertex, BaseKey.SchemaCategory, schemaCategory);
updateSchemaVertex(schemaVertex);
// NOTE(review): SchemaUpdateTime is also set inside updateSchemaVertex(), so
// this addProperty call looks redundant (or intentionally records a second,
// slightly later timestamp) — confirm.
addProperty(schemaVertex, BaseKey.SchemaUpdateTime, times.getTime().getNativeTimestamp());
// Persist every entry of the type definition as a SchemaDefinitionProperty.
for (Map.Entry<TypeDefinitionCategory,Object> def : definition.entrySet()) {
    TitanProperty p = addProperty(schemaVertex, BaseKey.SchemaDefinitionProperty,def.getValue());