private boolean canTruncate(SegmentChunk segmentChunk, long truncationOffset) { // We should only truncate those SegmentChunks that are entirely before the truncationOffset. An empty SegmentChunk // that starts exactly at the truncationOffset should be spared (this means we truncate the entire Segment), as // we need that SegmentChunk to determine the actual length of the Segment. return segmentChunk.getStartOffset() < truncationOffset && segmentChunk.getLastOffset() <= truncationOffset; }
/**
 * Adds multiple SegmentChunks.
 *
 * @param segmentChunks The SegmentChunks to add. These SegmentChunks must be in continuity of any existing SegmentChunks.
 */
synchronized void addChunks(List<SegmentChunk> segmentChunks) {
    Preconditions.checkState(!this.sealed, "Cannot add SegmentChunks for a Sealed Handle.");

    // Figure out where the first incoming chunk must begin: immediately after our last registered
    // chunk, or - if we have none yet - wherever the incoming list says it starts.
    long nextExpectedOffset = 0;
    if (!this.segmentChunks.isEmpty()) {
        nextExpectedOffset = this.segmentChunks.get(this.segmentChunks.size() - 1).getLastOffset();
    } else if (!segmentChunks.isEmpty()) {
        nextExpectedOffset = segmentChunks.get(0).getStartOffset();
    }

    // Validate that the incoming chunks form one contiguous, gap-free run.
    for (SegmentChunk chunk : segmentChunks) {
        Preconditions.checkArgument(chunk.getStartOffset() == nextExpectedOffset,
                "Invalid SegmentChunk StartOffset. Expected %s, given %s.", nextExpectedOffset, chunk.getStartOffset());
        nextExpectedOffset += chunk.getLength();
    }

    this.segmentChunks.addAll(segmentChunks);
    // Bulk-added chunks carry no write handle; any previously active one is no longer valid.
    this.activeChunkHandle = null;
}
/**
 * Serializes a single SegmentChunk into a byte array (start offset paired with the chunk's name).
 *
 * @param segmentChunk The SegmentChunk to serialize.
 * @return A byte array containing the serialization.
 */
static byte[] serializeChunk(SegmentChunk segmentChunk) {
    String startOffsetText = Long.toString(segmentChunk.getStartOffset());
    return combine(startOffsetText, segmentChunk.getName());
}
/** * Adds a new SegmentChunk. * * @param segmentChunk The SegmentChunk to add. This SegmentChunk must be in continuity of any existing SegmentChunks. * @param activeChunkHandle The newly added SegmentChunk's write handle. */ synchronized void addChunk(SegmentChunk segmentChunk, SegmentHandle activeChunkHandle) { Preconditions.checkState(!this.sealed, "Cannot add SegmentChunks for a Sealed Handle."); if (this.segmentChunks.size() > 0) { long expectedOffset = this.segmentChunks.get(this.segmentChunks.size() - 1).getLastOffset(); Preconditions.checkArgument(segmentChunk.getStartOffset() == expectedOffset, "Invalid SegmentChunk StartOffset. Expected %s, given %s.", expectedOffset, segmentChunk.getStartOffset()); } // Update the SegmentChunk and its Handle atomically. Preconditions.checkNotNull(activeChunkHandle, "activeChunkHandle"); Preconditions.checkArgument(!activeChunkHandle.isReadOnly(), "Active SegmentChunk handle cannot be readonly."); Preconditions.checkArgument(activeChunkHandle.getSegmentName().equals(segmentChunk.getName()), "Active SegmentChunk handle must be for the last SegmentChunk."); this.activeChunkHandle = activeChunkHandle; this.segmentChunks.add(segmentChunk); }
/**
 * Determines whether two SegmentChunks refer to the same chunk: same name and same start offset.
 *
 * @param s1 The first SegmentChunk.
 * @param s2 The second SegmentChunk.
 * @return True if the two SegmentChunks match on both name and start offset.
 */
private boolean chunkEquals(SegmentChunk s1, SegmentChunk s2) {
    if (!s1.getName().equals(s2.getName())) {
        return false;
    }
    return s1.getStartOffset() == s2.getStartOffset();
}
long offset = om.adjustOffset(Long.parseLong(entry.getKey())); SegmentChunk s = new SegmentChunk(entry.getValue(), offset); Preconditions.checkArgument(lastOffset <= s.getStartOffset(), "SegmentChunk Entry '%s' has out-of-order offset (previous=%s).", s, lastOffset); segmentChunks.add(s); lastOffset = s.getStartOffset();
@SneakyThrows(StreamingException.class) private void checkTruncatedSegment(StreamingException ex, RollingSegmentHandle handle, SegmentChunk segmentChunk) { if (ex != null && (Exceptions.unwrap(ex) instanceof StreamSegmentNotExistsException) || !segmentChunk.exists()) { // We ran into a SegmentChunk that does not exist (either marked as such or due to a failed read). segmentChunk.markInexistent(); String message = String.format("Offsets %d-%d have been deleted.", segmentChunk.getStartOffset(), segmentChunk.getLastOffset()); ex = new StreamSegmentTruncatedException(handle.getSegmentName(), message, ex); } if (ex != null) { throw ex; } }
private boolean shouldConcatNatively(RollingSegmentHandle source, RollingSegmentHandle target) { if (source.getHeaderHandle() == null) { // Source does not have a Header, hence we cannot do Header concat. return true; } SegmentChunk lastSource = source.lastChunk(); SegmentChunk lastTarget = target.lastChunk(); return lastSource != null && lastSource.getStartOffset() == 0 && lastTarget != null && !lastTarget.isSealed() && lastTarget.getLength() + lastSource.getLength() <= target.getRollingPolicy().getMaxLength(); }
int currentIndex = CollectionHelpers.binarySearch(chunks, s -> offset < s.getStartOffset() ? -1 : (offset >= s.getLastOffset() ? 1 : 0)); assert currentIndex >= 0 : "unable to locate first SegmentChunk index."; long readOffset = offset + bytesRead - current.getStartOffset(); int readLength = (int) Math.min(length - bytesRead, current.getLength() - readOffset); assert readOffset >= 0 && readLength >= 0 : "negative readOffset or readLength";
for (SegmentChunk s : handle.chunks()) { if (last != null) { last.setLength(s.getStartOffset() - last.getStartOffset()); last.markSealed();
os.write(serialize(concatHandle)); source.addChunks(concatHandle.chunks().stream() .map(s -> s.withNewOffset(s.getStartOffset() + source.length())).collect(Collectors.toList()));
/**
 * Writes {@code length} bytes from {@code data} to the Segment at the given offset, rolling over to new
 * SegmentChunks as needed so that no SegmentChunk exceeds the handle's rolling policy maximum length.
 *
 * @param handle A writable SegmentHandle for the Segment to write to.
 * @param offset The offset within the Segment at which to write; must equal the current Segment length.
 * @param data   An InputStream supplying the bytes to write; consumed incrementally across chunk writes.
 * @param length The number of bytes to write.
 * @throws StreamSegmentException If the Segment is deleted/sealed, the offset is invalid, or a chunk write fails.
 */
@Override
public void write(SegmentHandle handle, long offset, InputStream data, int length) throws StreamSegmentException {
    val h = asWritableHandle(handle);
    ensureNotDeleted(h);
    ensureNotSealed(h);
    ensureOffset(h, offset);
    long traceId = LoggerHelpers.traceEnter(log, "write", handle, offset, length);

    // We run this in a loop because we may have to split the write over multiple SegmentChunks in order to avoid exceeding
    // any SegmentChunk's maximum length.
    int bytesWritten = 0;
    while (bytesWritten < length) {
        // Roll over if there is no active chunk handle yet, or the current last chunk is already full.
        if (h.getActiveChunkHandle() == null || h.lastChunk().getLength() >= h.getRollingPolicy().getMaxLength()) {
            rollover(h);
        }

        // Write as much as fits in the (possibly freshly rolled) last chunk, but no more than remains.
        SegmentChunk last = h.lastChunk();
        int writeLength = (int) Math.min(length - bytesWritten, h.getRollingPolicy().getMaxLength() - last.getLength());
        assert writeLength > 0 : "non-positive write length";

        // Translate the Segment-level offset into an offset within the current chunk.
        long chunkOffset = offset + bytesWritten - last.getStartOffset();
        this.baseStorage.write(h.getActiveChunkHandle(), chunkOffset, data, writeLength);
        last.increaseLength(writeLength);
        bytesWritten += writeLength;
    }

    LoggerHelpers.traceLeave(log, "write", traceId, handle, offset, bytesWritten);
}
} else { expectedExists = segmentChunk.getLastOffset() > truncateOffset || (segmentChunk.getStartOffset() == segmentChunk.getLastOffset() && segmentChunk.getLastOffset() == truncateOffset);
chunk.setLength(123L); Assert.assertEquals("Unexpected value for length() after adding one SegmentChunk.", chunk.getStartOffset() + chunk.getLength(), h.length()); AssertExtensions.assertListEquals("Unexpected contents for chunks().", Collections.singletonList(chunk), h.chunks(), Object::equals); Assert.assertEquals("Unexpected number of registered SegmentChunks.", 2, h.chunks().size()); Assert.assertEquals("Unexpected value for length() after adding two SegmentChunk.", chunk2.getStartOffset() + chunk2.getLength(), h.length()); Assert.assertEquals("Unexpected lastChunk.", chunk2, h.lastChunk()); h.lastChunk().markInexistent();