protected void process(KahaRewrittenDataFileCommand command, Location location) throws IOException { final TreeSet<Integer> completeFileSet = new TreeSet<>(journal.getFileMap().keySet()); // Mark the current journal file as a compacted file so that gc checks can skip // over logs that are smaller compaction type logs. DataFile current = journal.getDataFileById(location.getDataFileId()); current.setTypeCode(command.getRewriteType()); if (completeFileSet.contains(command.getSourceDataFileId()) && command.getSkipIfSourceExists()) { // Move offset so that next location read jumps to next file. location.setOffset(journalMaxFileLength); } }
// Tag the reserved forwarding target as a compacted journal file and record it at
// trace level. NOTE(review): forwardsFile, COMPACTED_JOURNAL_FILE and LOG are
// declared outside this visible fragment — presumably this runs inside the ack
// compaction path that allocates the forwards file; confirm against the full method.
forwardsFile.setTypeCode(COMPACTED_JOURNAL_FILE);
LOG.trace("Reserved file for forwarded acks: {}", forwardsFile);
protected void process(KahaRewrittenDataFileCommand command, Location location) throws IOException { final TreeSet<Integer> completeFileSet = new TreeSet<>(journal.getFileMap().keySet()); // Mark the current journal file as a compacted file so that gc checks can skip // over logs that are smaller compaction type logs. DataFile current = journal.getDataFileById(location.getDataFileId()); current.setTypeCode(command.getRewriteType()); if (completeFileSet.contains(command.getSourceDataFileId()) && command.getSkipIfSourceExists()) { // Move offset so that next location read jumps to next file. location.setOffset(journalMaxFileLength); } }
protected void process(KahaRewrittenDataFileCommand command, Location location) throws IOException { final TreeSet<Integer> completeFileSet = new TreeSet<>(journal.getFileMap().keySet()); // Mark the current journal file as a compacted file so that gc checks can skip // over logs that are smaller compaction type logs. DataFile current = journal.getDataFileById(location.getDataFileId()); current.setTypeCode(command.getRewriteType()); if (completeFileSet.contains(command.getSourceDataFileId()) && command.getSkipIfSourceExists()) { // Move offset so that next location read jumps to next file. location.setOffset(journalMaxFileLength); } }
protected void process(KahaRewrittenDataFileCommand command, Location location) throws IOException { final TreeSet<Integer> completeFileSet = new TreeSet<>(journal.getFileMap().keySet()); // Mark the current journal file as a compacted file so that gc checks can skip // over logs that are smaller compaction type logs. DataFile current = journal.getDataFileById(location.getDataFileId()); current.setTypeCode(command.getRewriteType()); if (completeFileSet.contains(command.getSourceDataFileId()) && command.getSkipIfSourceExists()) { // Move offset so that next location read jumps to next file. location.setOffset(journalMaxFileLength); } }
// Mark the file reserved for forwarded acks with the compacted-journal type code,
// then trace the reservation. NOTE(review): forwardsFile, COMPACTED_JOURNAL_FILE
// and LOG come from the enclosing scope, which is not visible in this fragment.
forwardsFile.setTypeCode(COMPACTED_JOURNAL_FILE);
LOG.trace("Reserved file for forwarded acks: {}", forwardsFile);
// Stamp the forwarded-acks target file as a compacted journal file and log the
// reservation at trace level. NOTE(review): the declarations of forwardsFile,
// COMPACTED_JOURNAL_FILE and LOG lie outside this fragment — verify in context.
forwardsFile.setTypeCode(COMPACTED_JOURNAL_FILE);
LOG.trace("Reserved file for forwarded acks: {}", forwardsFile);
// Flag the reserved forwards file with the compacted-journal type code and emit a
// trace entry. NOTE(review): forwardsFile, COMPACTED_JOURNAL_FILE and LOG are
// defined in the surrounding (unseen) scope; assumed to be the ack-forwarding path.
forwardsFile.setTypeCode(COMPACTED_JOURNAL_FILE);
LOG.trace("Reserved file for forwarded acks: {}", forwardsFile);