// Writes the file header for this packed-ints writer: codec header, then
// bitsPerValue, valueCount, and the format id, all as vInts.
// NOTE: the write order defines the on-disk format — do not reorder.
void writeHeader() throws IOException {
  // valueCount must have been fixed before the header can be written
  assert valueCount != -1;
  CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
  out.writeVInt(bitsPerValue);
  out.writeVInt(valueCount);
  out.writeVInt(getFormat().getId());
}
throw new IllegalArgumentException("Invalid id: " + StringHelper.idToString(id)); writeHeader(out, codec, version); out.writeBytes(id, 0, id.length); BytesRef suffixBytes = new BytesRef(suffix);
synchronized private void persist() throws IOException { String fileName = SNAPSHOTS_PREFIX + nextWriteGen; IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT); boolean success = false; try { CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT); out.writeVInt(refCounts.size()); for(Entry<Long,Integer> ent : refCounts.entrySet()) { out.writeVLong(ent.getKey()); out.writeVInt(ent.getValue()); } success = true; } finally { if (!success) { IOUtils.closeWhileHandlingException(out); IOUtils.deleteFilesIgnoringExceptions(dir, fileName); } else { IOUtils.close(out); } } dir.sync(Collections.singletonList(fileName)); if (nextWriteGen > 0) { String lastSaveFile = SNAPSHOTS_PREFIX + (nextWriteGen-1); // exception OK: likely it didn't exist IOUtils.deleteFilesIgnoringExceptions(dir, lastSaveFile); } nextWriteGen++; }
// Writes the BKD index section: codec header, dimension/leaf configuration,
// global min/max packed values, point statistics, and finally the packed
// index blob itself (length-prefixed).
// NOTE: the write order defines the on-disk format — do not reorder.
private void writeIndex(IndexOutput out, int countPerLeaf, int numLeaves, byte[] packedIndex) throws IOException {
  CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
  out.writeVInt(numDataDims);
  out.writeVInt(numIndexDims);
  out.writeVInt(countPerLeaf);
  out.writeVInt(bytesPerDim);
  assert numLeaves > 0;
  out.writeVInt(numLeaves);
  // per-dimension min/max bounds over all indexed points
  out.writeBytes(minPackedValue, 0, packedIndexBytesLength);
  out.writeBytes(maxPackedValue, 0, packedIndexBytesLength);
  out.writeVLong(pointCount);
  out.writeVInt(docsSeen.cardinality()); // number of distinct docs seen
  out.writeVInt(packedIndex.length);
  out.writeBytes(packedIndex, 0, packedIndex.length);
}
throw new IllegalStateException("call finish first"); CodecUtil.writeHeader(out, FILE_FORMAT_NAME, VERSION_CURRENT);
try (OutputStreamIndexOutput out = new OutputStreamIndexOutput(resourceDesc, fileName, Files.newOutputStream(tmpStatePath), BUFFER_SIZE)) { CodecUtil.writeHeader(out, STATE_FILE_CODEC, STATE_FILE_VERSION); out.writeInt(FORMAT.index()); try (XContentBuilder builder = newXContentBuilder(FORMAT, new IndexOutputOutputStream(out) {
// Serializes obj between a codec header and a checksum footer, then hands the
// resulting bytes to the consumer. The intermediate ByteArrayOutputStream lets
// the checksum be computed before anything reaches the blob store.
private void writeTo(final T obj, final String blobName, final CheckedConsumer<BytesArray, IOException> consumer) throws IOException {
  final BytesReference bytes = write(obj);
  try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
    final String resourceDesc = "ChecksumBlobStoreFormat.writeBlob(blob=\"" + blobName + "\")";
    try (OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput(resourceDesc, blobName, outputStream, BUFFER_SIZE)) {
      CodecUtil.writeHeader(indexOutput, codec, VERSION);
      try (OutputStream indexOutputOutputStream = new IndexOutputOutputStream(indexOutput) {
        @Override
        public void close() throws IOException {
          // this is important since some of the XContentBuilders write bytes on close.
          // in order to write the footer we need to prevent closing the actual index output.
        }
      }) {
        bytes.writeTo(indexOutputOutputStream);
      }
      // footer (checksum) must be written to indexOutput AFTER the payload,
      // which is why the wrapper above suppresses close()
      CodecUtil.writeFooter(indexOutput);
    }
    consumer.accept(new BytesArray(outputStream.toByteArray()));
  }
}
// Serializes the checkpoint into an in-memory buffer (header + payload +
// checksum footer) and then writes it to the channel in a single call, so the
// on-disk write is atomic for files under the 512-byte sector boundary.
public static void write(ChannelFactory factory, Path checkpointFile, Checkpoint checkpoint, OpenOption... options) throws IOException {
  final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream(V3_FILE_SIZE) {
    @Override
    public synchronized byte[] toByteArray() {
      // don't clone — return the internal buffer to avoid a copy
      return buf;
    }
  };
  final String resourceDesc = "checkpoint(path=\"" + checkpointFile + "\", gen=" + checkpoint + ")";
  try (OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput(resourceDesc, checkpointFile.toString(), byteOutputStream, V3_FILE_SIZE)) {
    CodecUtil.writeHeader(indexOutput, CHECKPOINT_CODEC, CURRENT_VERSION);
    checkpoint.write(indexOutput);
    CodecUtil.writeFooter(indexOutput);
    assert indexOutput.getFilePointer() == V3_FILE_SIZE : "get you numbers straight; bytes written: " + indexOutput.getFilePointer() + ", buffer size: " + V3_FILE_SIZE;
    assert indexOutput.getFilePointer() < 512 : "checkpoint files have to be smaller than 512 bytes for atomic writes; size: " + indexOutput.getFilePointer();
  }
  // now go and write to the channel, in one go.
  try (FileChannel channel = factory.open(checkpointFile, options)) {
    Channels.writeToChannel(byteOutputStream.toByteArray(), channel);
    // no need to force metadata, file size stays the same and we did the full fsync
    // when we first created the file, so the directory entry doesn't change as well
    channel.force(false);
  }
}
/**
 * Writes this translog header with the latest format into the file channel:
 * codec header, length-prefixed translog UUID bytes, primary term, and the
 * checksum of everything written so far. Forces the channel afterwards.
 */
void write(final FileChannel channel) throws IOException {
  // This output is intentionally not closed because closing it will close the FileChannel.
  @SuppressWarnings({"IOResourceOpenedButNotSafelyClosed", "resource"})
  final BufferedChecksumStreamOutput out = new BufferedChecksumStreamOutput(
      new OutputStreamStreamOutput(java.nio.channels.Channels.newOutputStream(channel)));
  CodecUtil.writeHeader(new OutputStreamDataOutput(out), TRANSLOG_CODEC, CURRENT_VERSION);
  // Write uuid (length-prefixed UTF-8 bytes)
  final BytesRef uuid = new BytesRef(translogUUID);
  out.writeInt(uuid.length);
  out.writeBytes(uuid.bytes, uuid.offset, uuid.length);
  // Write primary term
  out.writeLong(primaryTerm);
  // Checksum header — covers everything written above, so it must come last
  out.writeInt((int) out.getChecksum());
  out.flush();
  channel.force(true);
  assert channel.position() == headerSizeInBytes :
      "Header is not fully written; header size [" + headerSizeInBytes + "], channel position [" + channel.position() + "]";
}
} // end of enclosing class (brace carried over from the original source span)
CodecUtil.writeHeader(output, KEYSTORE_FILENAME, FORMAT_VERSION); output.writeByte(password.length == 0 ? (byte)0 : (byte)1);
/**
 * Marks this store as corrupted by writing a {@code corrupted_${uuid}} file
 * that contains the given exception. Once such a file exists,
 * {@link #isMarkedCorrupted()} returns <code>true</code>. Failures while
 * writing the marker are logged rather than propagated.
 */
public void markStoreCorrupted(IOException exception) throws IOException {
  ensureOpen();
  if (isMarkedCorrupted() == false) {
    String markerName = CORRUPTED + UUIDs.randomBase64UUID();
    try (IndexOutput markerOutput = directory().createOutput(markerName, IOContext.DEFAULT)) {
      CodecUtil.writeHeader(markerOutput, CODEC, VERSION);
      // serialize the exception, then store it length-prefixed in the marker
      BytesStreamOutput serialized = new BytesStreamOutput();
      serialized.writeException(exception);
      BytesReference exceptionBytes = serialized.bytes();
      markerOutput.writeVInt(exceptionBytes.length());
      BytesRef raw = exceptionBytes.toBytesRef();
      markerOutput.writeBytes(raw.bytes, raw.offset, raw.length);
      CodecUtil.writeFooter(markerOutput);
    } catch (IOException ioException) {
      // best effort: the store may already be too broken to write the marker
      logger.warn("Can't mark store as corrupted", ioException);
    }
    directory().sync(Collections.singleton(markerName));
  }
}
// Writes a translog header for the given payload: codec header, payload
// length as a fixed-width int, then the raw bytes.
// NOTE: the write order defines the on-disk format — do not reorder.
static void writeHeader(OutputStreamDataOutput out, BytesRef ref) throws IOException {
  CodecUtil.writeHeader(out, TRANSLOG_CODEC, VERSION);
  out.writeInt(ref.length);
  out.writeBytes(ref.bytes, ref.offset, ref.length);
}
// Stores the terms output and writes the fixed skip-list configuration that
// readers need to decode blocks. The four ints must be written in this order.
@Override
public void start(final IndexOutput termsOut) throws IOException {
  this.termsOut = termsOut;
  CodecUtil.writeHeader(termsOut, CODEC, VERSION_CURRENT);
  termsOut.writeInt(blockSkipInterval);   // write skipInterval
  termsOut.writeInt(maxSkipLevels);       // write maxSkipLevels
  termsOut.writeInt(blockSkipMinimum);    // write skipMinimum
  termsOut.writeInt(maxBlockSize);        // write maxBlockSize
}
// Writes the fixed skip-list configuration header to the terms output; the
// reader side consumes these four ints in the same order to decode blocks.
@Override
public void init(final IndexOutput termsOut) throws IOException {
  CodecUtil.writeHeader(termsOut, CODEC, VERSION_CURRENT);
  termsOut.writeInt(blockSkipInterval);   // write skipInterval
  termsOut.writeInt(maxSkipLevels);       // write maxSkipLevels
  termsOut.writeInt(blockSkipMinimum);    // write skipMinimum
  termsOut.writeInt(maxBlockSize);        // write maxBlockSize
}
/**
 * Serializes the compound-file entry table: codec header, entry count, then
 * one (stripped file name, offset, length) record per entry, followed by a
 * checksum footer.
 */
protected void writeEntryTable(Collection<FileEntry> entries, IndexOutput entryOut) throws IOException {
  CodecUtil.writeHeader(entryOut, ENTRY_CODEC, VERSION_CURRENT);
  entryOut.writeVInt(entries.size());
  for (FileEntry entry : entries) {
    // the segment name is stripped before storing the file name
    entryOut.writeString(IndexFileNames.stripSegmentName(entry.file));
    entryOut.writeLong(entry.offset);
    entryOut.writeLong(entry.length);
  }
  CodecUtil.writeFooter(entryOut);
}
// Writes the file header for this packed-ints writer: codec header, then
// bitsPerValue, valueCount, and the format id, all as vInts.
// NOTE: the write order defines the on-disk format — do not reorder.
void writeHeader() throws IOException {
  // valueCount must have been fixed before the header can be written
  assert valueCount != -1;
  CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
  out.writeVInt(bitsPerValue);
  out.writeVInt(valueCount);
  out.writeVInt(getFormat().getId());
}
// Persists this suggester's state: codec header, token/gram statistics, and
// then the FST itself. Returns true to signal the state was stored.
// NOTE: the write order defines the on-disk format — do not reorder.
@Override
public boolean store(DataOutput output) throws IOException {
  CodecUtil.writeHeader(output, CODEC_NAME, VERSION_CURRENT);
  output.writeVLong(count);
  output.writeByte(separator);
  output.writeVInt(grams);
  output.writeVLong(totTokens);
  fst.save(output);
  return true;
}
// Writes the file header for this packed-ints writer: codec header, then
// bitsPerValue, valueCount, and the format id, all as vInts.
// NOTE: the write order defines the on-disk format — do not reorder.
void writeHeader() throws IOException {
  // valueCount must have been fixed before the header can be written
  assert valueCount != -1;
  CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
  out.writeVInt(bitsPerValue);
  out.writeVInt(valueCount);
  out.writeVInt(getFormat().getId());
}
// Writes the file header for this packed-ints writer: codec header, then
// bitsPerValue, valueCount, and the format id, all as vInts.
// NOTE: the write order defines the on-disk format — do not reorder.
void writeHeader() throws IOException {
  // valueCount must have been fixed before the header can be written
  assert valueCount != -1;
  CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
  out.writeVInt(bitsPerValue);
  out.writeVInt(valueCount);
  out.writeVInt(getFormat().getId());
}
// Writes the BKD index section: codec header, dimension/leaf configuration,
// global min/max packed values, point statistics, and finally the packed
// index blob itself (length-prefixed).
// NOTE: the write order defines the on-disk format — do not reorder.
private void writeIndex(IndexOutput out, int countPerLeaf, int numLeaves, byte[] packedIndex) throws IOException {
  CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
  out.writeVInt(numDataDims);
  out.writeVInt(numIndexDims);
  out.writeVInt(countPerLeaf);
  out.writeVInt(bytesPerDim);
  assert numLeaves > 0;
  out.writeVInt(numLeaves);
  // per-dimension min/max bounds over all indexed points
  out.writeBytes(minPackedValue, 0, packedIndexBytesLength);
  out.writeBytes(maxPackedValue, 0, packedIndexBytesLength);
  out.writeVLong(pointCount);
  out.writeVInt(docsSeen.cardinality()); // number of distinct docs seen
  out.writeVInt(packedIndex.length);
  out.writeBytes(packedIndex, 0, packedIndex.length);
}