int version = checkHeader(in, codec, minVersion, maxVersion); checkIndexHeaderID(in, expectedID); checkIndexHeaderSuffix(in, expectedSuffix);
IndexInput in = dir.openInput(file, IOContext.DEFAULT); try { CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_START); int count = in.readVInt(); for(int i=0;i<count;i++) {
/**
 * Restores a {@link Reader} from a stream that begins with a packed-ints header.
 *
 * @param in the stream to read data from
 * @return a Reader over the stored values
 * @throws IOException If there is a low-level I/O error
 * @lucene.internal
 */
public static Reader getReader(DataInput in) throws IOException {
  // The header carries the format version consumed by the no-header variant below.
  final int headerVersion = CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
  final int bpv = in.readVInt();
  assert 0 < bpv && bpv <= 64 : "bitsPerValue=" + bpv;
  final int count = in.readVInt();
  final Format storedFormat = Format.byId(in.readVInt());
  return getReaderNoHeader(in, storedFormat, headerVersion, count, bpv);
}
/**
 * Retrieves packed ints as a {@link ReaderIterator}.
 *
 * @param in positioned at the beginning of a stored packed int structure
 * @param mem how much memory the iterator is allowed to use to read-ahead (likely to speed up iteration)
 * @return an iterator to access the values
 * @throws IOException if the structure could not be retrieved
 * @lucene.internal
 */
public static ReaderIterator getReaderIterator(DataInput in, int mem) throws IOException {
  // Header first: it supplies the format version the iterator factory needs.
  final int headerVersion = CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
  final int bpv = in.readVInt();
  assert 0 < bpv && bpv <= 64 : "bitsPerValue=" + bpv;
  final int count = in.readVInt();
  final Format storedFormat = Format.byId(in.readVInt());
  return getReaderIteratorNoHeader(in, storedFormat, headerVersion, count, bpv, mem);
}
/**
 * Constructs a direct {@link Reader} from an {@link IndexInput}. This method is
 * useful to restore data from streams which have been created using
 * {@link PackedInts#getWriter(DataOutput, int, int, float)}.
 * <p>
 * The returned reader has very little memory overhead, but every call to
 * {@link Reader#get(int)} is likely to perform a disk seek.
 *
 * @param in the stream to read data from
 * @return a direct Reader
 * @throws IOException If there is a low-level I/O error
 * @lucene.internal
 */
public static Reader getDirectReader(IndexInput in) throws IOException {
  // Header first: it supplies the format version the direct-reader factory needs.
  final int headerVersion = CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
  final int bpv = in.readVInt();
  assert 0 < bpv && bpv <= 64 : "bitsPerValue=" + bpv;
  final int count = in.readVInt();
  final Format storedFormat = Format.byId(in.readVInt());
  return getDirectReaderNoHeader(in, storedFormat, headerVersion, count, bpv);
}
version = CodecUtil.checkHeader(in, FILE_FORMAT_NAME, VERSION_PACKED, VERSION_CURRENT); if (version < VERSION_PACKED_REMOVED) { if (in.readByte() == 1) {
version = CodecUtil.checkHeader(in, BKDWriter.CODEC_NAME, BKDWriter.VERSION_START, BKDWriter.VERSION_CURRENT); numDataDims = in.readVInt(); if (version >= BKDWriter.VERSION_SELECTIVE_INDEXING) {
if (file.startsWith(CORRUPTED)) { try (ChecksumIndexInput input = directory.openChecksumInput(file, IOContext.READONCE)) { int version = CodecUtil.checkHeader(input, CODEC, VERSION_START, VERSION);
try (IndexInput indexInput = directory.openInput(KEYSTORE_FILENAME, IOContext.READONCE)) { ChecksumIndexInput input = new BufferedChecksumIndexInput(indexInput); int formatVersion = CodecUtil.checkHeader(input, KEYSTORE_FILENAME, MIN_FORMAT_VERSION, FORMAT_VERSION); byte hasPasswordByte = input.readByte(); boolean hasPassword = hasPasswordByte == 1;
public static Checkpoint read(Path path) throws IOException { try (Directory dir = new SimpleFSDirectory(path.getParent())) { try (IndexInput indexInput = dir.openInput(path.getFileName().toString(), IOContext.DEFAULT)) { // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here. CodecUtil.checksumEntireFile(indexInput); final int fileVersion = CodecUtil.checkHeader(indexInput, CHECKPOINT_CODEC, INITIAL_VERSION, CURRENT_VERSION); if (fileVersion == INITIAL_VERSION) { assert indexInput.length() == V1_FILE_SIZE : indexInput.length(); return Checkpoint.readCheckpointV5_0_0(indexInput); } else if (fileVersion == VERSION_6_0_0) { assert indexInput.length() == V2_FILE_SIZE : indexInput.length(); return Checkpoint.readCheckpointV6_0_0(indexInput); } else { assert fileVersion == CURRENT_VERSION : fileVersion; assert indexInput.length() == V3_FILE_SIZE : indexInput.length(); return Checkpoint.readCheckpointV6_4_0(indexInput); } } } }
final int version; try { version = CodecUtil.checkHeader(new InputStreamDataInput(in), TRANSLOG_CODEC, VERSION_CHECKSUMS, VERSION_PRIMARY_TERM); } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException e) { tryReportOldVersionError(path, channel);
/** * Reads blob with specified name without resolving the blobName using using {@link #blobName} method. * * @param blobContainer blob container * @param blobName blob name */ public T readBlob(BlobContainer blobContainer, String blobName) throws IOException { try (InputStream inputStream = blobContainer.readBlob(blobName)) { ByteArrayOutputStream out = new ByteArrayOutputStream(); Streams.copy(inputStream, out); final byte[] bytes = out.toByteArray(); final String resourceDesc = "ChecksumBlobStoreFormat.readBlob(blob=\"" + blobName + "\")"; try (ByteArrayIndexInput indexInput = new ByteArrayIndexInput(resourceDesc, bytes)) { CodecUtil.checksumEntireFile(indexInput); CodecUtil.checkHeader(indexInput, codec, VERSION, VERSION); long filePointer = indexInput.getFilePointer(); long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer; BytesReference bytesReference = new BytesArray(bytes, (int) filePointer, (int) contentSize); return read(bytesReference); } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { // we trick this into a dedicated exception with the original stacktrace throw new CorruptStateException(ex); } } }
/** * Reads the state from a given file and compares the expected version against the actual version of * the state. */ public final T read(NamedXContentRegistry namedXContentRegistry, Path file) throws IOException { try (Directory dir = newDirectory(file.getParent())) { try (IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) { // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here. CodecUtil.checksumEntireFile(indexInput); CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, STATE_FILE_VERSION); final XContentType xContentType = XContentType.values()[indexInput.readInt()]; if (xContentType != FORMAT) { throw new IllegalStateException("expected state in " + file + " to be " + FORMAT + " format but was " + xContentType); } long filePointer = indexInput.getFilePointer(); long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer; try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) { try (XContentParser parser = XContentFactory.xContent(FORMAT) .createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, new InputStreamIndexInput(slice, contentSize))) { return fromXContent(parser); } } } catch(CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { // we trick this into a dedicated exception with the original stacktrace throw new CorruptStateException(ex); } } }
@Override public void init(IndexInput termsIn, SegmentReadState state) throws IOException { // Make sure we are talking to the matching postings writer CodecUtil.checkHeader(termsIn, Lucene41PostingsFormat.TERMS_CODEC, Lucene41PostingsFormat.VERSION_START, Lucene41PostingsFormat.VERSION_CURRENT); final int indexBlockSize = termsIn.readVInt(); if (indexBlockSize != BLOCK_SIZE) { throw new IllegalStateException("index-time BLOCK_SIZE (" + indexBlockSize + ") != read-time BLOCK_SIZE (" + BLOCK_SIZE + ")"); } }
/** Reads the terms file header and returns the version it declares. */
private int readHeader(IndexInput input) throws IOException {
  final int headerVersion = CodecUtil.checkHeader(input, TERMS_CODEC_NAME, VERSION_START, VERSION_CURRENT);
  // Pre-append-only files store a long directly after the header; capture it.
  if (headerVersion < VERSION_APPEND_ONLY) {
    dirOffset = input.readLong();
  }
  return headerVersion;
}
/** Reads the index file header and returns the version it declares. */
private int readIndexHeader(IndexInput input) throws IOException {
  final int headerVersion = CodecUtil.checkHeader(input, TERMS_INDEX_CODEC_NAME, VERSION_START, VERSION_CURRENT);
  // Pre-append-only files store a long directly after the header; capture it.
  if (headerVersion < VERSION_APPEND_ONLY) {
    indexDirOffset = input.readLong();
  }
  return headerVersion;
}
@Override public void init(IndexInput termsIn, SegmentReadState state) throws IOException { // Make sure we are talking to the matching past writer CodecUtil.checkHeader(termsIn, TERMS_CODEC, VERSION_START, VERSION_CURRENT); skipInterval = termsIn.readInt(); maxSkipLevels = termsIn.readInt(); skipMinimum = termsIn.readInt(); }
public static Checkpoint read(Path path) throws IOException { try (Directory dir = new SimpleFSDirectory(path.getParent())) { try (IndexInput indexInput = dir.openInput(path.getFileName().toString(), IOContext.DEFAULT)) { if (indexInput.length() == LEGACY_NON_CHECKSUMMED_FILE_LENGTH) { // OLD unchecksummed file that was written < ES 5.0.0 return Checkpoint.readNonChecksummed(indexInput); } // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here. CodecUtil.checksumEntireFile(indexInput); final int fileVersion = CodecUtil.checkHeader(indexInput, CHECKPOINT_CODEC, INITIAL_VERSION, INITIAL_VERSION); return Checkpoint.readChecksummedV1(indexInput); } } }
@Override public void init(final IndexInput termsIn) throws IOException { // Make sure we are talking to the matching past writer CodecUtil.checkHeader(termsIn, Siren10PostingsWriter.CODEC, Siren10PostingsWriter.VERSION_START, Siren10PostingsWriter.VERSION_START); blockSkipInterval = termsIn.readInt(); maxSkipLevels = termsIn.readInt(); blockSkipMinimum = termsIn.readInt(); maxBlockSize = termsIn.readInt(); }
@Override public void init(final IndexInput termsIn) throws IOException { // Make sure we are talking to the matching past writer CodecUtil.checkHeader(termsIn, Siren10PostingsWriter.CODEC, Siren10PostingsWriter.VERSION_START, Siren10PostingsWriter.VERSION_CURRENT); blockSkipInterval = termsIn.readInt(); maxSkipLevels = termsIn.readInt(); blockSkipMinimum = termsIn.readInt(); maxBlockSize = termsIn.readInt(); }