protected void flush() throws IOException { assert off > 0; final float avg = off == 1 ? 0f : (float) (values[off - 1] - values[0]) / (off - 1); long min = values[0]; // adjust min so that all deltas will be positive for (int i = 1; i < off; ++i) { final long actual = values[i]; final long expected = expected(min, avg, i); if (expected > actual) { min -= (expected - actual); } } long maxDelta = 0; for (int i = 0; i < off; ++i) { values[i] = values[i] - expected(min, avg, i); maxDelta = Math.max(maxDelta, values[i]); } out.writeZLong(min); out.writeInt(Float.floatToIntBits(avg)); if (maxDelta == 0) { out.writeVInt(0); } else { final int bitsRequired = PackedInts.bitsRequired(maxDelta); out.writeVInt(bitsRequired); writeValues(bitsRequired); } off = 0; }
/**
 * Writes a monotonic-compressed numeric entry holding the prefix sums of the
 * given per-entry lengths (i.e. absolute start addresses), plus its metadata.
 */
private void addAddresses(FieldInfo field, Iterable<Number> values) throws IOException {
    // Metadata header for a monotonic-compressed numeric field.
    meta.writeVInt(field.number);
    meta.writeByte(Lucene50DocValuesFormat.NUMERIC);
    meta.writeVInt(MONOTONIC_COMPRESSED);
    meta.writeLong(-1L);
    meta.writeLong(data.getFilePointer());
    meta.writeVLong(maxDoc);
    meta.writeVInt(PackedInts.VERSION_CURRENT);
    meta.writeVInt(MONOTONIC_BLOCK_SIZE);

    // Prefix-sum the lengths into addresses; a leading 0 marks the start.
    final MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(data, MONOTONIC_BLOCK_SIZE);
    long address = 0;
    writer.add(address);
    for (Number length : values) {
        address += length.longValue();
        writer.add(address);
    }
    writer.finish();

    // Record where the address data ends.
    meta.writeLong(data.getFilePointer());
}
// NOTE(review): the original fragment had writer.finish() and the trailing
// statements inside the for loop with an unclosed brace; restored to match
// the intact addAddresses siblings, where finish() runs once after the loop.
meta.writeVInt(BLOCK_SIZE);
meta.writeLong(data.getFilePointer());
// Prefix-sum per-document value counts into ordinal start addresses;
// a leading 0 marks the start.
final MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(data, BLOCK_SIZE);
long addr = 0;
writer.add(addr);
for (Number v : docToValueCount) {
    addr += v.longValue();
    writer.add(addr);
}
writer.finish();
// Total number of addresses written, recorded in the metadata.
long valueCount = writer.ord();
meta.writeLong(valueCount);
SimpleFieldWriter(FieldInfo fieldInfo, long termsFilePointer) {
    this.fieldInfo = fieldInfo;
    indexStart = out.getFilePointer();
    termsStart = termsFilePointer;
    // We store numTerms+1 offsets so term n's length is offset[n+1] - offset[n];
    // seed the sequence with the initial 0 offset.
    try {
        termOffsets.add(0L);
    } catch (IOException unexpected) {
        // The offsets are buffered in memory, so an IOException cannot really occur.
        throw new RuntimeException(unexpected);
    }
}
@Override public void finish(long termsFilePointer) throws IOException { // write primary terms dict offsets packedIndexStart = out.getFilePointer(); // relative to our indexStart termAddresses.finish(); addressBuffer.writeTo(out); packedOffsetsStart = out.getFilePointer(); // write offsets into the byte[] terms termOffsets.finish(); offsetsBuffer.writeTo(out); // our referrer holds onto us, while other fields are // being written, so don't tie up this RAM: termOffsets = termAddresses = null; addressBuffer = offsetsBuffer = null; } }
/**
 * Writes a monotonic-compressed numeric entry holding the prefix sums of the
 * given per-entry lengths (i.e. absolute start addresses), plus its metadata.
 */
private void addAddresses(FieldInfo field, Iterable<Number> values) throws IOException {
    // Metadata header for a monotonic-compressed numeric field.
    meta.writeVInt(field.number);
    meta.writeByte(Lucene49DocValuesFormat.NUMERIC);
    meta.writeVInt(MONOTONIC_COMPRESSED);
    meta.writeLong(-1L);
    meta.writeLong(data.getFilePointer());
    meta.writeVLong(maxDoc);
    meta.writeVInt(PackedInts.VERSION_CURRENT);
    meta.writeVInt(BLOCK_SIZE);

    // Prefix-sum the lengths into addresses; a leading 0 marks the start.
    final MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(data, BLOCK_SIZE);
    long address = 0;
    writer.add(address);
    for (Number length : values) {
        address += length.longValue();
        writer.add(address);
    }
    writer.finish();

    // Record where the address data ends.
    meta.writeLong(data.getFilePointer());
}
@Override public void add(BytesRef text, TermStats stats, long termsFilePointer) throws IOException { final int indexedTermLength; if (numIndexTerms == 0) { // no previous term: no bytes to write indexedTermLength = 0; } else { indexedTermLength = indexedTermPrefixLength(lastTerm.get(), text); } //System.out.println("FGW: add text=" + text.utf8ToString() + " " + text + " fp=" + termsFilePointer); // write only the min prefix that shows the diff // against prior term out.writeBytes(text.bytes, text.offset, indexedTermLength); // save delta terms pointer termAddresses.add(termsFilePointer - termsStart); // save term length (in bytes) assert indexedTermLength <= Short.MAX_VALUE; currentOffset += indexedTermLength; termOffsets.add(currentOffset); lastTerm.copyBytes(text); numIndexTerms++; }
/**
 * Writes a monotonic-compressed numeric entry holding the prefix sums of the
 * given per-entry lengths (i.e. absolute start addresses), plus its metadata.
 */
private void addAddresses(FieldInfo field, Iterable<Number> values) throws IOException {
    // Metadata header for a monotonic-compressed numeric field.
    meta.writeVInt(field.number);
    meta.writeByte(Lucene410DocValuesFormat.NUMERIC);
    meta.writeVInt(MONOTONIC_COMPRESSED);
    meta.writeLong(-1L);
    meta.writeLong(data.getFilePointer());
    meta.writeVLong(maxDoc);
    meta.writeVInt(PackedInts.VERSION_CURRENT);
    meta.writeVInt(BLOCK_SIZE);

    // Prefix-sum the lengths into addresses; a leading 0 marks the start.
    final MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(data, BLOCK_SIZE);
    long address = 0;
    writer.add(address);
    for (Number length : values) {
        address += length.longValue();
        writer.add(address);
    }
    writer.finish();

    // Record where the address data ends.
    meta.writeLong(data.getFilePointer());
}
protected void flush() throws IOException { assert off > 0; final float avg = off == 1 ? 0f : (float) (values[off - 1] - values[0]) / (off - 1); long min = values[0]; // adjust min so that all deltas will be positive for (int i = 1; i < off; ++i) { final long actual = values[i]; final long expected = expected(min, avg, i); if (expected > actual) { min -= (expected - actual); } } long maxDelta = 0; for (int i = 0; i < off; ++i) { values[i] = values[i] - expected(min, avg, i); maxDelta = Math.max(maxDelta, values[i]); } out.writeZLong(min); out.writeInt(Float.floatToIntBits(avg)); if (maxDelta == 0) { out.writeVInt(0); } else { final int bitsRequired = PackedInts.bitsRequired(maxDelta); out.writeVInt(bitsRequired); writeValues(bitsRequired); } off = 0; }
/**
 * Writes the reverse term index: for each reverse interval, the shortest
 * prefix of its first term that still sorts after the previous interval's
 * last term, plus monotonic addresses into those prefix bytes.
 */
private void addReverseTermIndex(FieldInfo field, final Iterable<BytesRef> values, int maxLength) throws IOException {
    final long startFP = data.getFilePointer();
    final BytesRefBuilder priorTerm = new BytesRefBuilder();
    priorTerm.grow(maxLength);
    final BytesRef indexTerm = new BytesRef();
    final PagedBytes pagedBytes = new PagedBytes(15);
    final MonotonicBlockPackedWriter addresses = new MonotonicBlockPackedWriter(data, MONOTONIC_BLOCK_SIZE);

    long ord = 0;
    for (BytesRef term : values) {
        final int positionInInterval = (int) (ord & REVERSE_INTERVAL_MASK);
        if (positionInInterval == 0) {
            // First term of an interval: keep only the shortest sort-key prefix.
            final int sortKeyLen = LegacyStringHelper.sortKeyLength(priorTerm.get(), term);
            indexTerm.bytes = term.bytes;
            indexTerm.offset = term.offset;
            indexTerm.length = sortKeyLen;
            addresses.add(pagedBytes.copyUsingLengthPrefix(indexTerm));
        } else if (positionInInterval == REVERSE_INTERVAL_MASK) {
            // Last term of the interval: remember it for the next sort-key computation.
            priorTerm.copyBytes(term);
        }
        ord++;
    }
    addresses.finish();

    // Freeze the buffered prefix bytes and copy them into the data file.
    final long numBytes = pagedBytes.getPointer();
    pagedBytes.freeze(true);
    final PagedBytesDataInput in = pagedBytes.getDataInput();
    meta.writeLong(startFP);
    data.writeVLong(numBytes);
    data.copyBytes(in, numBytes);
}
protected void flush() throws IOException { assert off > 0; final float avg = off == 1 ? 0f : (float) (values[off - 1] - values[0]) / (off - 1); long min = values[0]; // adjust min so that all deltas will be positive for (int i = 1; i < off; ++i) { final long actual = values[i]; final long expected = expected(min, avg, i); if (expected > actual) { min -= (expected - actual); } } long maxDelta = 0; for (int i = 0; i < off; ++i) { values[i] = values[i] - expected(min, avg, i); maxDelta = Math.max(maxDelta, values[i]); } out.writeZLong(min); out.writeInt(Float.floatToIntBits(avg)); if (maxDelta == 0) { out.writeVInt(0); } else { final int bitsRequired = PackedInts.bitsRequired(maxDelta); out.writeVInt(bitsRequired); writeValues(bitsRequired); } off = 0; }
/**
 * Writes the reverse term index: for each reverse interval, the shortest
 * prefix of its first term that still sorts after the previous interval's
 * last term, plus monotonic addresses into those prefix bytes.
 */
private void addReverseTermIndex(FieldInfo field, final Iterable<BytesRef> values, int maxLength) throws IOException {
    final long startFP = data.getFilePointer();
    final BytesRefBuilder priorTerm = new BytesRefBuilder();
    priorTerm.grow(maxLength);
    final BytesRef indexTerm = new BytesRef();
    final PagedBytes pagedBytes = new PagedBytes(15);
    final MonotonicBlockPackedWriter addresses = new MonotonicBlockPackedWriter(data, MONOTONIC_BLOCK_SIZE);

    long ord = 0;
    for (BytesRef term : values) {
        final int positionInInterval = (int) (ord & REVERSE_INTERVAL_MASK);
        if (positionInInterval == 0) {
            // First term of an interval: keep only the shortest sort-key prefix.
            final int sortKeyLen = StringHelper.sortKeyLength(priorTerm.get(), term);
            indexTerm.bytes = term.bytes;
            indexTerm.offset = term.offset;
            indexTerm.length = sortKeyLen;
            addresses.add(pagedBytes.copyUsingLengthPrefix(indexTerm));
        } else if (positionInInterval == REVERSE_INTERVAL_MASK) {
            // Last term of the interval: remember it for the next sort-key computation.
            priorTerm.copyBytes(term);
        }
        ord++;
    }
    addresses.finish();

    // Freeze the buffered prefix bytes and copy them into the data file.
    final long numBytes = pagedBytes.getPointer();
    pagedBytes.freeze(true);
    final PagedBytesDataInput in = pagedBytes.getDataInput();
    meta.writeLong(startFP);
    data.writeVLong(numBytes);
    data.copyBytes(in, numBytes);
}
protected void flush() throws IOException { assert off > 0; final float avg = off == 1 ? 0f : (float) (values[off - 1] - values[0]) / (off - 1); long min = values[0]; // adjust min so that all deltas will be positive for (int i = 1; i < off; ++i) { final long actual = values[i]; final long expected = expected(min, avg, i); if (expected > actual) { min -= (expected - actual); } } long maxDelta = 0; for (int i = 0; i < off; ++i) { values[i] = values[i] - expected(min, avg, i); maxDelta = Math.max(maxDelta, values[i]); } out.writeZLong(min); out.writeInt(Float.floatToIntBits(avg)); if (maxDelta == 0) { out.writeVInt(0); } else { final int bitsRequired = PackedInts.bitsRequired(maxDelta); out.writeVInt(bitsRequired); writeValues(bitsRequired); } off = 0; }
// NOTE(review): the original fragment had writer.finish() inside the for loop
// with unclosed braces; restored so finish() runs once after all addresses.
meta.writeVInt(BLOCK_SIZE);
final MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(data, BLOCK_SIZE);
long addr = 0;
writer.add(addr);
for (BytesRef v : values) {
    if (v != null) {
        addr += v.length;
    }
    // Missing (null) values contribute zero length, so their end address
    // repeats the previous one.
    writer.add(addr);
}
writer.finish();
// NOTE(review): the original fragment had writer.finish() inside the for loop
// with unclosed braces; restored so finish() runs once after all addresses.
meta.writeVInt(MONOTONIC_BLOCK_SIZE);
final MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(data, MONOTONIC_BLOCK_SIZE);
long addr = 0;
writer.add(addr);
for (BytesRef v : values) {
    if (v != null) {
        addr += v.length;
    }
    // Missing (null) values contribute zero length, so their end address
    // repeats the previous one.
    writer.add(addr);
}
writer.finish();
// NOTE(review): the original fragment had writer.finish() inside the for loop
// with unclosed braces; restored so finish() runs once after all addresses.
meta.writeVInt(BLOCK_SIZE);
final MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(data, BLOCK_SIZE);
long addr = 0;
writer.add(addr);
for (BytesRef v : values) {
    if (v != null) {
        addr += v.length;
    }
    // Missing (null) values contribute zero length, so their end address
    // repeats the previous one.
    writer.add(addr);
}
writer.finish();
/**
 * Writes the reverse term index: for each reverse interval, the shortest
 * prefix of its first term that still sorts after the previous interval's
 * last term, plus monotonic addresses into those prefix bytes.
 */
private void addReverseTermIndex(FieldInfo field, final Iterable<BytesRef> values, int maxLength) throws IOException {
    final long startFP = data.getFilePointer();
    final BytesRefBuilder priorTerm = new BytesRefBuilder();
    priorTerm.grow(maxLength);
    final BytesRef indexTerm = new BytesRef();
    final PagedBytes pagedBytes = new PagedBytes(15);
    final MonotonicBlockPackedWriter addresses = new MonotonicBlockPackedWriter(data, MONOTONIC_BLOCK_SIZE);

    long ord = 0;
    for (BytesRef term : values) {
        final int positionInInterval = (int) (ord & REVERSE_INTERVAL_MASK);
        if (positionInInterval == 0) {
            // First term of an interval: keep only the shortest sort-key prefix.
            final int sortKeyLen = StringHelper.sortKeyLength(priorTerm.get(), term);
            indexTerm.bytes = term.bytes;
            indexTerm.offset = term.offset;
            indexTerm.length = sortKeyLen;
            addresses.add(pagedBytes.copyUsingLengthPrefix(indexTerm));
        } else if (positionInInterval == REVERSE_INTERVAL_MASK) {
            // Last term of the interval: remember it for the next sort-key computation.
            priorTerm.copyBytes(term);
        }
        ord++;
    }
    addresses.finish();

    // Freeze the buffered prefix bytes and copy them into the data file.
    final long numBytes = pagedBytes.getPointer();
    pagedBytes.freeze(true);
    final PagedBytesDataInput in = pagedBytes.getDataInput();
    meta.writeLong(startFP);
    data.writeVLong(numBytes);
    data.copyBytes(in, numBytes);
}
/**
 * Writes the reverse term index: for each reverse interval, the shortest
 * prefix of its first term that still sorts after the previous interval's
 * last term, plus monotonic addresses into those prefix bytes.
 */
private void addReverseTermIndex(FieldInfo field, final Iterable<BytesRef> values, int maxLength) throws IOException {
    final long startFP = data.getFilePointer();
    final BytesRefBuilder priorTerm = new BytesRefBuilder();
    priorTerm.grow(maxLength);
    final BytesRef indexTerm = new BytesRef();
    final PagedBytes pagedBytes = new PagedBytes(15);
    final MonotonicBlockPackedWriter addresses = new MonotonicBlockPackedWriter(data, BLOCK_SIZE);

    long ord = 0;
    for (BytesRef term : values) {
        final int positionInInterval = (int) (ord & REVERSE_INTERVAL_MASK);
        if (positionInInterval == 0) {
            // First term of an interval: keep only the shortest sort-key prefix.
            final int sortKeyLen = StringHelper.sortKeyLength(priorTerm.get(), term);
            indexTerm.bytes = term.bytes;
            indexTerm.offset = term.offset;
            indexTerm.length = sortKeyLen;
            addresses.add(pagedBytes.copyUsingLengthPrefix(indexTerm));
        } else if (positionInInterval == REVERSE_INTERVAL_MASK) {
            // Last term of the interval: remember it for the next sort-key computation.
            priorTerm.copyBytes(term);
        }
        ord++;
    }
    addresses.finish();

    // Freeze the buffered prefix bytes and copy them into the data file.
    final long numBytes = pagedBytes.getPointer();
    pagedBytes.freeze(true);
    final PagedBytesDataInput in = pagedBytes.getDataInput();
    meta.writeLong(startFP);
    data.writeVLong(numBytes);
    data.copyBytes(in, numBytes);
}
// NOTE(review): the original fragment had writer.finish() inside the for loop
// with an unclosed brace; restored so finish() runs once after all addresses.
meta.writeVInt(BLOCK_SIZE);
final MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(data, BLOCK_SIZE);
long addr = 0;
for (BytesRef v : values) {
    addr += v.length;
    writer.add(addr);
}
writer.finish();
/**
 * Writes the reverse term index: for each reverse interval, the shortest
 * prefix of its first term that still sorts after the previous interval's
 * last term, plus monotonic addresses into those prefix bytes.
 */
private void addReverseTermIndex(FieldInfo field, final Iterable<BytesRef> values, int maxLength) throws IOException {
    final long startFP = data.getFilePointer();
    final BytesRefBuilder priorTerm = new BytesRefBuilder();
    priorTerm.grow(maxLength);
    final BytesRef indexTerm = new BytesRef();
    final PagedBytes pagedBytes = new PagedBytes(15);
    final MonotonicBlockPackedWriter addresses = new MonotonicBlockPackedWriter(data, MONOTONIC_BLOCK_SIZE);

    long ord = 0;
    for (BytesRef term : values) {
        final int positionInInterval = (int) (ord & REVERSE_INTERVAL_MASK);
        if (positionInInterval == 0) {
            // First term of an interval: keep only the shortest sort-key prefix.
            final int sortKeyLen = StringHelper.sortKeyLength(priorTerm.get(), term);
            indexTerm.bytes = term.bytes;
            indexTerm.offset = term.offset;
            indexTerm.length = sortKeyLen;
            addresses.add(pagedBytes.copyUsingLengthPrefix(indexTerm));
        } else if (positionInInterval == REVERSE_INTERVAL_MASK) {
            // Last term of the interval: remember it for the next sort-key computation.
            priorTerm.copyBytes(term);
        }
        ord++;
    }
    addresses.finish();

    // Freeze the buffered prefix bytes and copy them into the data file.
    final long numBytes = pagedBytes.getPointer();
    pagedBytes.freeze(true);
    final PagedBytesDataInput in = pagedBytes.getDataInput();
    meta.writeLong(startFP);
    data.writeVLong(numBytes);
    data.copyBytes(in, numBytes);
}