/** Creates a page as a {@link GrowableWriter} so it can upgrade its bits-per-value on demand. */
@Override
protected Mutable newMutable(int valueCount, int bitsPerValue) {
  final Mutable page = new GrowableWriter(bitsPerValue, valueCount, acceptableOverheadRatio);
  return page;
}
/**
 * Returns a new writer of length {@code newSize} holding this writer's values.
 * Values beyond the smaller of the two sizes are dropped; extra slots are 0.
 */
public GrowableWriter resize(int newSize) {
  final GrowableWriter resized = new GrowableWriter(getBitsPerValue(), newSize, acceptableOverheadRatio);
  final int toCopy = Math.min(newSize, size());
  PackedInts.copy(current, 0, resized, 0, toCopy, PackedInts.DEFAULT_BUFFER_SIZE);
  return resized;
}
OrdinalsStore(int maxDoc, int startBitsPerValue, float acceptableOverheadRatio) { this.startBitsPerValue = startBitsPerValue; this.acceptableOverheadRatio = acceptableOverheadRatio; positions = new PagedGrowableWriter(maxDoc, PAGE_SIZE, startBitsPerValue, acceptableOverheadRatio); firstOrdinals = new GrowableWriter(startBitsPerValue, maxDoc, acceptableOverheadRatio); // over allocate in order to never worry about the array sizes, 24 entries would allow // to store several millions of ordinals per doc... ordinals = new PagedGrowableWriter[24]; nextLevelSlices = new PagedGrowableWriter[24]; sizes = new int[24]; Arrays.fill(sizes, 1); // reserve the 1st slice on every level }
/** Creates a page as a {@link GrowableWriter} so it can upgrade its bits-per-value on demand. */
@Override
protected Mutable newMutable(int valueCount, int bitsPerValue) {
  final Mutable page = new GrowableWriter(bitsPerValue, valueCount, acceptableOverheadRatio);
  return page;
}
/** Creates a page as a {@link GrowableWriter} so it can upgrade its bits-per-value on demand. */
@Override
protected Mutable newMutable(int valueCount, int bitsPerValue) {
  final Mutable page = new GrowableWriter(bitsPerValue, valueCount, acceptableOverheadRatio);
  return page;
}
/** Creates a page as a {@link GrowableWriter} so it can upgrade its bits-per-value on demand. */
@Override
protected Mutable newMutable(int valueCount, int bitsPerValue) {
  final Mutable page = new GrowableWriter(bitsPerValue, valueCount, acceptableOverheadRatio);
  return page;
}
OrdinalsStore(int maxDoc, int startBitsPerValue, float acceptableOverheadRatio) { this.startBitsPerValue = startBitsPerValue; this.acceptableOverheadRatio = acceptableOverheadRatio; positions = new PagedGrowableWriter(maxDoc, PAGE_SIZE, startBitsPerValue, acceptableOverheadRatio); firstOrdinals = new GrowableWriter(startBitsPerValue, maxDoc, acceptableOverheadRatio); // over allocate in order to never worry about the array sizes, 24 entries would allow to store several millions of ordinals per doc... ordinals = new PagedGrowableWriter[24]; nextLevelSlices = new PagedGrowableWriter[24]; sizes = new int[24]; Arrays.fill(sizes, 1); // reserve the 1st slice on every level }
OrdinalsStore(int maxDoc, int startBitsPerValue, float acceptableOverheadRatio) { this.startBitsPerValue = startBitsPerValue; this.acceptableOverheadRatio = acceptableOverheadRatio; positions = new PagedGrowableWriter(maxDoc, PAGE_SIZE, startBitsPerValue, acceptableOverheadRatio); firstOrdinals = new GrowableWriter(startBitsPerValue, maxDoc, acceptableOverheadRatio); // over allocate in order to never worry about the array sizes, 24 entries would allow to store several millions of ordinals per doc... ordinals = new PagedGrowableWriter[24]; nextLevelSlices = new PagedGrowableWriter[24]; sizes = new int[24]; Arrays.fill(sizes, 1); // reserve the 1st slice on every level }
OrdinalsStore(int maxDoc, int startBitsPerValue, float acceptableOverheadRatio) { this.startBitsPerValue = startBitsPerValue; this.acceptableOverheadRatio = acceptableOverheadRatio; positions = new PagedGrowableWriter(maxDoc, PAGE_SIZE, startBitsPerValue, acceptableOverheadRatio); firstOrdinals = new GrowableWriter(startBitsPerValue, maxDoc, acceptableOverheadRatio); // over allocate in order to never worry about the array sizes, 24 entries would allow to store several millions of ordinals per doc... ordinals = new PagedGrowableWriter[24]; nextLevelSlices = new PagedGrowableWriter[24]; sizes = new int[24]; Arrays.fill(sizes, 1); // reserve the 1st slice on every level }
OrdinalsStore(int maxDoc, int startBitsPerValue, float acceptableOverheadRatio) { this.startBitsPerValue = startBitsPerValue; this.acceptableOverheadRatio = acceptableOverheadRatio; positions = new PagedGrowableWriter(maxDoc, PAGE_SIZE, startBitsPerValue, acceptableOverheadRatio); firstOrdinals = new GrowableWriter(startBitsPerValue, maxDoc, acceptableOverheadRatio); // over allocate in order to never worry about the array sizes, 24 entries would allow to store several millions of ordinals per doc... ordinals = new PagedGrowableWriter[24]; nextLevelSlices = new PagedGrowableWriter[24]; sizes = new int[24]; Arrays.fill(sizes, 1); // reserve the 1st slice on every level }
/**
 * Constructor for an in-memory FST being built.
 *
 * @param inputType               label type of the FST's inputs
 * @param outputs                 output implementation for the FST's arcs
 * @param willPackFST             if true, allocate the bookkeeping writers needed to later pack the FST
 * @param acceptableOverheadRatio memory/speed trade-off for the packed-ints writers
 * @param bytesPageBits           log2 of the page size used by the backing {@code BytesStore}
 */
FST(INPUT_TYPE inputType, Outputs<T> outputs, boolean willPackFST, float acceptableOverheadRatio, int bytesPageBits) {
  this.inputType = inputType;
  this.outputs = outputs;
  version = VERSION_CURRENT;
  bytesArray = null; // not used while building; bytes live in the BytesStore below
  bytes = new BytesStore(bytesPageBits);
  // pad: ensure no node gets address 0 which is reserved to mean
  // the stop state w/ no arcs
  bytes.writeByte((byte) 0);
  if (willPackFST) {
    // nodeAddress maps node ordinal -> address; inCounts tracks incoming-arc
    // counts. Both start small (8 entries) and grow on demand.
    nodeAddress = new GrowableWriter(15, 8, acceptableOverheadRatio);
    inCounts = new GrowableWriter(1, 8, acceptableOverheadRatio);
  } else {
    nodeAddress = null;
    inCounts = null;
  }
  emptyOutput = null;
  packed = false;
  nodeRefToAddress = null; // only set once the FST is packed
}
/**
 * Constructor for an in-memory FST being built.
 *
 * @param inputType               label type of the FST's inputs
 * @param outputs                 output implementation for the FST's arcs
 * @param willPackFST             if true, allocate the bookkeeping writers needed to later pack the FST
 * @param acceptableOverheadRatio memory/speed trade-off for the packed-ints writers
 * @param bytesPageBits           log2 of the page size used by the backing {@code BytesStore}
 */
FST(INPUT_TYPE inputType, Outputs<T> outputs, boolean willPackFST, float acceptableOverheadRatio, int bytesPageBits) {
  this.inputType = inputType;
  this.outputs = outputs;
  version = VERSION_CURRENT;
  bytesArray = null; // not used while building; bytes live in the BytesStore below
  bytes = new BytesStore(bytesPageBits);
  // pad: ensure no node gets address 0 which is reserved to mean
  // the stop state w/ no arcs
  bytes.writeByte((byte) 0);
  if (willPackFST) {
    // nodeAddress maps node ordinal -> address; inCounts tracks incoming-arc
    // counts. Both start small (8 entries) and grow on demand.
    nodeAddress = new GrowableWriter(15, 8, acceptableOverheadRatio);
    inCounts = new GrowableWriter(1, 8, acceptableOverheadRatio);
  } else {
    nodeAddress = null;
    inCounts = null;
  }
  emptyOutput = null;
  packed = false;
  nodeRefToAddress = null; // only set once the FST is packed
}
/**
 * Returns a new writer of length {@code newSize} holding this writer's values.
 * Values beyond the smaller of the two sizes are dropped; extra slots are 0.
 */
public GrowableWriter resize(int newSize) {
  final GrowableWriter resized = new GrowableWriter(getBitsPerValue(), newSize, acceptableOverheadRatio);
  final int toCopy = Math.min(newSize, size());
  PackedInts.copy(current, 0, resized, 0, toCopy, PackedInts.DEFAULT_BUFFER_SIZE);
  return resized;
}
/**
 * Returns a new writer of length {@code newSize} holding this writer's values.
 * Values beyond the smaller of the two sizes are dropped; extra slots are 0.
 */
public GrowableWriter resize(int newSize) {
  final GrowableWriter resized = new GrowableWriter(getBitsPerValue(), newSize, acceptableOverheadRatio);
  final int toCopy = Math.min(newSize, size());
  PackedInts.copy(current, 0, resized, 0, toCopy, PackedInts.DEFAULT_BUFFER_SIZE);
  return resized;
}
/**
 * Returns a new writer of length {@code newSize} holding this writer's values.
 * Values beyond the smaller of the two sizes are dropped; extra slots are 0.
 */
public GrowableWriter resize(int newSize) {
  final GrowableWriter resized = new GrowableWriter(getBitsPerValue(), newSize, acceptableOverheadRatio);
  final int toCopy = Math.min(newSize, size());
  PackedInts.copy(current, 0, resized, 0, toCopy, PackedInts.DEFAULT_BUFFER_SIZE);
  return resized;
}
@Override public void visitTerm(BytesRef term) { currentValue = parser.parseValue(term); if (values == null) { // Lazy alloc so for the numeric field case // (which will hit a NumberFormatException // when we first try the DEFAULT_INT_PARSER), // we don't double-alloc: int startBitsPerValue; // Make sure than missing values (0) can be stored without resizing if (currentValue < 0) { minValue = currentValue; startBitsPerValue = minValue == Long.MIN_VALUE ? 64 : PackedInts.bitsRequired(-minValue); } else { minValue = 0; startBitsPerValue = PackedInts.bitsRequired(currentValue); } values = new GrowableWriter(startBitsPerValue, reader.maxDoc(), PackedInts.FAST); if (minValue != 0) { values.fill(0, values.size(), -minValue); // default value must be 0 } valuesRef.set(new GrowableWriterAndMinValue(values, minValue)); } }
@Override public void visitTerm(BytesRef term) { currentValue = parser.parseValue(term); if (values == null) { // Lazy alloc so for the numeric field case // (which will hit a NumberFormatException // when we first try the DEFAULT_INT_PARSER), // we don't double-alloc: int startBitsPerValue; // Make sure than missing values (0) can be stored without resizing if (currentValue < 0) { minValue = currentValue; startBitsPerValue = minValue == Long.MIN_VALUE ? 64 : PackedInts.bitsRequired(-minValue); } else { minValue = 0; startBitsPerValue = PackedInts.bitsRequired(currentValue); } values = new GrowableWriter(startBitsPerValue, reader.maxDoc(), PackedInts.FAST); if (minValue != 0) { values.fill(0, values.size(), -minValue); // default value must be 0 } valuesRef.set(new GrowableWriterAndMinValue(values, minValue)); } }
// Per-document term-ordinal map, packed at startTermsBPV bits; grows its
// bits-per-value on demand as larger ordinals are written.
final GrowableWriter docToTermOrd = new GrowableWriter(startTermsBPV, maxDoc, acceptableOverheadRatio);
/**
 * Exposes the `_version` field as numeric doc values by uninverting the uid
 * terms: for each uid posting, the version is read from an 8-byte payload on
 * the first position of the first live doc. Other fields delegate to the
 * wrapped producer.
 *
 * @throws IOException on index access failure
 */
@Override
public NumericDocValues getNumeric(FieldInfo field) throws IOException {
  if (VersionFieldMapper.NAME.equals(field.name)) {
    // uninvert into a packed ints and expose as docvalues
    final Terms terms = reader.terms(UidFieldMapper.NAME);
    final TermsEnum uids = terms.iterator();
    // Start compact (2 bits/doc); grows as larger versions are stored.
    final GrowableWriter versions = new GrowableWriter(2, reader.maxDoc(), PackedInts.COMPACT);
    PostingsEnum dpe = null;
    for (BytesRef uid = uids.next(); uid != null; uid = uids.next()) {
      dpe = uids.postings(dpe, PostingsEnum.PAYLOADS);
      assert terms.hasPayloads() : "field has payloads";
      final Bits liveDocs = reader.getLiveDocs();
      for (int doc = dpe.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = dpe.nextDoc()) {
        // Skip deleted docs; only live docs contribute a version.
        if (liveDocs != null && liveDocs.get(doc) == false) {
          continue;
        }
        dpe.nextPosition();
        final BytesRef payload = dpe.getPayload();
        if (payload != null && payload.length == 8) {
          final long version = Numbers.bytesToLong(payload);
          versions.set(doc, version);
          // NOTE(review): stops at the first live doc carrying a version
          // payload — presumably each uid has at most one live doc; confirm
          // against the uid-field invariants.
          break;
        }
      }
    }
    return versions;
  } else {
    return in.getNumeric(field);
  }
}
// Per-document offset map, packed at startBPV bits; grows its bits-per-value
// on demand as larger offsets are written.
final GrowableWriter docToOffset = new GrowableWriter(startBPV, maxDoc, acceptableOverheadRatio);