/** Clears this structure; equivalent to calling {@code clear(true)}. */
public void clear() {
    clear(true);
}
/**
 * Closes the BytesRefHash and releases all internally used memory.
 */
public void close() {
    clear(true);
    ids = null;
    // Give the ids table's footprint back to the shared memory-usage counter.
    final int bytesReleased = Integer.BYTES * -hashSize;
    bytesUsed.addAndGet(bytesReleased);
}
/** Clears this field's bytes hash and forwards the reset down the per-field chain. */
void reset() {
    bytesHash.clear(false);
    if (nextPerField != null) {
        nextPerField.reset();
    }
}
/**
 * Clears this structure.
 *
 * <p>Shorthand for {@code clear(true)}.
 */
public void clear() {
    clear(true);
}
// Convenience overload: forwards to the boolean variant with `true`.
public void clear() {
    clear(true);
}
/** No-argument clear; delegates to {@code clear(boolean)} passing {@code true}. */
public void clear() {
    clear(true);
}
/**
 * Empties the per-field bytes hash, then cascades the reset to the next
 * per-field in the chain when one is present.
 */
void reset() {
    bytesHash.clear(false);
    if (nextPerField == null) {
        return;
    }
    nextPerField.reset();
}
/**
 * Closes the BytesRefHash and releases all internally used memory.
 *
 * <p>Drops the {@code ids} table and subtracts its footprint from the
 * shared {@code bytesUsed} accounting counter.
 */
public void close() {
    clear(true);
    ids = null;
    // Integer.BYTES (java.lang, == 4) replaces RamUsageEstimator.NUM_BYTES_INT:
    // identical value, no helper-class dependency, and consistent with the
    // other close() implementations in this codebase that already use it.
    bytesUsed.addAndGet(Integer.BYTES * -hashSize);
}
// Drop all entries from this field's hash, then let the downstream
// per-field (when present) do the same.
void reset() {
    bytesHash.clear(false);
    final boolean hasDownstream = nextPerField != null;
    if (hasDownstream) {
        nextPerField.reset();
    }
}
/** Empties the bytes hash and recursively resets the chained per-field, if any. */
void reset() {
    bytesHash.clear(false);
    if (null != nextPerField) {
        nextPerField.reset();
    }
}
/**
 * Closes the BytesRefHash and releases all internally used memory.
 *
 * <p>Clears the hash, drops the {@code ids} table, and credits the bytes
 * it occupied back to the shared {@code bytesUsed} counter.
 */
public void close() {
    clear(true);
    ids = null;
    // Use java.lang's Integer.BYTES instead of RamUsageEstimator.NUM_BYTES_INT:
    // same constant (4), one less project dependency, and consistent with the
    // Integer.BYTES-based close() variants elsewhere in this codebase.
    bytesUsed.addAndGet(Integer.BYTES * -hashSize);
}
/**
 * Closes the BytesRefHash and releases all internally used memory.
 */
public void close() {
    clear(true);
    ids = null;
    // Subtract the ids table's size from the shared accounting counter.
    final int delta = -hashSize * Integer.BYTES;
    bytesUsed.addAndGet(delta);
}
/**
 * Resets this structure to its freshly-constructed state: the map is
 * cleared and re-initialized, the ordinals storage shrinks back to a
 * single empty page, and all counters return to zero.
 */
@Override
public synchronized void clear() {
    map.clear();
    map.reinit();
    // Replace the ordinals storage with one fresh page.
    int[][] fresh = new int[1][];
    fresh[0] = new int[PAGE_SIZE];
    ordinals = fresh;
    pageCount = 0;
    count = 0;
    // After a full reset no tracked memory should remain.
    assert bytesUsed.get() == 0;
}
/**
 * Installs the search groups for the second collection pass. Re-initializes
 * the values hash, records each non-null group value, and flags whether a
 * null ("empty") group was requested.
 */
@Override
public void setGroups(Collection<SearchGroup<BytesRef>> searchGroups) {
    this.values.clear();
    this.values.reinit();
    for (SearchGroup<BytesRef> group : searchGroups) {
        if (group.groupValue != null) {
            this.values.add(group.groupValue);
        } else {
            // A null group value means the "no value" bucket participates.
            includeEmpty = true;
        }
    }
    this.secondPass = true;
}
// Closing brace of the enclosing type (its declaration is outside this view).
}
// Reset the counter structure and clear the compacted flag.
// NOTE(review): fragment of a larger method not visible here — presumably a
// full reset path; confirm against the enclosing definition.
_counter.clear(); _counter.reinit(); _compacted = false;