/**
 * Returns the postings format of the wrapped codec; this wrapper adds no
 * postings-level behavior of its own.
 */
@Override
public PostingsFormat postingsFormat() {
  return delegate.postingsFormat();
}
/**
 * Merges the postings of all source segments into the segment being written.
 *
 * <p>Obtains a {@code FieldsConsumer} from the codec's postings format for the
 * target segment and asks it to merge the postings described by
 * {@code mergeState}. The consumer is closed by try-with-resources even when
 * the merge throws.
 *
 * @param segmentWriteState write state of the segment being produced
 * @throws IOException if creating the consumer or merging fails
 */
private void mergeTerms(SegmentWriteState segmentWriteState) throws IOException {
  try (FieldsConsumer termsConsumer =
      codec.postingsFormat().fieldsConsumer(segmentWriteState)) {
    termsConsumer.merge(mergeState);
  }
}
}
// NOTE(review): incomplete fragment — obtains the codec's FieldsConsumer for this
// segment and opens a try block; the body and matching finally are outside this view,
// so the success/close handling cannot be verified from here.
FieldsConsumer consumer = state.segmentInfo.getCodec().postingsFormat().fieldsConsumer(state); boolean success = false; try {
// Caches the codec's postings format in a local (fragment; the enclosing method
// and any later uses of `format` are not visible in this view).
final PostingsFormat format = codec.postingsFormat();
/**
 * Returns the postings format of the wrapped codec; this wrapper adds no
 * postings-level behavior of its own.
 */
@Override
public PostingsFormat postingsFormat() {
  return delegate.postingsFormat();
}
/**
 * Returns the postings format of the wrapped codec; this wrapper adds no
 * postings-level behavior of its own.
 */
@Override
public PostingsFormat postingsFormat() {
  return delegate.postingsFormat();
}
/**
 * Returns the postings format of the wrapped codec; this wrapper adds no
 * postings-level behavior of its own.
 */
@Override
public PostingsFormat postingsFormat() {
  return delegate.postingsFormat();
}
/**
 * Merges the postings of all source segments into the segment being written.
 *
 * <p>Obtains a {@code FieldsConsumer} from the codec's postings format for the
 * target segment and asks it to merge the postings described by
 * {@code mergeState}. The consumer is closed by try-with-resources even when
 * the merge throws.
 *
 * @param segmentWriteState write state of the segment being produced
 * @throws IOException if creating the consumer or merging fails
 */
private void mergeTerms(SegmentWriteState segmentWriteState) throws IOException {
  try (FieldsConsumer termsConsumer =
      codec.postingsFormat().fieldsConsumer(segmentWriteState)) {
    termsConsumer.merge(mergeState);
  }
}
}
/**
 * Merges the postings of all source segments into the segment being written.
 *
 * <p>Obtains a {@code FieldsConsumer} from the codec's postings format for the
 * target segment and asks it to merge the postings described by
 * {@code mergeState}. The consumer is closed by try-with-resources even when
 * the merge throws.
 *
 * @param segmentWriteState write state of the segment being produced
 * @throws IOException if creating the consumer or merging fails
 */
private void mergeTerms(SegmentWriteState segmentWriteState) throws IOException {
  try (FieldsConsumer termsConsumer =
      codec.postingsFormat().fieldsConsumer(segmentWriteState)) {
    termsConsumer.merge(mergeState);
  }
}
}
/**
 * Merges the postings of all source segments into the segment being written.
 *
 * <p>Obtains a {@code FieldsConsumer} from the codec's postings format for the
 * target segment and asks it to merge the postings described by
 * {@code mergeState}. The consumer is closed by try-with-resources even when
 * the merge throws.
 *
 * @param segmentWriteState write state of the segment being produced
 * @throws IOException if creating the consumer or merging fails
 */
private void mergeTerms(SegmentWriteState segmentWriteState) throws IOException {
  try (FieldsConsumer termsConsumer =
      codec.postingsFormat().fieldsConsumer(segmentWriteState)) {
    termsConsumer.merge(mergeState);
  }
}
}
@Override public void flush(Map<String,TermsHashPerField> fieldsToFlush, final SegmentWriteState state) throws IOException { super.flush(fieldsToFlush, state); // Gather all fields that saw any postings: List<FreqProxTermsWriterPerField> allFields = new ArrayList<>(); for (TermsHashPerField f : fieldsToFlush.values()) { final FreqProxTermsWriterPerField perField = (FreqProxTermsWriterPerField) f; if (perField.bytesHash.size() > 0) { perField.sortPostings(); assert perField.fieldInfo.getIndexOptions() != IndexOptions.NONE; allFields.add(perField); } } // Sort by field name CollectionUtil.introSort(allFields); Fields fields = new FreqProxFields(allFields); applyDeletes(state, fields); FieldsConsumer consumer = state.segmentInfo.getCodec().postingsFormat().fieldsConsumer(state); boolean success = false; try { consumer.write(fields); success = true; } finally { if (success) { IOUtils.close(consumer); } else { IOUtils.closeWhileHandlingException(consumer); } } }
@Override public void flush(Map<String,TermsHashPerField> fieldsToFlush, final SegmentWriteState state) throws IOException { super.flush(fieldsToFlush, state); // Gather all fields that saw any postings: List<FreqProxTermsWriterPerField> allFields = new ArrayList<>(); for (TermsHashPerField f : fieldsToFlush.values()) { final FreqProxTermsWriterPerField perField = (FreqProxTermsWriterPerField) f; if (perField.bytesHash.size() > 0) { perField.sortPostings(); assert perField.fieldInfo.getIndexOptions() != IndexOptions.NONE; allFields.add(perField); } } // Sort by field name CollectionUtil.introSort(allFields); Fields fields = new FreqProxFields(allFields); applyDeletes(state, fields); FieldsConsumer consumer = state.segmentInfo.getCodec().postingsFormat().fieldsConsumer(state); boolean success = false; try { consumer.write(fields); success = true; } finally { if (success) { IOUtils.close(consumer); } else { IOUtils.closeWhileHandlingException(consumer); } } }
// NOTE(review): incomplete fragment — obtains the codec's FieldsConsumer for this
// segment and opens a try block; the body and matching finally are outside this view,
// so the success/close handling cannot be verified from here.
FieldsConsumer consumer = state.segmentInfo.getCodec().postingsFormat().fieldsConsumer(state); boolean success = false; try {
// Caches the codec's postings format in a local (fragment; the enclosing method
// and any later uses of `format` are not visible in this view).
final PostingsFormat format = codec.postingsFormat();
// Caches the codec's postings format in a local (fragment; the enclosing method
// and any later uses of `format` are not visible in this view).
final PostingsFormat format = codec.postingsFormat();
// Caches the codec's postings format in a local (fragment; the enclosing method
// and any later uses of `format` are not visible in this view).
final PostingsFormat format = codec.postingsFormat();