throw new IllegalStateException("missing attribute: " + PER_FIELD_SUFFIX_KEY + " for field: " + fieldName); PostingsFormat format = PostingsFormat.forName(formatName); String segmentSuffix = getSuffix(formatName, suffix); if (!formats.containsKey(segmentSuffix)) { formats.put(segmentSuffix, format.fieldsProducer(new SegmentReadState(readState, segmentSuffix)));
throw new IllegalStateException("invalid null PostingsFormat for field=\"" + field + "\""); String formatName = format.getName();
/**
 * Merges the postings of all fields from the segments being merged into the
 * target segment, delegating to this codec's postings format.
 *
 * @param segmentWriteState write state describing the target segment
 * @throws IOException if writing the merged postings fails
 */
private void mergeTerms(SegmentWriteState segmentWriteState) throws IOException {
    // try-with-resources guarantees the consumer is closed even when merge throws.
    try (FieldsConsumer fieldsConsumer = codec.postingsFormat().fieldsConsumer(segmentWriteState)) {
        fieldsConsumer.merge(mergeState);
    }
}
}
// Opens the delegate postings consumer and the completion dictionary output.
// If either open fails, everything opened so far is closed so no resource leaks.
CompletionFieldsConsumer(PostingsFormat delegatePostingsFormat, SegmentWriteState state) throws IOException {
    this.delegatePostingsFormatName = delegatePostingsFormat.getName();
    this.state = state;
    // Dictionary file name: <segment>_<suffix>.<DICT_EXTENSION>
    String dictFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, DICT_EXTENSION);
    boolean success = false;
    try {
        this.delegateFieldsConsumer = delegatePostingsFormat.fieldsConsumer(state);
        dictOut = state.directory.createOutput(dictFile, state.context);
        // Stamps codec name, version and segment id so the reader can validate the file.
        CodecUtil.writeIndexHeader(dictOut, CODEC_NAME, COMPLETION_VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
        success = true;
    } finally {
        if (success == false) {
            // Close partially-opened resources, suppressing secondary exceptions
            // so the original failure propagates to the caller.
            IOUtils.closeWhileHandlingException(dictOut, delegateFieldsConsumer);
        }
    }
}
/** Returns the postings format this test codec delegates to (stock Lucene 5.0). */
@Override
protected PostingsFormat delegatePostingsFormat() {
    // Looked up through the PostingsFormat SPI registry by name.
    final PostingsFormat delegate = PostingsFormat.forName("Lucene50");
    return delegate;
}
}
/**
 * Writes postings for the given segment using the stock Lucene 5.0 postings
 * format resolved via SPI.
 *
 * @param state write state for the segment being written
 * @throws IOException if the delegate consumer cannot be opened
 */
@Override
public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
    final PostingsFormat lucene50 = PostingsFormat.forName("Lucene50");
    return lucene50.fieldsConsumer(state);
}
final PostingsFormat format = codec.postingsFormat(); fields = format.fieldsProducer(segmentReadState); assert fields != null;
/**
 * Reloads all Lucene SPI implementations using the new classloader.
 * This method must be called after the new classloader has been created to
 * register the services for use.
 *
 * @param loader the classloader whose classpath is scanned for SPI implementations
 */
static void reloadLuceneSPI(ClassLoader loader) {
    // do NOT change the order of these method calls!
    // Codecs must see their component formats first: a Codec implementation may
    // look up PostingsFormat/DocValuesFormat instances by name during reload.
    // Codecs:
    PostingsFormat.reloadPostingsFormats(loader);
    DocValuesFormat.reloadDocValuesFormats(loader);
    Codec.reloadCodecs(loader);
    // Analysis:
    CharFilterFactory.reloadCharFilters(loader);
    TokenFilterFactory.reloadTokenFilters(loader);
    TokenizerFactory.reloadTokenizers(loader);
}
/** Identifies this codec together with its default test postings format. */
@Override
public String toString() {
    // Explicit toString() kept so a null defaultTestFormat still fails fast (NPE),
    // exactly as the original concatenated form did.
    StringBuilder sb = new StringBuilder("Siren10Codec[");
    sb.append(defaultTestFormat.toString());
    return sb.append(']').toString();
}
throw new IllegalStateException("invalid null PostingsFormat for field=\"" + field + "\""); String formatName = format.getName(); FieldsConsumer consumer = format.fieldsConsumer(group.state); toClose.add(consumer); consumer.write(maskedFields);
final PostingsFormat format = codec.postingsFormat(); fields = format.fieldsProducer(segmentReadState); assert fields != null;
/**
 * Reloads all Lucene SPI implementations using the new classloader.
 * This method must be called after {@link #addToClassLoader(String)}
 * and {@link #addToClassLoader(String,FileFilter)} before using
 * this ResourceLoader.
 */
void reloadLuceneSPI() {
    // NOTE(review): unlike the ClassLoader-parameterized reload helper elsewhere,
    // this method never calls DocValuesFormat.reloadDocValuesFormats(...) —
    // verify that is intentional, otherwise custom doc-values formats added to
    // the classloader will not be registered.
    // Codecs:
    PostingsFormat.reloadPostingsFormats(this.classLoader);
    Codec.reloadCodecs(this.classLoader);
    // Analysis:
    CharFilterFactory.reloadCharFilters(this.classLoader);
    TokenFilterFactory.reloadTokenFilters(this.classLoader);
    TokenizerFactory.reloadTokenizers(this.classLoader);
}
/** Human-readable name of this codec, including its default test format. */
@Override
public String toString() {
    // .toString() invoked explicitly so null behaves exactly as before (throws NPE).
    final String formatDescription = defaultTestFormat.toString();
    return "Siren10Codec[" + formatDescription + "]";
}
@Override public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException { FieldsProducer postings = PostingsFormat.forName("Lucene50").fieldsProducer(state); if (state.context.context != IOContext.Context.MERGE) { FieldsProducer loadedPostings; try { postings.checkIntegrity(); loadedPostings = new DirectFields(state, postings, minSkipCount, lowFreqCutoff); } finally { postings.close(); } return loadedPostings; } else { // Don't load postings for merge: return postings; } }
throw new IllegalStateException("invalid null PostingsFormat for field=\"" + field + "\""); String formatName = format.getName(); FieldsConsumer consumer = format.fieldsConsumer(group.state); toClose.add(consumer); consumer.write(maskedFields);
@Override public void write(Fields fields) throws IOException { Map<PostingsFormat, FieldsGroup> formatToGroups = buildFieldsGroupMapping(fields); // Write postings boolean success = false; try { for (Map.Entry<PostingsFormat, FieldsGroup> ent : formatToGroups.entrySet()) { PostingsFormat format = ent.getKey(); final FieldsGroup group = ent.getValue(); // Exposes only the fields from this group: Fields maskedFields = new FilterFields(fields) { @Override public Iterator<String> iterator() { return group.fields.iterator(); } }; FieldsConsumer consumer = format.fieldsConsumer(group.state); toClose.add(consumer); consumer.write(maskedFields); } success = true; } finally { if (!success) { IOUtils.closeWhileHandlingException(toClose); } } }
throw new IllegalStateException("invalid null PostingsFormat for field=\"" + field + "\""); String formatName = format.getName();
final PostingsFormat format = codec.postingsFormat(); fields = format.fieldsProducer(segmentReadState); assert fields != null;