@Override
public TermVectorsFormat termVectorsFormat() {
  // Forward to the wrapped codec; this wrapper does not alter term vectors.
  final TermVectorsFormat wrapped = delegate.termVectorsFormat();
  return wrapped;
}
/**
 * Merges the term vectors of every source segment into the destination segment.
 *
 * @return the number of documents whose term vectors were merged
 * @throws IOException if there is a low-level IO error
 */
private int mergeVectors() throws IOException {
  // try-with-resources guarantees the writer is closed (and any close failure
  // is suppressed, not masking a merge failure).
  try (TermVectorsWriter vectorsWriter =
      codec.termVectorsFormat().vectorsWriter(directory, mergeState.segmentInfo, context)) {
    return vectorsWriter.merge(mergeState);
  }
}
/**
 * Lazily opens the {@link TermVectorsWriter} for the in-progress segment.
 * No-op when the writer has already been created.
 *
 * @throws IOException if the codec fails to open the writer
 */
void initTermVectorsWriter() throws IOException {
  if (writer != null) {
    return; // already initialized
  }
  // Size the flush context from the current in-RAM doc count and bytes used.
  final IOContext context =
      new IOContext(new FlushInfo(docWriter.getNumDocsInRAM(), docWriter.bytesUsed()));
  writer =
      docWriter.codec
          .termVectorsFormat()
          .vectorsWriter(docWriter.directory, docWriter.getSegmentInfo(), context);
  lastDocID = 0;
}
// Flush for the sorting consumer: vectors were buffered into tmpDirectory by
// initTermVectorsWriter(); here they are either renamed into place (already
// sorted) or re-written in sorted document order.
@Override
void flush(Map<String, TermsHashPerField> fieldsToFlush, final SegmentWriteState state,
    Sorter.DocMap sortMap) throws IOException {
  // Let the superclass finish writing the (unsorted) vectors first.
  super.flush(fieldsToFlush, state, sortMap);
  if (tmpDirectory != null) {
    if (sortMap == null) {
      // we're lucky the index is already sorted, just rename the temporary file and return
      for (Map.Entry<String, String> entry : tmpDirectory.getTemporaryFiles().entrySet()) {
        tmpDirectory.rename(entry.getValue(), entry.getKey());
      }
      return;
    }
    // Index needs sorting: read the temporary vectors back and re-write them
    // into the real directory in sorted doc-id order.
    TermVectorsReader reader = docWriter.codec.termVectorsFormat()
        .vectorsReader(tmpDirectory, state.segmentInfo, state.fieldInfos, IOContext.DEFAULT);
    TermVectorsReader mergeReader = reader.getMergeInstance();
    TermVectorsWriter writer = docWriter.codec.termVectorsFormat()
        .vectorsWriter(state.directory, state.segmentInfo, IOContext.DEFAULT);
    try {
      // Verify the temporary file before copying from it.
      reader.checkIntegrity();
      for (int docID = 0; docID < state.segmentInfo.maxDoc(); docID++) {
        // sortMap maps the new (sorted) doc id back to its original id.
        Fields vectors = mergeReader.get(sortMap.newToOld(docID));
        writeTermVectors(writer, vectors, state.fieldInfos);
      }
      writer.finish(state.fieldInfos, state.segmentInfo.maxDoc());
    } finally {
      IOUtils.close(reader, writer);
      // The temporary files are no longer needed once the sorted copy exists.
      IOUtils.deleteFiles(tmpDirectory, tmpDirectory.getTemporaryFiles().values());
    }
  }
}
/**
 * Lazily opens the {@link TermVectorsWriter}, directing output into a tracking
 * temporary directory so that flush() can later sort or rename the files.
 *
 * @throws IOException if the codec fails to open the writer
 */
@Override
void initTermVectorsWriter() throws IOException {
  if (writer != null) {
    return; // already initialized
  }
  final IOContext context =
      new IOContext(new FlushInfo(docWriter.getNumDocsInRAM(), docWriter.bytesUsed()));
  // Buffer vectors in a temp wrapper; flush() sorts/renames them into place.
  tmpDirectory = new TrackingTmpOutputDirectoryWrapper(docWriter.directory);
  writer =
      docWriter.codec
          .termVectorsFormat()
          .vectorsWriter(tmpDirectory, docWriter.getSegmentInfo(), context);
  lastDocID = 0;
}
// NOTE(review): fragment — the opening `if` of this branch is outside this
// view. Opens the segment's term-vectors reader against cfsDir (presumably the
// compound-file directory — confirm against the enclosing condition); the else
// branch leaves the reader null.
termVectorsReaderOrig = si.info.getCodec().termVectorsFormat().vectorsReader(cfsDir, si.info, coreFieldInfos, context);
} else {
  // No term vectors for this segment.
  termVectorsReaderOrig = null;
@Override
public TermVectorsFormat termVectorsFormat() {
  // Forward to the wrapped codec; this wrapper does not alter term vectors.
  final TermVectorsFormat wrapped = delegate.termVectorsFormat();
  return wrapped;
}
@Override
public TermVectorsFormat termVectorsFormat() {
  // Forward to the wrapped codec; this wrapper does not alter term vectors.
  final TermVectorsFormat wrapped = delegate.termVectorsFormat();
  return wrapped;
}
@Override
public TermVectorsFormat termVectorsFormat() {
  // Forward to the wrapped codec; this wrapper does not alter term vectors.
  final TermVectorsFormat wrapped = delegate.termVectorsFormat();
  return wrapped;
}
/**
 * Merges the term vectors of every source segment into the destination segment.
 *
 * @return the number of documents whose term vectors were merged
 * @throws IOException if there is a low-level IO error
 */
private int mergeVectors() throws IOException {
  // try-with-resources guarantees the writer is closed (and any close failure
  // is suppressed, not masking a merge failure).
  try (TermVectorsWriter vectorsWriter =
      codec.termVectorsFormat().vectorsWriter(directory, mergeState.segmentInfo, context)) {
    return vectorsWriter.merge(mergeState);
  }
}
/**
 * Merges the term vectors of every source segment into the destination segment.
 *
 * @return the number of documents whose term vectors were merged
 * @throws IOException if there is a low-level IO error
 */
private int mergeVectors() throws IOException {
  // try-with-resources guarantees the writer is closed (and any close failure
  // is suppressed, not masking a merge failure).
  try (TermVectorsWriter vectorsWriter =
      codec.termVectorsFormat().vectorsWriter(directory, mergeState.segmentInfo, context)) {
    return vectorsWriter.merge(mergeState);
  }
}
/**
 * Merges the term vectors of every source segment into the destination segment.
 *
 * @return the number of documents whose term vectors were merged
 * @throws IOException if there is a low-level IO error
 */
private int mergeVectors() throws IOException {
  // try-with-resources guarantees the writer is closed (and any close failure
  // is suppressed, not masking a merge failure).
  try (TermVectorsWriter vectorsWriter =
      codec.termVectorsFormat().vectorsWriter(directory, mergeState.segmentInfo, context)) {
    return vectorsWriter.merge(mergeState);
  }
}
// Flush for the sorting consumer: vectors were buffered into tmpDirectory by
// initTermVectorsWriter(); here they are either renamed into place (already
// sorted) or re-written in sorted document order.
@Override
void flush(Map<String, TermsHashPerField> fieldsToFlush, final SegmentWriteState state,
    Sorter.DocMap sortMap) throws IOException {
  // Let the superclass finish writing the (unsorted) vectors first.
  super.flush(fieldsToFlush, state, sortMap);
  if (tmpDirectory != null) {
    if (sortMap == null) {
      // we're lucky the index is already sorted, just rename the temporary file and return
      for (Map.Entry<String, String> entry : tmpDirectory.getTemporaryFiles().entrySet()) {
        tmpDirectory.rename(entry.getValue(), entry.getKey());
      }
      return;
    }
    // Index needs sorting: read the temporary vectors back and re-write them
    // into the real directory in sorted doc-id order.
    TermVectorsReader reader = docWriter.codec.termVectorsFormat()
        .vectorsReader(tmpDirectory, state.segmentInfo, state.fieldInfos, IOContext.DEFAULT);
    TermVectorsReader mergeReader = reader.getMergeInstance();
    TermVectorsWriter writer = docWriter.codec.termVectorsFormat()
        .vectorsWriter(state.directory, state.segmentInfo, IOContext.DEFAULT);
    try {
      // Verify the temporary file before copying from it.
      reader.checkIntegrity();
      for (int docID = 0; docID < state.segmentInfo.maxDoc(); docID++) {
        // sortMap maps the new (sorted) doc id back to its original id.
        Fields vectors = mergeReader.get(sortMap.newToOld(docID));
        writeTermVectors(writer, vectors, state.fieldInfos);
      }
      writer.finish(state.fieldInfos, state.segmentInfo.maxDoc());
    } finally {
      IOUtils.close(reader, writer);
      // The temporary files are no longer needed once the sorted copy exists.
      IOUtils.deleteFiles(tmpDirectory, tmpDirectory.getTemporaryFiles().values());
    }
  }
}
/**
 * Lazily opens the {@link TermVectorsWriter} for the in-progress segment.
 * No-op when the writer has already been created.
 *
 * @throws IOException if the codec fails to open the writer
 */
private void initTermVectorsWriter() throws IOException {
  if (writer != null) {
    return; // already initialized
  }
  // Size the flush context from the current in-RAM doc count and bytes used.
  final IOContext context =
      new IOContext(new FlushInfo(docWriter.getNumDocsInRAM(), docWriter.bytesUsed()));
  writer =
      docWriter.codec
          .termVectorsFormat()
          .vectorsWriter(docWriter.directory, docWriter.getSegmentInfo(), context);
  lastDocID = 0;
}
/**
 * Lazily opens the {@link TermVectorsWriter} for the in-progress segment.
 * No-op when the writer has already been created.
 *
 * @throws IOException if the codec fails to open the writer
 */
void initTermVectorsWriter() throws IOException {
  if (writer != null) {
    return; // already initialized
  }
  // Size the flush context from the current in-RAM doc count and bytes used.
  final IOContext context =
      new IOContext(new FlushInfo(docWriter.getNumDocsInRAM(), docWriter.bytesUsed()));
  writer =
      docWriter.codec
          .termVectorsFormat()
          .vectorsWriter(docWriter.directory, docWriter.getSegmentInfo(), context);
  lastDocID = 0;
}
/**
 * Lazily opens the {@link TermVectorsWriter} for the in-progress segment.
 * No-op when the writer has already been created.
 *
 * @throws IOException if the codec fails to open the writer
 */
private void initTermVectorsWriter() throws IOException {
  if (writer != null) {
    return; // already initialized
  }
  // Size the flush context from the current in-RAM doc count and bytes used.
  final IOContext context =
      new IOContext(new FlushInfo(docWriter.getNumDocsInRAM(), docWriter.bytesUsed()));
  writer =
      docWriter.codec
          .termVectorsFormat()
          .vectorsWriter(docWriter.directory, docWriter.getSegmentInfo(), context);
  lastDocID = 0;
}
/**
 * Lazily opens the {@link TermVectorsWriter}, directing output into a tracking
 * temporary directory so that flush() can later sort or rename the files.
 *
 * @throws IOException if the codec fails to open the writer
 */
@Override
void initTermVectorsWriter() throws IOException {
  if (writer != null) {
    return; // already initialized
  }
  final IOContext context =
      new IOContext(new FlushInfo(docWriter.getNumDocsInRAM(), docWriter.bytesUsed()));
  // Buffer vectors in a temp wrapper; flush() sorts/renames them into place.
  tmpDirectory = new TrackingTmpOutputDirectoryWrapper(docWriter.directory);
  writer =
      docWriter.codec
          .termVectorsFormat()
          .vectorsWriter(tmpDirectory, docWriter.getSegmentInfo(), context);
  lastDocID = 0;
}
// NOTE(review): fragment — the opening `if` of this branch is outside this
// view. Opens the segment's term-vectors reader against cfsDir (presumably the
// compound-file directory — confirm against the enclosing condition); the else
// branch leaves the reader null.
termVectorsReaderOrig = si.info.getCodec().termVectorsFormat().vectorsReader(cfsDir, si.info, coreFieldInfos, context);
} else {
  // No term vectors for this segment.
  termVectorsReaderOrig = null;
// NOTE(review): fragment — the opening `if` of this branch is outside this
// view. Opens the segment's term-vectors reader against cfsDir (presumably the
// compound-file directory — confirm against the enclosing condition); the else
// branch leaves the reader null.
termVectorsReaderOrig = si.info.getCodec().termVectorsFormat().vectorsReader(cfsDir, si.info, coreFieldInfos, context);
} else {
  // No term vectors for this segment.
  termVectorsReaderOrig = null;
// NOTE(review): fragment — the opening `if` of this branch is outside this
// view. Opens the segment's term-vectors reader against cfsDir (presumably the
// compound-file directory — confirm against the enclosing condition); the else
// branch leaves the reader null.
termVectorsReaderOrig = si.info.getCodec().termVectorsFormat().vectorsReader(cfsDir, si.info, coreFieldInfos, context);
} else {
  // No term vectors for this segment.
  termVectorsReaderOrig = null;