/** Delegates stored-field retrieval for {@code docID} to the wrapped reader. */
@Override
public void visitDocument(int docID, StoredFieldVisitor visitor) throws IOException {
    reader.document(docID, visitor);
}
/**
 * Loads the stored document for {@code docID} from {@code reader} into
 * {@code currentDoc}, rethrowing any I/O failure as an unchecked exception.
 */
private void updateCurrentDocument( int docID, LeafReader reader )
{
    try
    {
        currentDoc = reader.document( docID );
    }
    catch ( IOException e )
    {
        throw new RuntimeException( e );
    }
} }
/** Verifies the reader is still open, then delegates stored-field access to the inner reader. */
@Override
public void document(int docID, StoredFieldVisitor visitor) throws IOException {
    ensureOpen();
    in.document(docID, visitor);
}
/**
 * Visits the stored fields of {@code docID} in every parallel reader; each reader
 * contributes its own subset of fields to the visitor.
 */
@Override
public void document(int docID, StoredFieldVisitor visitor) throws IOException {
    ensureOpen();
    for (final LeafReader storedFieldsReader : storedFieldsReaders) {
        storedFieldsReader.document(docID, visitor);
    }
}
/** Maps the (re-sorted) doc id back to the original ordering before delegating. */
@Override
public void document(final int docID, final StoredFieldVisitor visitor) throws IOException {
    final int oldDocID = docMap.newToOld(docID);
    in.document(oldDocID, visitor);
}
/** Records the node id stored in the matching document. */
@Override
public void collect( int doc ) throws IOException
{
    Document document = reader.document( doc );
    nodeIds.add( LuceneDocumentStructure.getNodeId( document ) );
}
/**
 * Reads the node id and its indexed property value for the matched document and
 * feeds them to the duplicate-check strategy, which throws on a uniqueness violation.
 */
protected void doCollect( int doc ) throws IOException, KernelException, IndexEntryConflictException
{
    long nodeId = LuceneDocumentStructure.getNodeId( reader.document( doc ) );
    Value value = accessor.getNodePropertyValue( nodeId, propertyKeyId );
    duplicateCheckStrategy.checkForDuplicate( value, nodeId );
}
/**
 * Reads the node id and the values of all composite-key properties for the matched
 * document, then hands them to the duplicate-check strategy, which throws on a
 * uniqueness violation.
 */
@Override
protected void doCollect( int doc ) throws IOException, KernelException, IndexEntryConflictException
{
    long nodeId = LuceneDocumentStructure.getNodeId( reader.document( doc ) );
    Value[] values = new Value[propertyKeyIds.length];
    for ( int i = 0; i < propertyKeyIds.length; i++ )
    {
        values[i] = accessor.getNodePropertyValue( nodeId, propertyKeyIds[i] );
    }
    duplicateCheckStrategy.checkForDuplicate( values, nodeId );
} }
/**
 * Returns the next matching document, or {@code null} once all matches are
 * exhausted. Iterates instead of recursing: whenever the current doc id iterator
 * runs dry, the per-segment state is cleared and the next segment is tried.
 */
@Override
protected Document fetchNextOrNull()
{
    while ( ensureValidDisi() )
    {
        try
        {
            int doc = currentIdIterator.nextDoc();
            if ( doc != DocIdSetIterator.NO_MORE_DOCS )
            {
                return currentReader.document( doc );
            }
            currentIdIterator = null;
            currentScorer = null;
            currentReader = null;
        }
        catch ( IOException e )
        {
            throw new RuntimeException( e );
        }
    }
    return null;
}
/**
 * Loads the stored document for {@code docID} from {@code reader} into
 * {@code currentDoc}, rethrowing any I/O failure as an unchecked exception.
 */
private void updateCurrentDocument( int docID, LeafReader reader )
{
    try
    {
        currentDoc = reader.document( docID );
    }
    catch ( IOException e )
    {
        throw new RuntimeException( e );
    }
} }
// Exercise stored-field and term-vector retrieval for the first document.
// NOTE(review): fragment without surrounding context — presumably a smoke check
// that the reader can serve both calls; the return values are ignored. Confirm
// against the enclosing method.
reader.document(0); reader.getTermVectors(0);
/**
 * Returns {@code true} when the document's id/routing resolve to a shard other
 * than the one this reader belongs to (i.e. the document should be filtered out).
 */
boolean matches(int doc) throws IOException {
    routing = null;
    id = null;
    leftToVisit = 2;
    leafReader.document(doc, this);
    assert id != null : "docID must not be null - we might have hit a nested document";
    final int targetShardId = OperationRouting.generateShardId(indexMetaData, id, routing);
    return shardId != targetShardId;
} }
/**
 * Resets the visitor and loads the stored fields of {@code docId} from the segment
 * reader, translating any I/O failure into a fetch-phase execution exception.
 */
private void loadStoredFields(SearchContext searchContext, LeafReaderContext readerContext,
                              FieldsVisitor fieldVisitor, int docId) {
    fieldVisitor.reset();
    try {
        readerContext.reader().document(docId, fieldVisitor);
    } catch (IOException e) {
        throw new FetchPhaseExecutionException(searchContext, "Failed to fetch doc id [" + docId + "]", e);
    }
} }
/**
 * Returns the next matching document, or {@code null} once all matches are
 * exhausted. Iterates instead of recursing: whenever the current doc id iterator
 * runs dry, the per-segment state is cleared and the next segment is tried.
 */
@Override
protected Document fetchNextOrNull()
{
    while ( ensureValidDisi() )
    {
        try
        {
            int doc = currentIdIterator.nextDoc();
            if ( doc != DocIdSetIterator.NO_MORE_DOCS )
            {
                return currentReader.document( doc );
            }
            currentIdIterator = null;
            currentScorer = null;
            currentReader = null;
        }
        catch ( IOException e )
        {
            throw new RuntimeException( e );
        }
    }
    return null;
}
/**
 * Returns the (possibly cached) stored-field lookup for {@code name}, loading the
 * field's stored values from the current document on first access.
 *
 * @throws IllegalArgumentException if the field is not present in the mapping
 */
private FieldLookup loadFieldData(String name) {
    FieldLookup data = cachedFieldData.get(name);
    if (data == null) {
        final MappedFieldType fieldType = mapperService.fullName(name);
        if (fieldType == null) {
            throw new IllegalArgumentException("No field found for [" + name + "] in mapping with types " + Arrays.toString(types));
        }
        data = new FieldLookup(fieldType);
        cachedFieldData.put(name, data);
    }
    if (data.fields() != null) {
        return data;
    }
    final String fieldName = data.fieldType().name();
    // Single-type indices store the uid under the id field, so redirect the lookup there.
    final String lookupField =
            (singleType && UidFieldMapper.NAME.equals(fieldName)) ? IdFieldMapper.NAME : fieldName;
    fieldVisitor.reset(lookupField);
    try {
        reader.document(docId, fieldVisitor);
        fieldVisitor.postProcess(mapperService);
        List<Object> storedFields = fieldVisitor.fields().get(fieldName);
        data.fields(singletonMap(fieldName, storedFields));
    } catch (IOException e) {
        throw new ElasticsearchParseException("failed to load field [{}]", e, name);
    }
    return data;
}
/**
 * Lazily loads and parses the document {@code _source}, caching the parsed map and
 * its content type. Resolution order: the already-parsed {@code source} map, then
 * the raw {@code sourceAsBytes}, then the stored source field read from the index.
 *
 * @return the parsed source map, or an empty map when the document has no source
 * @throws ElasticsearchParseException if reading or parsing the stored source fails
 */
private Map<String, Object> loadSourceIfNeeded() {
    if (source != null) {
        return source;
    }
    if (sourceAsBytes != null) {
        Tuple<XContentType, Map<String, Object>> tuple = sourceAsMapAndType(sourceAsBytes);
        sourceContentType = tuple.v1();
        source = tuple.v2();
        return source;
    }
    try {
        FieldsVisitor sourceFieldVisitor = new FieldsVisitor(true);
        reader.document(docId, sourceFieldVisitor);
        // Fix: the local was previously named "source", shadowing the field of the
        // same name that the branches below assign through "this.source".
        BytesReference loadedSource = sourceFieldVisitor.source();
        if (loadedSource == null) {
            this.source = emptyMap();
            this.sourceContentType = null;
        } else {
            Tuple<XContentType, Map<String, Object>> tuple = sourceAsMapAndType(loadedSource);
            this.sourceContentType = tuple.v1();
            this.source = tuple.v2();
        }
    } catch (Exception e) {
        throw new ElasticsearchParseException("failed to parse / load source", e);
    }
    return this.source;
}
/** * Load field values for highlighting. */ public static List<Object> loadFieldValues(SearchContextHighlight.Field field, MappedFieldType fieldType, SearchContext searchContext, FetchSubPhase.HitContext hitContext) throws IOException { //percolator needs to always load from source, thus it sets the global force source to true boolean forceSource = searchContext.highlight().forceSource(field); List<Object> textsToHighlight; if (!forceSource && fieldType.stored()) { CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(fieldType.name()), false); hitContext.reader().document(hitContext.docId(), fieldVisitor); textsToHighlight = fieldVisitor.fields().get(fieldType.name()); if (textsToHighlight == null) { // Can happen if the document doesn't have the field to highlight textsToHighlight = Collections.emptyList(); } } else { SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId()); textsToHighlight = sourceLookup.extractRawValues(fieldType.name()); } assert textsToHighlight != null; return textsToHighlight; }
// Loads the stored fields of the resolved document into the supplied visitor.
// NOTE(review): fragment — the catch block is cut off at this chunk boundary.
// The IOException is wrapped with the requested type and id for diagnostic context.
if (fieldVisitor != null) { try { docIdAndVersion.reader.document(docIdAndVersion.docId, fieldVisitor); } catch (IOException e) { throw new ElasticsearchException("Failed to get type [" + type + "] and id [" + id + "]", e);
/**
 * Visits the stored fields of {@code docID} in every parallel reader; each reader
 * contributes its own subset of fields to the visitor.
 */
@Override
public void document(int docID, StoredFieldVisitor visitor) throws IOException {
    ensureOpen();
    for (final LeafReader storedFieldsReader : storedFieldsReaders) {
        storedFieldsReader.document(docID, visitor);
    }
}
// NOTE(review): fragment — the expression ending in "SourceFieldMapper.NAME;" is
// cut off at this chunk boundary. The remainder reads the stored fields of
// segmentDocID into a FieldsVisitor (first ctor arg true — presumably "load
// source", confirm against FieldsVisitor) and post-processes them with the
// mapper service.
SourceFieldMapper.NAME; final FieldsVisitor fields = new FieldsVisitor(true, sourceField); leaf.reader().document(segmentDocID, fields); fields.postProcess(mapperService);