/**
 * Delegates the position increment gap lookup to the analyzer wrapped for
 * the given field.
 *
 * @param fieldName name of the field being analyzed
 * @return the gap reported by the wrapped analyzer for that field
 */
@Override
public int getPositionIncrementGap(String fieldName) {
    Analyzer delegate = getWrappedAnalyzer(fieldName);
    return delegate.getPositionIncrementGap(fieldName);
}
if (position > 0) position += analyzer.getPositionIncrementGap(field.name());
@Override public int getPositionIncrementGap(String fieldName) { // use default from Analyzer base class if null return (posIncGap == null) ? super.getPositionIncrementGap(fieldName) : posIncGap.intValue(); }
invertState.position += docState.analyzer.getPositionIncrementGap(fieldInfo.name); invertState.offset += docState.analyzer.getOffsetGap(fieldInfo.name);
/**
 * Forwards the position increment gap request to the delegate analyzer.
 *
 * @param fieldName name of the field being analyzed
 * @return the delegate analyzer's gap for that field
 */
public int getPositionIncrementGap(String fieldName) {
    return this.analyzer.getPositionIncrementGap(fieldName);
}
/**
 * Returns the configured position increment gap, falling back to the
 * superclass default when none was set.
 *
 * @param fieldName name of the field being analyzed
 * @return the configured gap if present, otherwise the inherited default
 */
@Override
public int getPositionIncrementGap(String fieldName) {
    if (positionIncrementGap == null) {
        return super.getPositionIncrementGap(fieldName);
    }
    return positionIncrementGap;
}
lastPosition += posIncr.getPositionIncrement(); lastPosition += analyzer.getPositionIncrementGap(field); lastOffset += analyzer.getOffsetGap(field); } catch (IOException e) {
/**
 * Delegation shim: asks the underlying analyzer for the gap of the field.
 *
 * @param fieldName name of the field being analyzed
 * @return the underlying analyzer's position increment gap
 */
public int getPositionIncrementGap(String fieldName) {
    Analyzer target = analyzer;
    return target.getPositionIncrementGap(fieldName);
}
/**
 * Consumes the given token stream and appends one AnalyzeToken per emitted
 * token, while maintaining running {@code lastPosition}/{@code lastOffset}
 * state across calls so tokens from successive values/fields are numbered
 * the way the indexer would see them. Always closes the stream.
 * NOTE(review): relies on instance fields {@code lastPosition},
 * {@code lastOffset} and {@code tokens} declared elsewhere in the class.
 */
private void analyze(TokenStream stream, Analyzer analyzer, String field, Set<String> includeAttributes) {
    try {
        stream.reset();
        CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
        PositionIncrementAttribute posIncr = stream.addAttribute(PositionIncrementAttribute.class);
        OffsetAttribute offset = stream.addAttribute(OffsetAttribute.class);
        TypeAttribute type = stream.addAttribute(TypeAttribute.class);
        PositionLengthAttribute posLen = stream.addAttribute(PositionLengthAttribute.class);
        while (stream.incrementToken()) {
            int increment = posIncr.getPositionIncrement();
            // Only a positive increment advances the position; increment 0
            // stacks the token at the current position (e.g. synonyms).
            if (increment > 0) {
                lastPosition = lastPosition + increment;
            }
            tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition,
                lastOffset + offset.startOffset(), lastOffset + offset.endOffset(),
                posLen.getPositionLength(), type.type(),
                extractExtendedAttributes(stream, includeAttributes)));
        }
        stream.end();
        // Fold the end-state offsets/increments plus the analyzer's per-field
        // gaps into the running totals so the next value starts after this one.
        lastOffset += offset.endOffset();
        lastPosition += posIncr.getPositionIncrement();
        lastPosition += analyzer.getPositionIncrementGap(field);
        lastOffset += analyzer.getOffsetGap(field);
    } catch (IOException e) {
        throw new ElasticsearchException("failed to analyze", e);
    } finally {
        // Release analysis resources even if reset()/incrementToken() threw.
        IOUtils.closeWhileHandlingException(stream);
    }
}
/**
 * Returns the position increment gap from the analyzer assigned to
 * {@code fieldName}, using the default analyzer when no per-field
 * analyzer has been registered.
 *
 * @param fieldName name of the field being analyzed
 * @return the gap from the field's analyzer (or the default analyzer)
 */
public int getPositionIncrementGap(String fieldName) {
    Analyzer fieldAnalyzer = (Analyzer) analyzerMap.get(fieldName);
    if (fieldAnalyzer == null) {
        fieldAnalyzer = defaultAnalyzer;
    }
    return fieldAnalyzer.getPositionIncrementGap(fieldName);
}
/**
 * Looks up the analyzer mapped to the field (default analyzer if absent)
 * and reports that analyzer's position increment gap.
 *
 * @param fieldName name of the field being analyzed
 * @return the selected analyzer's position increment gap
 */
public int getPositionIncrementGap(String fieldName) {
    Analyzer chosen = (Analyzer) analyzerMap.get(fieldName);
    return (chosen != null ? chosen : defaultAnalyzer).getPositionIncrementGap(fieldName);
}
@Override public int getPositionIncrementGap(String fieldName) { // use default from Analyzer base class if null return (posIncGap == null) ? super.getPositionIncrementGap(fieldName) : posIncGap.intValue(); }
/**
 * Resolves the analyzer for the field and returns its position increment
 * gap. (Trailing brace closes the enclosing class, which is outside this
 * snippet.)
 *
 * @param fieldName name of the field being analyzed
 * @return the resolved analyzer's position increment gap
 */
@Override
public int getPositionIncrementGap(String fieldName) {
    Analyzer resolved = getAnalyzer(fieldName);
    return resolved.getPositionIncrementGap(fieldName);
}
}
/**
 * Wrapper override: the gap decision belongs to the wrapped analyzer, so
 * forward the query unchanged.
 *
 * @param fieldName name of the field being analyzed
 * @return the wrapped analyzer's position increment gap
 */
@Override
public int getPositionIncrementGap(String fieldName) {
    return getWrappedAnalyzer(fieldName).getPositionIncrementGap(fieldName);
}
/**
 * Passes the position increment gap query through to the per-field wrapped
 * analyzer.
 *
 * @param fieldName name of the field being analyzed
 * @return the wrapped analyzer's position increment gap
 */
@Override
public int getPositionIncrementGap(String fieldName) {
    Analyzer wrapped = getWrappedAnalyzer(fieldName);
    return wrapped.getPositionIncrementGap(fieldName);
}
/**
 * Final pass-through: subclasses customize behavior via
 * {@code getWrappedAnalyzer}, not by overriding this method.
 *
 * @param fieldName name of the field being analyzed
 * @return the wrapped analyzer's position increment gap
 */
@Override
public final int getPositionIncrementGap(String fieldName) {
    Analyzer wrapped = getWrappedAnalyzer(fieldName);
    return wrapped.getPositionIncrementGap(fieldName);
}
/**
 * Convenience method; tokenizes the given field text with the supplied
 * analyzer and adds the resulting terms to the index. Equivalent to adding
 * an indexed non-keyword Lucene {@link org.apache.lucene.document.Field}
 * that is tokenized, not stored, termVectorStored with positions (or
 * termVectorStored with positions and offsets).
 *
 * @param fieldName a name to be associated with the text
 * @param text      the text to tokenize and index
 * @param analyzer  the analyzer to use for tokenization
 */
public void addField(String fieldName, String text, Analyzer analyzer) {
    // Fail fast on missing arguments before touching the analyzer.
    if (fieldName == null) {
        throw new IllegalArgumentException("fieldName must not be null");
    }
    if (text == null) {
        throw new IllegalArgumentException("text must not be null");
    }
    if (analyzer == null) {
        throw new IllegalArgumentException("analyzer must not be null");
    }
    TokenStream stream = analyzer.tokenStream(fieldName, text);
    // storeTerms consumes the stream using the analyzer's per-field gaps.
    storeTerms(getInfo(fieldName, defaultFieldType), stream,
        analyzer.getPositionIncrementGap(fieldName),
        analyzer.getOffsetGap(fieldName));
}
/**
 * Consumes the token stream and appends one ExtendedAnalyzeToken per
 * emitted token, carrying running {@code lastPosition}/{@code lastOffset}
 * state across calls so multiple analyzed values are numbered contiguously.
 * Always closes the stream.
 * NOTE(review): {@code shortAttrName} is passed through to
 * {@code extractExtendedAttributes}; its semantics are defined there.
 */
private void analyze(TokenStream stream, Analyzer analyzer, String field, Set<String> includeAttributes, boolean shortAttrName) {
    try {
        stream.reset();
        CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
        PositionIncrementAttribute posIncr = stream.addAttribute(PositionIncrementAttribute.class);
        OffsetAttribute offset = stream.addAttribute(OffsetAttribute.class);
        TypeAttribute type = stream.addAttribute(TypeAttribute.class);
        while (stream.incrementToken()) {
            int increment = posIncr.getPositionIncrement();
            // Only positive increments advance the position; a zero increment
            // stacks the token at the current position.
            if (increment > 0) {
                lastPosition = lastPosition + increment;
            }
            tokens.add(new ExtendedAnalyzeResponse.ExtendedAnalyzeToken(term.toString(), lastPosition,
                lastOffset + offset.startOffset(), lastOffset + offset.endOffset(), type.type(),
                extractExtendedAttributes(stream, includeAttributes, shortAttrName)));
        }
        stream.end();
        // Accumulate end-state offsets/increments and the analyzer's per-field
        // gaps so the next analyzed value starts after this one.
        lastOffset += offset.endOffset();
        lastPosition += posIncr.getPositionIncrement();
        lastPosition += analyzer.getPositionIncrementGap(field);
        lastOffset += analyzer.getOffsetGap(field);
    } catch (IOException e) {
        throw new ElasticsearchException("failed to analyze", e);
    } finally {
        // Release analysis resources even when an earlier call threw.
        IOUtils.closeWhileHandlingException(stream);
    }
}
/**
 * Drains the token stream into the {@code tokens} list (one AnalyzeToken
 * per token), maintaining running {@code lastPosition}/{@code lastOffset}
 * state across invocations so tokens from successive values line up the
 * way the index writer would place them. Always closes the stream.
 */
private void analyze(TokenStream stream, Analyzer analyzer, String field, Set<String> includeAttributes) {
    try {
        stream.reset();
        CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
        PositionIncrementAttribute posIncr = stream.addAttribute(PositionIncrementAttribute.class);
        OffsetAttribute offset = stream.addAttribute(OffsetAttribute.class);
        TypeAttribute type = stream.addAttribute(TypeAttribute.class);
        PositionLengthAttribute posLen = stream.addAttribute(PositionLengthAttribute.class);
        while (stream.incrementToken()) {
            int increment = posIncr.getPositionIncrement();
            // Zero increments stack the token at the current position
            // (e.g. synonyms); only positive increments advance it.
            if (increment > 0) {
                lastPosition = lastPosition + increment;
            }
            tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition,
                lastOffset + offset.startOffset(), lastOffset + offset.endOffset(),
                posLen.getPositionLength(), type.type(),
                extractExtendedAttributes(stream, includeAttributes)));
        }
        stream.end();
        // Add the end-state offset/increment plus the analyzer's per-field
        // gaps so the next analyzed value starts after this one.
        lastOffset += offset.endOffset();
        lastPosition += posIncr.getPositionIncrement();
        lastPosition += analyzer.getPositionIncrementGap(field);
        lastOffset += analyzer.getOffsetGap(field);
    } catch (IOException e) {
        throw new ElasticsearchException("failed to analyze", e);
    } finally {
        // Close the stream regardless of how analysis ended.
        IOUtils.closeWhileHandlingException(stream);
    }
}
/**
 * Consumes the stream, recording each token as an AnalyzeToken while
 * threading running {@code lastPosition}/{@code lastOffset} counters
 * through successive calls so multi-value analysis stays contiguous.
 * Always closes the stream, even on failure.
 */
private void analyze(TokenStream stream, Analyzer analyzer, String field, Set<String> includeAttributes) {
    try {
        stream.reset();
        CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
        PositionIncrementAttribute posIncr = stream.addAttribute(PositionIncrementAttribute.class);
        OffsetAttribute offset = stream.addAttribute(OffsetAttribute.class);
        TypeAttribute type = stream.addAttribute(TypeAttribute.class);
        PositionLengthAttribute posLen = stream.addAttribute(PositionLengthAttribute.class);
        while (stream.incrementToken()) {
            int increment = posIncr.getPositionIncrement();
            // A zero increment stacks this token at the current position;
            // only positive increments move the cursor forward.
            if (increment > 0) {
                lastPosition = lastPosition + increment;
            }
            tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition,
                lastOffset + offset.startOffset(), lastOffset + offset.endOffset(),
                posLen.getPositionLength(), type.type(),
                extractExtendedAttributes(stream, includeAttributes)));
        }
        stream.end();
        // Roll the end-state and the analyzer's per-field gaps into the
        // running totals for the next analyzed value.
        lastOffset += offset.endOffset();
        lastPosition += posIncr.getPositionIncrement();
        lastPosition += analyzer.getPositionIncrementGap(field);
        lastOffset += analyzer.getOffsetGap(field);
    } catch (IOException e) {
        throw new ElasticsearchException("failed to analyze", e);
    } finally {
        // Ensure the stream is closed no matter what happened above.
        IOUtils.closeWhileHandlingException(stream);
    }
}