@Override public int compare(Suggestion.Entry.Option first, Suggestion.Entry.Option second) { // first criteria: the popularity int cmp = ((TermSuggestion.Entry.Option) second).getFreq() - ((TermSuggestion.Entry.Option) first).getFreq(); if (cmp != 0) { return cmp; } // second criteria (if first criteria is equal): the distance cmp = Float.compare(second.getScore(), first.getScore()); if (cmp != 0) { return cmp; } // third criteria: term text return first.getText().compareTo(second.getText()); }
// Fragment of an enclosing loop/branch (start and end are outside this view):
// registers the corrected phrase as a suggestion option. Presumably the
// four-arg overload is used when collation was requested so the collate-match
// flag is carried along, and the three-arg overload otherwise — TODO confirm
// against the enclosing method.
resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch)); } else { resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score)));
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    // Wrap the subclass-specific fields (innerToXContent) in one enclosing
    // JSON object; the builder's fluent endObject() hands back the same
    // builder instance for the caller to continue with.
    builder.startObject();
    innerToXContent(builder, params);
    return builder.endObject();
}
// Fragment of an enclosing loop/branch (start and end are outside this view):
// registers the corrected phrase as a suggestion option. Presumably the
// four-arg overload carries the collate-match flag when collation was
// requested; the three-arg overload is the no-collation path — TODO confirm
// against the enclosing method.
resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch)); } else { resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score)));
// Fragment of an enclosing loop/branch (start and end are outside this view):
// registers the corrected phrase as a suggestion option. Presumably the
// four-arg overload carries the collate-match flag when collation was
// requested; the three-arg overload is the no-collation path — TODO confirm
// against the enclosing method.
resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch)); } else { resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score)));
// Fragment of an enclosing loop/branch (start and end are outside this view):
// registers the corrected phrase as a suggestion option. Presumably the
// four-arg overload carries the collate-match flag when collation was
// requested; the three-arg overload is the no-collation path — TODO confirm
// against the enclosing method.
resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch)); } else { resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score)));
// Fragment starting mid-expression (the receiver of .getFreq() is outside this
// view): wraps a suggest option's text and its frequency-derived count into an
// AutoSuggestionEntry and collects it. NOTE(review): `count` appears to come
// from the preceding getFreq() call — confirm in the enclosing method.
.getFreq(); final AutoSuggestionEntry autoSuggestionEntry = new AutoSuggestionEntry( option.getText().string(), count); suggestions.add(autoSuggestionEntry);
// Fragment of an enclosing loop/branch (start and end are outside this view):
// registers the corrected phrase as a suggestion option. Presumably the
// four-arg overload carries the collate-match flag when collation was
// requested; the three-arg overload is the no-collation path — TODO confirm
// against the enclosing method.
resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch)); } else { resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score)));
// Fragment (enclosing scope not visible): builds an AutoSuggestionEntry from a
// suggest option's text plus a previously computed count and collects it.
// NOTE(review): `option`, `count`, and `suggestions` are defined outside this
// view — confirm their origin in the enclosing method.
AutoSuggestionEntry autoSuggestionEntry = new AutoSuggestionEntry(option.getText().string(), count); suggestions.add(autoSuggestionEntry);
// Merges per-shard suggest entries describing the same source text into the
// first entry ("leader"): options that compare equal are folded together via
// mergeInto, then the leader's option list is rebuilt from the de-duplicated
// set. Throws IllegalStateException when entry texts disagree, which points at
// suggest calls across indices with different analysis chains.
// NOTE(review): de-duplication relies on O's equals/hashCode identifying
// "the same" option — confirm against the concrete Option type.
protected <T extends Entry<O>> Entry<O> reduce(List<T> toReduce) { if (toReduce.size() == 1) { return toReduce.get(0); } final Map<O, O> entries = new HashMap<>(); Entry<O> leader = toReduce.get(0); for (Entry<O> entry : toReduce) { if (!leader.text.equals(entry.text)) { throw new IllegalStateException("Can't merge suggest entries, this might be caused by suggest calls " + "across multiple indices with different analysis chains. Suggest entries have different text actual [" + entry.text + "] expected [" + leader.text +"]"); } assert leader.offset == entry.offset; assert leader.length == entry.length; leader.merge(entry); for (O option : entry) { O merger = entries.get(option); if (merger == null) { entries.put(option, option); } else { merger.mergeInto(option); } } } leader.options.clear(); for (O option: entries.keySet()) { leader.addOption(option); } return leader; }
// Merges per-shard suggest entries describing the same source text into the
// first entry ("leader"): options that compare equal are folded together via
// mergeInto, then the leader's option list is rebuilt from the de-duplicated
// set. Throws IllegalStateException when entry texts disagree, which points at
// suggest calls across indices with different analysis chains.
// NOTE(review): de-duplication relies on O's equals/hashCode identifying
// "the same" option — confirm against the concrete Option type.
protected <T extends Entry<O>> Entry<O> reduce(List<T> toReduce) { if (toReduce.size() == 1) { return toReduce.get(0); } final Map<O, O> entries = new HashMap<>(); Entry<O> leader = toReduce.get(0); for (Entry<O> entry : toReduce) { if (!leader.text.equals(entry.text)) { throw new IllegalStateException("Can't merge suggest entries, this might be caused by suggest calls " + "across multiple indices with different analysis chains. Suggest entries have different text actual [" + entry.text + "] expected [" + leader.text +"]"); } assert leader.offset == entry.offset; assert leader.length == entry.length; leader.merge(entry); for (O option : entry) { O merger = entries.get(option); if (merger == null) { entries.put(option, option); } else { merger.mergeInto(option); } } } leader.options.clear(); for (O option: entries.keySet()) { leader.addOption(option); } return leader; }
// Merges per-shard suggest entries describing the same source text into the
// first entry ("leader"): options that compare equal are folded together via
// mergeInto, then the leader's option list is rebuilt from the de-duplicated
// set. Throws IllegalStateException when entry texts disagree, which points at
// suggest calls across indices with different analysis chains.
// NOTE(review): de-duplication relies on O's equals/hashCode identifying
// "the same" option — confirm against the concrete Option type.
protected <T extends Entry<O>> Entry<O> reduce(List<T> toReduce) { if (toReduce.size() == 1) { return toReduce.get(0); } final Map<O, O> entries = new HashMap<>(); Entry<O> leader = toReduce.get(0); for (Entry<O> entry : toReduce) { if (!leader.text.equals(entry.text)) { throw new IllegalStateException("Can't merge suggest entries, this might be caused by suggest calls " + "across multiple indices with different analysis chains. Suggest entries have different text actual [" + entry.text + "] expected [" + leader.text +"]"); } assert leader.offset == entry.offset; assert leader.length == entry.length; leader.merge(entry); for (O option : entry) { O merger = entries.get(option); if (merger == null) { entries.put(option, option); } else { merger.mergeInto(option); } } } leader.options.clear(); for (O option: entries.keySet()) { leader.addOption(option); } return leader; }
// Merges per-shard suggest entries describing the same source text into the
// first entry ("leader"): options that compare equal are folded together via
// mergeInto, then the leader's option list is rebuilt from the de-duplicated
// set. Throws IllegalStateException when entry texts disagree, which points at
// suggest calls across indices with different analysis chains.
// NOTE(review): de-duplication relies on O's equals/hashCode identifying
// "the same" option — confirm against the concrete Option type.
protected <T extends Entry<O>> Entry<O> reduce(List<T> toReduce) { if (toReduce.size() == 1) { return toReduce.get(0); } final Map<O, O> entries = new HashMap<>(); Entry<O> leader = toReduce.get(0); for (Entry<O> entry : toReduce) { if (!leader.text.equals(entry.text)) { throw new IllegalStateException("Can't merge suggest entries, this might be caused by suggest calls " + "across multiple indices with different analysis chains. Suggest entries have different text actual [" + entry.text + "] expected [" + leader.text +"]"); } assert leader.offset == entry.offset; assert leader.length == entry.length; leader.merge(entry); for (O option : entry) { O merger = entries.get(option); if (merger == null) { entries.put(option, option); } else { merger.mergeInto(option); } } } leader.options.clear(); for (O option: entries.keySet()) { leader.addOption(option); } return leader; }
@Override
public List<String> findSimilarWords(String searchQuery) {
    // Ask for at most one term suggestion per token, within two edits,
    // suggesting even for terms already present in the index (ALWAYS mode).
    final SearchResponse response = client.prepareSearch(INDEX_NAME)
            .suggest(new SuggestBuilder()
                    .addSuggestion(FIELD_NAME, SuggestBuilders
                            .termSuggestion(FIELD_NAME)
                            .maxEdits(2)
                            .suggestMode(SuggestMode.ALWAYS)
                            .size(1)
                            .text(searchQuery)))
            .get();

    final List<String> similarWords = new ArrayList<>();
    final Suggest suggest = response.getSuggest();
    if (suggest == null) {
        // No suggest section in the response — nothing to collect.
        return similarWords;
    }
    // Flatten every option of every entry into a plain list of strings.
    Suggestion<? extends Entry<? extends Option>> suggestion = suggest.getSuggestion(FIELD_NAME);
    for (Entry<? extends Option> entry : suggestion.getEntries()) {
        for (Option candidate : entry.getOptions()) {
            similarWords.add(candidate.getText().string());
        }
    }
    return similarWords;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
    // Stream layout must mirror readFrom exactly: super fields, score doc,
    // optional-hit flag (+ hit), then the context map as
    // (name, valueCount, values...) groups.
    super.writeTo(out);
    Lucene.writeScoreDoc(out, doc);
    final boolean hasHit = hit != null;
    out.writeBoolean(hasHit);
    if (hasHit) {
        hit.writeTo(out);
    }
    out.writeInt(contexts.size());
    for (Map.Entry<String, Set<CharSequence>> context : contexts.entrySet()) {
        out.writeString(context.getKey());
        final Set<CharSequence> values = context.getValue();
        out.writeVInt(values.size());
        for (CharSequence value : values) {
            out.writeString(value.toString());
        }
    }
}
@Override
public void readFrom(StreamInput in) throws IOException {
    // Must consume the stream in exactly the order writeTo produced it.
    super.readFrom(in);
    this.doc = Lucene.readScoreDoc(in);
    final boolean hasHit = in.readBoolean();
    if (hasHit) {
        this.hit = SearchHit.readSearchHit(in);
    }
    // Rebuild the context map; LinkedHashMap preserves the serialized order.
    final int contextCount = in.readInt();
    this.contexts = new LinkedHashMap<>(contextCount);
    for (int i = 0; i < contextCount; i++) {
        final String contextName = in.readString();
        final int valueCount = in.readVInt();
        final Set<CharSequence> values = new HashSet<>(valueCount);
        for (int j = 0; j < valueCount; j++) {
            values.add(in.readString());
        }
        this.contexts.put(contextName, values);
    }
}
@Override
public void readFrom(StreamInput in) throws IOException {
    // Deserialization mirror of writeTo — fields are read in the same order
    // they were written.
    super.readFrom(in);
    this.doc = Lucene.readScoreDoc(in);
    if (in.readBoolean()) {
        // Flag was true: a serialized SearchHit follows.
        this.hit = SearchHit.readSearchHit(in);
    }
    final int contextCount = in.readInt();
    this.contexts = new LinkedHashMap<>(contextCount);
    for (int i = 0; i < contextCount; i++) {
        final String name = in.readString();
        final int size = in.readVInt();
        final Set<CharSequence> values = new HashSet<>(size);
        for (int j = 0; j < size; j++) {
            values.add(in.readString());
        }
        this.contexts.put(name, values);
    }
}
@Override
public void writeTo(StreamOutput out) throws IOException {
    // Serialization mirror of readFrom — keep the field order stable.
    super.writeTo(out);
    Lucene.writeScoreDoc(out, doc);
    if (hit == null) {
        out.writeBoolean(false);
    } else {
        out.writeBoolean(true);
        hit.writeTo(out);
    }
    // Contexts: count, then (name, valueCount, values...) per context.
    out.writeInt(contexts.size());
    for (Map.Entry<String, Set<CharSequence>> context : contexts.entrySet()) {
        out.writeString(context.getKey());
        out.writeVInt(context.getValue().size());
        for (CharSequence value : context.getValue()) {
            out.writeString(value.toString());
        }
    }
}
@Override
public void writeTo(StreamOutput out) throws IOException {
    // Wire format (must match readFrom): super fields, score doc,
    // hit-present flag and optional hit, then each context as
    // name + value count + values.
    super.writeTo(out);
    Lucene.writeScoreDoc(out, doc);
    final boolean hitPresent = hit != null;
    out.writeBoolean(hitPresent);
    if (hitPresent) {
        hit.writeTo(out);
    }
    out.writeInt(contexts.size());
    for (Map.Entry<String, Set<CharSequence>> ctx : contexts.entrySet()) {
        out.writeString(ctx.getKey());
        final Set<CharSequence> ctxValues = ctx.getValue();
        out.writeVInt(ctxValues.size());
        for (CharSequence ctxValue : ctxValues) {
            out.writeString(ctxValue.toString());
        }
    }
}
@Override
public void readFrom(StreamInput in) throws IOException {
    // Reads fields in the exact order writeTo emitted them.
    super.readFrom(in);
    this.doc = Lucene.readScoreDoc(in);
    final boolean hitPresent = in.readBoolean();
    if (hitPresent) {
        this.hit = SearchHit.readSearchHit(in);
    }
    final int numContexts = in.readInt();
    this.contexts = new LinkedHashMap<>(numContexts);
    for (int i = 0; i < numContexts; i++) {
        final String ctxName = in.readString();
        final int numValues = in.readVInt();
        final Set<CharSequence> ctxValues = new HashSet<>(numValues);
        for (int j = 0; j < numValues; j++) {
            ctxValues.add(in.readString());
        }
        this.contexts.put(ctxName, ctxValues);
    }
}