tokenizer = @TokenizerDef(factory = KeywordTokenizerFactory.class), filters = { @TokenFilterDef(factory = ClassicFilterFactory.class), @TokenFilterDef(factory = LowerCaseFilterFactory.class) }), @AnalyzerDef(name = LuceneAnalyzers.EXACT_ANALYZER, tokenizer = @TokenizerDef(factory = WhitespaceTokenizerFactory.class), filters = { @TokenFilterDef(factory = ClassicFilterFactory.class), @TokenFilterDef(factory = LowerCaseFilterFactory.class) }), @AnalyzerDef(name = LuceneAnalyzers.START_ANALYZER, tokenizer = @TokenizerDef(factory = WhitespaceTokenizerFactory.class), filters = { @TokenFilterDef(factory = ClassicFilterFactory.class), @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = { @Parameter(name = "minGramSize", value = "2"), @Parameter(name = "maxGramSize", value = "20") }) tokenizer = @TokenizerDef(factory = WhitespaceTokenizerFactory.class), filters = { @TokenFilterDef(factory = ClassicFilterFactory.class), @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = NGramFilterFactory.class, params = { @Parameter(name = "minGramSize", value = "2"), @Parameter(name = "maxGramSize", value = "20") })
@Override
public TokenFilterDefinition translate(TokenFilterDef hibernateSearchDef) {
	// Look up the Elasticsearch-side definition factory registered under the
	// fully-qualified name of the Lucene token filter factory class.
	Class<? extends TokenFilterFactory> luceneFactoryClass = hibernateSearchDef.factory();
	AnalysisDefinitionFactory<TokenFilterDefinition> definitionFactory =
			luceneTokenFilters.get( luceneFactoryClass.getName() );
	if ( definitionFactory == null ) {
		// No translation is known for this Lucene factory: fail with a dedicated error.
		throw log.unsupportedTokenFilterFactory( luceneFactoryClass );
	}
	// Hand the annotation parameters to the factory as a fresh mutable map.
	Map<String, String> parameters =
			ParameterAnnotationsReader.toNewMutableMap( hibernateSearchDef.params() );
	return definitionFactory.create( parameters );
}
private String registerTokenFilterDef(String analyzerDefinitionName, TokenFilterDef hibernateSearchDef) { String remoteName = hibernateSearchDef.name(); TokenFilterDefinition elasticsearchDefinition = translator.translate( hibernateSearchDef ); if ( remoteName.isEmpty() && !hasParameters( elasticsearchDefinition ) ) { // No parameters, and no specific name was provided => Use the builtin, default definition remoteName = elasticsearchDefinition.getType(); } else { if ( remoteName.isEmpty() ) { remoteName = analyzerDefinitionName + "_" + hibernateSearchDef.factory().getSimpleName(); } registry.register( remoteName, elasticsearchDefinition ); } return remoteName; }
@TokenFilterDef(factory = StandardFilterFactory.class), @TokenFilterDef(factory = LowerCaseFilterFactory.class) }) @Analyzer(definition = "ConceptNameAnalyzer") public class ConceptName extends BaseOpenmrsObject implements Auditable, Voidable, java.io.Serializable {
@Override
public TokenFilterDefinition translate(TokenFilterDef hibernateSearchDef) {
	Class<? extends TokenFilterFactory> factoryType = hibernateSearchDef.factory();
	// The registry is keyed by the Lucene factory's fully-qualified class name.
	AnalysisDefinitionFactory<TokenFilterDefinition> matchingFactory =
			luceneTokenFilters.get( factoryType.getName() );
	if ( matchingFactory == null ) {
		throw log.unsupportedTokenFilterFactory( factoryType );
	}
	// Convert the @Parameter annotations to a mutable map the factory may adjust.
	Map<String, String> parameterMap =
			ParameterAnnotationsReader.toNewMutableMap( hibernateSearchDef.params() );
	return matchingFactory.create( parameterMap );
}
private String registerTokenFilterDef(String analyzerDefinitionName, TokenFilterDef hibernateSearchDef) { String remoteName = hibernateSearchDef.name(); TokenFilterDefinition elasticsearchDefinition = translator.translate( hibernateSearchDef ); if ( remoteName.isEmpty() && !hasParameters( elasticsearchDefinition ) ) { // No parameters, and no specific name was provided => Use the builtin, default definition remoteName = elasticsearchDefinition.getType(); } else { if ( remoteName.isEmpty() ) { remoteName = analyzerDefinitionName + "_" + hibernateSearchDef.factory().getSimpleName(); } registry.register( remoteName, elasticsearchDefinition ); } return remoteName; }
@AnalyzerDef(name = "textAnalyzer", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class) , filters = { @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = { @Parameter(name = "language", value = "English") }) }) public abstract class AbstractBookEntity {
/**
 * Assembles a Lucene {@code Analyzer} from the given tokenizer factory plus the
 * char filter and token filter definitions, instantiating each component via
 * {@code buildAnalysisComponent} and chaining them in a {@code TokenizerChain}.
 *
 * @throws IOException if instantiating an analysis component fails
 */
private Analyzer buildAnalyzer(TokenizerFactory tokenizerFactory, CharFilterDef[] charFilterDefs, TokenFilterDef[] filterDefs) throws IOException {
	// Instantiate one TokenFilterFactory per token filter definition, preserving order.
	TokenFilterFactory[] tokenFilterFactories = new TokenFilterFactory[filterDefs.length];
	for ( int i = 0; i < filterDefs.length; i++ ) {
		TokenFilterDef def = filterDefs[i];
		tokenFilterFactories[i] = buildAnalysisComponent( TokenFilterFactory.class, def.factory(), def.params() );
	}
	// Likewise for the char filters applied before tokenization.
	CharFilterFactory[] charFilterFactories = new CharFilterFactory[charFilterDefs.length];
	for ( int i = 0; i < charFilterDefs.length; i++ ) {
		CharFilterDef def = charFilterDefs[i];
		charFilterFactories[i] = buildAnalysisComponent( CharFilterFactory.class, def.factory(), def.params() );
	}
	return new TokenizerChain( charFilterFactories, tokenizerFactory, tokenFilterFactories );
}
@Indexed @AnalyzerDef(name = "textAnalyzer", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class) , filters = { @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = { @Parameter(name = "language", value = "English") }) }) public class BookEntity {
/**
 * Builds an {@code Analyzer} by materializing the configured char filters and
 * token filters around the supplied tokenizer factory.
 *
 * @throws IOException if an analysis component cannot be created
 */
private Analyzer buildAnalyzer(TokenizerFactory tokenizerFactory, CharFilterDef[] charFilterDefs, TokenFilterDef[] filterDefs) throws IOException {
	final int tokenFilterCount = filterDefs.length;
	TokenFilterFactory[] tokenFilters = new TokenFilterFactory[tokenFilterCount];
	// Token filters run after tokenization; keep the declared order.
	for ( int idx = 0; idx < tokenFilterCount; idx++ ) {
		tokenFilters[idx] = buildAnalysisComponent(
				TokenFilterFactory.class, filterDefs[idx].factory(), filterDefs[idx].params() );
	}
	final int charFilterCount = charFilterDefs.length;
	CharFilterFactory[] charFilters = new CharFilterFactory[charFilterCount];
	// Char filters pre-process the raw character stream, also in declared order.
	for ( int idx = 0; idx < charFilterCount; idx++ ) {
		charFilters[idx] = buildAnalysisComponent(
				CharFilterFactory.class, charFilterDefs[idx].factory(), charFilterDefs[idx].params() );
	}
	return new TokenizerChain( charFilters, tokenizerFactory, tokenFilters );
}
), filters = { @TokenFilterDef( factory = ASCIIFoldingFilterFactory.class ), @TokenFilterDef( factory = LowerCaseFilterFactory.class
@TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = StopFilterFactory.class), @TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = { @Parameter(name = "minGramSize", value = "3"), @Parameter(name = "maxGramSize", value = "30") @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = StopFilterFactory.class), @TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = { @Parameter(name = "minGramSize", value = "4"), @Parameter(name = "maxGramSize", value = "8")
@TokenFilterDef(factory = StandardFilterFactory.class), @TokenFilterDef(factory = LowerCaseFilterFactory.class) @Parameter(name = "minGramSize", value = "1"), @Parameter(name = "maxGramSize", value = "1") }), filters = { @TokenFilterDef( factory = LowerCaseFilterFactory.class) }) })
// Indexed Member subclass; its "customanalyzer" tokenizes with the standard
// tokenizer and applies only lower-casing.
@Entity
@Indexed
@Inheritance(strategy = TABLE_PER_CLASS)
@AnalyzerDef(
		name = "customanalyzer",
		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
		filters = { @TokenFilterDef(factory = LowerCaseFilterFactory.class) })
public class SolrMember extends Member {
	/** Default value included to remove warning. Remove or modify at will. **/
	private static final long serialVersionUID = 1L;
}
tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class), filters = { @TokenFilterDef(factory = TrimFilterFactory.class), @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = StopFilterFactory.class), @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = { @Parameter(name = "language", value = "English") }), @TokenFilterDef( factory = RemoveDuplicatesTokenFilterFactory.class) })
), filters = { @TokenFilterDef( factory = ASCIIFoldingFilterFactory.class ), @TokenFilterDef( factory = LowerCaseFilterFactory.class ), @TokenFilterDef( factory = StopFilterFactory.class ), @TokenFilterDef( factory = PorterStemFilterFactory.class
@TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = StopFilterFactory.class, params = { @Parameter(name = "words", @Parameter(name = "ignoreCase", value = "true") }), @TokenFilterDef(factory = StopFilterFactory.class, params = { @Parameter(name = "words", @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = @Parameter(name = "language", value = "Russian")) tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class), filters = { @TokenFilterDef(factory = StandardFilterFactory.class), @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = StopFilterFactory.class, params = { @Parameter(name = "words", @Parameter(name = "ignoreCase", value = "true") }), @TokenFilterDef(factory = StopFilterFactory.class, params = {
tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class), filters = { @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = { @Parameter(name = "language", value = "English") }) tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class), filters = { @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = GermanStemFilterFactory.class) }) public class Article {
@TokenFilterDef(factory = StopFilterFactory.class) }) @TokenFilterDef(factory = LowerCaseFilterFactory.class)
tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class), filters = { @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = SnowballPorterFilterFactory.class tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class), filters = { @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = GermanStemFilterFactory.class) }) })