// NOTE(review): the three lines below are mid-method fragments of Snowball-generated
// stemmer code (the main stem() driver and r_mark_regions()); the enclosing method
// headers and closing braces are outside this view. The labels (lab0/lab3/lab4/lab6),
// the dangling "while (false)", and the orphaned "break; case 2:" all belong to code
// not shown here, so the fragments are left byte-identical.
lab0: do { if (!r_mark_regions()) lab3: do { if (!r_perfective_gerund()) lab4: do { if (!r_reflexive()) lab6: do { if (!r_adjectival()) if (!r_verb()) if (!r_noun()) if (!(eq_s_b(1, "\u0438"))) slice_del(); } while (false); if (!r_derivational()) if (!r_tidy_up())
among_var = find_among_b(a_4, 46); if (among_var == 0) lab1: do { if (!(eq_s_b(1, "\u0430"))) cursor = limit - v_1; if (!(eq_s_b(1, "\u044F"))) slice_del(); break; case 2: slice_del(); break;
in_grouping/out_grouping alternation over the Russian vowel range U+0430..U+043F (1072..1103): */ if (!(in_grouping(g_v, 1072, 1103))) if (!(out_grouping(g_v, 1072, 1103))) if (!(in_grouping(g_v, 1072, 1103))) if (!(out_grouping(g_v, 1072, 1103)))
private boolean r_reflexive() { int among_var; // (, line 128 // [, line 129 ket = cursor; // substring, line 129 among_var = find_among_b(a_3, 2); if (among_var == 0) { return false; } // ], line 129 bra = cursor; switch(among_var) { case 0: return false; case 1: // (, line 132 // delete, line 132 slice_del(); break; } return true; }
private boolean r_derivational() { int among_var; // (, line 175 // [, line 176 ket = cursor; // substring, line 176 among_var = find_among_b(a_6, 2); if (among_var == 0) { return false; } // ], line 176 bra = cursor; // call R2, line 176 if (!r_R2()) { return false; } switch(among_var) { case 0: return false; case 1: // (, line 179 // delete, line 179 slice_del(); break; } return true; }
/**
 * Creates
 * {@link org.apache.lucene.analysis.Analyzer.TokenStreamComponents}
 * used to tokenize all the text in the provided {@link Reader}.
 *
 * @return {@link org.apache.lucene.analysis.Analyzer.TokenStreamComponents}
 *         built from a {@link StandardTokenizer} filtered with
 *         {@link LowerCaseFilter}, {@link StopFilter}
 *         , {@link SetKeywordMarkerFilter} if a stem exclusion set is
 *         provided, and {@link SnowballFilter}
 */
@Override
protected TokenStreamComponents createComponents(String fieldName) {
  final Tokenizer source = new StandardTokenizer();
  // Filter chain: lowercase -> stopwords -> (optional keyword marking) -> stemming.
  TokenStream stream = new LowerCaseFilter(source);
  stream = new StopFilter(stream, stopwords);
  if (!stemExclusionSet.isEmpty()) {
    // Protect excluded terms from the stemmer below.
    stream = new SetKeywordMarkerFilter(stream, stemExclusionSet);
  }
  stream = new SnowballFilter(stream, new org.tartarus.snowball.ext.RussianStemmer());
  return new TokenStreamComponents(source, stream);
}
// NOTE(review): mid-method fragment of the Snowball-generated r_adjectival()/r_case
// routine; the surrounding switch and do-while that give the trailing
// "break; case 2:" meaning are outside this view. Left byte-identical.
if (!r_adjective()) among_var = find_among_b(a_2, 8); if (among_var == 0) lab2: do { if (!(eq_s_b(1, "\u0430"))) cursor = limit - v_2; if (!(eq_s_b(1, "\u044F"))) slice_del(); break; case 2: slice_del(); break;
// Thin adapter: wraps the Snowball-generated RussianStemmer and tags it with its
// language. The enclosing class declaration is outside this view.
public RussianSnowballStemmer() { super(new RussianStemmer()); } /** @return the language this stemmer handles (always {@code Language.RUSSIAN}). */ public Language getLanguage() { return Language.RUSSIAN; }
// NOTE(review): duplicate of the r_adjectival() fragment above — incomplete snippet
// whose enclosing switch/do-while is not visible here. Left byte-identical.
if (!r_adjective()) among_var = find_among_b(a_2, 8); if (among_var == 0) lab2: do { if (!(eq_s_b(1, "\u0430"))) cursor = limit - v_2; if (!(eq_s_b(1, "\u044F"))) slice_del(); break; case 2: slice_del(); break;
private boolean r_noun() { int among_var; // (, line 159 // [, line 160 ket = cursor; // substring, line 160 among_var = find_among_b(a_5, 36); if (among_var == 0) { return false; } // ], line 160 bra = cursor; switch(among_var) { case 0: return false; case 1: // (, line 167 // delete, line 167 slice_del(); break; } return true; }
private boolean r_derivational() { int among_var; // (, line 175 // [, line 176 ket = cursor; // substring, line 176 among_var = find_among_b(a_6, 2); if (among_var == 0) { return false; } // ], line 176 bra = cursor; // call R2, line 176 if (!r_R2()) { return false; } switch(among_var) { case 0: return false; case 1: // (, line 179 // delete, line 179 slice_del(); break; } return true; }
// NOTE(review): duplicate fragment of the generated stem() driver; the labelled
// blocks open and close outside this view. Left byte-identical.
lab0: do { if (!r_mark_regions()) lab3: do { if (!r_perfective_gerund()) lab4: do { if (!r_reflexive()) lab6: do { if (!r_adjectival()) if (!r_verb()) if (!r_noun()) if (!(eq_s_b(1, "\u0438"))) slice_del(); } while (false); if (!r_derivational()) if (!r_tidy_up())
// NOTE(review): tail of an anonymous-class expression whose declaration is outside
// this view — the trailing "};" closes that hidden declaration, so the code is
// kept byte-identical and only documented.
/** Wraps the input stream with a Snowball Russian stemming filter. */ @Override public TokenStream apply(final TokenStream input) { return new SnowballFilter(input, new RussianStemmer()); } };
// NOTE(review): two more mid-method fragments of generated stemmer code
// (perfective-gerund table a_0 and adjectival table a_2); the switch statements
// that own the orphaned "case 2:" labels are not visible here. Left byte-identical.
among_var = find_among_b(a_0, 9); if (among_var == 0) lab1: do { if (!(eq_s_b(1, "\u0430"))) cursor = limit - v_1; if (!(eq_s_b(1, "\u044F"))) slice_del(); break; case 2: slice_del(); break;
if (!r_adjective()) among_var = find_among_b(a_2, 8); if (among_var == 0) lab2: do { if (!(eq_s_b(1, "\u0430"))) cursor = limit - v_2; if (!(eq_s_b(1, "\u044F"))) slice_del(); break; case 2: slice_del(); break;
private boolean r_adjective() { int among_var; // (, line 87 // [, line 88 ket = cursor; // substring, line 88 among_var = find_among_b(a_1, 26); if (among_var == 0) { return false; } // ], line 88 bra = cursor; switch(among_var) { case 0: return false; case 1: // (, line 97 // delete, line 97 slice_del(); break; } return true; }
private boolean r_derivational() { int among_var; // (, line 175 // [, line 176 ket = cursor; // substring, line 176 among_var = find_among_b(a_6, 2); if (among_var == 0) { return false; } // ], line 176 bra = cursor; // call R2, line 176 if (!r_R2()) { return false; } switch(among_var) { case 0: return false; case 1: // (, line 179 // delete, line 179 slice_del(); break; } return true; }
// NOTE(review): final duplicated fragments — the vowel/consonant alternation scan
// from r_mark_regions() (Russian vowels occupy code points 1072..1103) and another
// copy of the stem() driver. Enclosing bodies are outside this view; left byte-identical.
if (!(in_grouping(g_v, 1072, 1103))) if (!(out_grouping(g_v, 1072, 1103))) if (!(in_grouping(g_v, 1072, 1103))) if (!(out_grouping(g_v, 1072, 1103)))
lab0: do { if (!r_mark_regions()) lab3: do { if (!r_perfective_gerund()) lab4: do { if (!r_reflexive()) lab6: do { if (!r_adjectival()) if (!r_verb()) if (!r_noun()) if (!(eq_s_b(1, "\u0438"))) slice_del(); } while (false); if (!r_derivational()) if (!r_tidy_up())
/**
 * Builds a Russian analysis chain over the given stream: lowercasing followed
 * by Snowball Russian stemming.
 *
 * @param result the upstream token stream to wrap
 * @return the wrapped stream ending in a {@link SnowballFilter}
 */
static public TokenStream russian(TokenStream result) {
  // Lowercase first so the stemmer sees normalized tokens.
  final TokenStream lowered = new LowerCaseFilter(result);
  return new SnowballFilter(lowered, new org.tartarus.snowball.ext.RussianStemmer());
}