/**
 * Disambiguates an ambiguity whose branches are all overloaded productions:
 * keeps only the branches whose production is minimal in the overload order,
 * and collapses the ambiguity entirely when a single branch survives.
 * Returns the node unchanged if any branch is not a TermCons.
 */
public Term apply(Ambiguity a) {
    // Gather the production behind every branch; bail out on non-TermCons
    // branches, since only productions can be compared for overloading.
    Set<Production> prods = new HashSet<>();
    for (Term item : a.items()) {
        if (!(item instanceof TermCons)) {
            return a;
        }
        prods.add(((TermCons) item).production());
    }
    // Keep only the branches whose production is minimal wrt overloads.
    Set<Production> minimal = overloads.minimal(prods);
    Set<Term> survivors = new HashSet<>();
    for (Term item : a.items()) {
        if (minimal.contains(((ProductionReference) item).production())) {
            survivors.add(item);
        }
    }
    Ambiguity pruned = Ambiguity.apply(survivors);
    // A singleton ambiguity collapses to its only member.
    if (pruned.items().size() == 1) {
        return pruned.items().iterator().next();
    }
    return pruned;
}
}
/**
 * Implements the "prefer"/"avoid" attribute heuristic over an ambiguity node:
 * branches whose production carries the "prefer" attribute win; otherwise
 * branches carrying "avoid" are dropped, unless doing so would drop every
 * branch. Returns the single surviving branch when exactly one remains,
 * otherwise the (possibly narrowed) ambiguity.
 *
 * NOTE(review): in the multi-"prefer" branch the value returned by
 * amb.replaceChildren(prefer) is discarded and 'result' still refers to the
 * original amb — confirm replaceChildren mutates amb in place, otherwise the
 * preferred subset is silently lost.
 * NOTE(review): amb.items().removeAll(avoid) assumes items() exposes a
 * mutable collection — verify; many term representations return immutable
 * views, which would make this throw UnsupportedOperationException.
 */
public Term preferAvoid(Ambiguity amb) { List<Term> prefer = new ArrayList<>(); List<Term> avoid = new ArrayList<>(); for (Term t : amb.items()) { if (t instanceof ProductionReference) { if (((ProductionReference) t).production().att().contains("prefer")) { prefer.add(t); } else if (((ProductionReference) t).production().att().contains("avoid")) { avoid.add(t); } } } Term result = amb; if (!prefer.isEmpty()) { if (prefer.size() == 1) { result = prefer.get(0); } else { amb.replaceChildren(prefer); } } else if (!avoid.isEmpty()) { if (avoid.size() < amb.items().size()) { amb.items().removeAll(avoid); if (amb.items().size() == 1) result = amb.items().iterator().next(); } } return result; }
/** * Add to this function the mappings resulting from composing mappings in call with the mappings in exit. * * This is used when the child (a {@link NonTerminal}) of a {@link NonTerminalState} finishes parsing. * In that case 'call' is the Function for the {@link StateCall} for that {@link NonTerminalState} and * 'exit' is the Function for the {@link StateReturn} of the {@link ExitState} in the {@link NonTerminal}. * @param call The base function onto which 'exit' should be appended * @param exit The function to append on 'call' * @return 'true' iff the mapping in this function changed */ boolean addNTCall(Function call, final Function exit) { return addAux(call, set -> { Set<Term> result = new HashSet<>(); if (!exit.values.isEmpty()) { // if we found some, make an amb node and append them to the KList for (Term context : set) { if (exit.values.size() == 1) { result.add(((KList)context).add(exit.values.iterator().next())); } else { result.add(((KList) context).add(Ambiguity.apply(exit.values))); } } } return result; }); }
// NOTE(review): this fragment is syntactically broken/truncated — there is
// unreachable code after 'return a;', the variable 't' is redeclared in a
// nested loop, and several closing braces are missing. It appears to be an
// extract of an Ambiguity visitor that, when all branches share the same
// structure, pushes per-argument ambiguities down into a single shared
// TermCons. Recover the complete method from version control before editing.
@Override public Term apply(Ambiguity a) { if (a.items().size() == 1) return apply(a.items().iterator().next()); Production prod = null; int arity = 0; for (Term t : a.items()) { if (!(t instanceof ProductionReference)) { return a; boolean sameAtIdx = true; Term sameTerm = null; for (Term t : a.items()) { TermCons tc = (TermCons)t; if (sameTerm == null) { TermCons first = (TermCons)a.items().iterator().next(); for (int i = 0; i < arity; i++) { final int idx = i; return apply(first.with(i, new Ambiguity(a.items().stream().map(t -> (TermCons)t).map(t -> t.get(idx)).collect(Collectors.toSet()))));
/**
 * Crosses the variable-sort expectations collected so far ('vars') with the
 * expectations found in each branch of the ambiguity: every existing
 * combination is paired with every combination discovered in the branch by
 * CollectExpectedVariablesVisitor. A side with no constraints passes the
 * other side through unchanged. 'vars' is only replaced when at least one
 * combination was produced. Returns the node unmodified (visitor pattern).
 *
 * NOTE(review): the 'if (viz.vars.size() == 0) newVars.addAll(vars);' guard
 * sits inside the loop over 'vars', so it re-adds the same elements once per
 * outer element — harmless for a Set, but confirm it was not intended to sit
 * outside that loop.
 */
@Override public Term apply(Ambiguity node) { Set<Multimap<VarKey, Sort>> newVars = new HashSet<>(); for (Term t : node.items()) { CollectExpectedVariablesVisitor viz = new CollectExpectedVariablesVisitor(declaredNames); viz.apply(t); // create the split for (Multimap<VarKey, Sort> elem : vars) { // for every local type restrictions for (Multimap<VarKey, Sort> elem2 : viz.vars) { // create a combination with every ambiguity detected Multimap<VarKey, Sort> clone = HashMultimap.create(); clone.putAll(elem); clone.putAll(elem2); newVars.add(clone); } if (viz.vars.size() == 0) newVars.addAll(vars); } if (vars.size() == 0) newVars.addAll(viz.vars); } if (!newVars.isEmpty()) vars = newVars; return node; }
/** An ambiguity with two distinct children must survive cleanup unchanged. */
@Test
public void testAmb2() throws Exception {
    Ambiguity amb = Ambiguity.apply(foo, bar);
    assertCleanup(amb, amb);
}
// NOTE(review): this fragment is truncated — braces and parentheses are
// unbalanced, 'msg' and 'w' are used without visible declarations, and the
// KException construction is cut off mid-expression. It appears to compare
// the KORE form of each ambiguity branch, build a numbered warning message
// listing the branches, emit an inner-parser WARNING at the location of the
// first branch, and then resolve the ambiguity by taking that first branch.
// Recover the complete method from version control before editing.
boolean equal = true; Tuple2<Either<Set<ParseFailedException>, Term>, Set<ParseFailedException>> candidate = null; for (Term t : amb.items()) { candidate = this.apply(t); K next = new TreeNodesToKORE(Outer::parseSort, strict).apply(new RemoveBracketVisitor().apply(candidate._1().right().get())); for (int i = 0; i < amb.items().size(); i++) { msg += "\n" + (i + 1) + ": "; Term elem = (Term) amb.items().toArray()[i]; if (elem instanceof ProductionReference) { ProductionReference tc = (ProductionReference) elem; new KException(ExceptionType.WARNING, KExceptionGroup.INNER_PARSER, msg, amb.items().iterator().next().source().get(), amb.items().iterator().next().location().get())); Tuple2<Either<Set<ParseFailedException>, Term>, Set<ParseFailedException>> rez = this.apply(amb.items().iterator().next()); return new Tuple2<>(Right.apply(rez._1().right().get()), Sets.union(Sets.newHashSet(w), rez._2()));
@Override public Either<java.util.Set<ParseFailedException>, Term> apply(Ambiguity amb) { // if the ambiguity has rewrites at the top, prefer them, and eliminate the rest scala.collection.Set<Term> rewrites = amb.items().stream().filter(o -> o instanceof TermCons && ((TermCons) o).production().klabel().isDefined() && ((TermCons) o).production().klabel().get().name().equals("#KRewrite")).collect(Collections.toSet()); if (rewrites.size() == 0 || rewrites.size() == amb.items().size()) return super.apply(amb); if (rewrites.size() == 1) return Right.apply(rewrites.head()); return super.apply(Ambiguity.apply(mutable(rewrites))); }
/** A singleton ambiguity must collapse to its only child during cleanup. */
@Test public void testAmb() throws Exception { assertCleanup(Ambiguity.apply(foo), foo); }
// NOTE(review): truncated fragment (the 'if' block is never closed). It
// descends into the first branch of an Ambiguity, presumably to obtain a
// representative term for location/source reporting — confirm against the
// full method before editing.
Term loc = t; if (loc instanceof Ambiguity) { loc = ((Ambiguity)loc).items().iterator().next();
@Override public Either<java.util.Set<ParseFailedException>, Term> apply(Ambiguity amb) { // if the ambiguity has KSeq at the top, prefer them, and eliminate the rest scala.collection.Set<Term> rewrites = amb.items().stream().filter(o -> o instanceof TermCons && ((TermCons) o).production().klabel().isDefined() && ((TermCons) o).production().klabel().get().name().equals("#KSequence")).collect(Collections.toSet()); if (rewrites.size() == 0 || rewrites.size() == amb.items().size()) return super.apply(amb); if (rewrites.size() == 1) return Right.apply(rewrites.head()); return super.apply(Ambiguity.apply(mutable(rewrites))); }
/** An ambiguity wrapping a singleton KList must collapse to the bare child. */
@Test public void testKList() throws Exception { assertCleanup(Ambiguity.apply(KList.apply(ConsPStack.singleton(foo))), foo); }
/**
 * Pushes an ambiguity found under a RuleContent node up to the top: the
 * TermCons is distributed over every branch of its (recursively lifted)
 * first child. Other nodes are handled generically.
 */
@Override
public Term apply(TermCons tc) {
    // Only RuleContent nodes participate in the push-up.
    if (!tc.production().sort().name().equals("RuleContent")) {
        return super.apply(tc);
    }
    // Recursively lift ambiguity inside the first child first.
    Term child = new PushTopAmbiguityUp2().apply(tc.get(0));
    if (!(child instanceof Ambiguity)) {
        return super.apply(tc);
    }
    // Distribute the TermCons over every branch of the child ambiguity.
    Set<Term> lifted = new HashSet<>();
    for (Term branch : ((Ambiguity) child).items()) {
        lifted.add(tc.with(0, branch));
    }
    return Ambiguity.apply(lifted);
}
/**
 * Pushes an ambiguity found under the left-hand side of a KRewrite up to the
 * top: the rewrite node is distributed over every branch of its first child.
 * Other nodes are handled generically.
 */
@Override
public Term apply(TermCons tc) {
    boolean isRewrite = tc.production().klabel().isDefined()
            && tc.production().klabel().get().equals(KLabels.KREWRITE);
    if (isRewrite) {
        Term lhs = tc.get(0);
        if (lhs instanceof Ambiguity) {
            // Distribute the rewrite over every branch of its left-hand side.
            Set<Term> lifted = new HashSet<>();
            for (Term branch : ((Ambiguity) lhs).items()) {
                lifted.add(tc.with(0, branch));
            }
            return Ambiguity.apply(lifted);
        }
    }
    return super.apply(tc);
}
}
// NOTE(review): truncated fragment (loop and 'if' blocks are never closed,
// and 'resultSet' / 'result' appear mid-declaration). It appears to collect
// the parse results of every exit StateReturn that consumed the whole input,
// wrap each in a singleton-KList ambiguity, and fall through to error
// reporting via getErrors() when no complete parse exists. Recover the full
// method from version control before editing.
for(StateReturn stateReturn : s.ntCalls.computeIfAbsent(new NonTerminalCall.Key(nt, position), NonTerminalCall.Key::create).exitStateReturns) { if (stateReturn.key.stateEnd == s.input.length) { resultSet.add(KList.apply(ConsPStack.singleton(Ambiguity.apply(stateReturn.function.values)))); Ambiguity result = Ambiguity.apply(resultSet); if(result.items().size() == 0) { ParseError perror = getErrors();