/**
 * Pre-processes a term before execution: applies macro expansion unless the
 * user disabled it via {@code options.noExpandMacros}.
 *
 * @param k the term to pre-process
 * @return the macro-expanded term, or {@code k} unchanged when expansion is off
 */
public K preprocess(K k) {
    return options.noExpandMacros ? k : expandMacros.expand(k);
}
/**
 * Builds the macro table for {@code mod} and, when coverage is enabled, opens
 * the coverage log file under the kompiled directory.
 *
 * @param mod the module whose macro rules are collected
 * @param files used to resolve paths inside the kompiled directory
 * @param kompileOptions read for the {@code coverage} flag
 * @param reverse when true, macros are indexed for expansion right-to-left
 *        (used when unparsing); affects which side of each rule is the key
 */
public ExpandMacros(Module mod, FileUtil files, KompileOptions kompileOptions, boolean reverse) {
    this.mod = mod;
    this.reverse = reverse;
    this.cover = kompileOptions.coverage;
    // Ensure the kompiled directory exists before any coverage file is created.
    files.resolveKompiled(".").mkdirs();
    // Group macro rules by the KLabel at the root of their (left or right) side.
    // Sorting by the "owise" attribute first keeps non-owise rules ahead of
    // owise rules within each group (false sorts before true; the sort is stable).
    macros = stream(mod.rules()).filter(r -> isMacro(r.att(), reverse)).sorted(Comparator.comparing(r -> r.att().contains("owise"))).collect(Collectors.groupingBy(r -> ((KApply)getLeft(r, reverse)).klabel()));
    if (cover) {
        try {
            // Append mode: multiple runs accumulate coverage in the same file.
            FileOutputStream os = new FileOutputStream(files.resolveKompiled("coverage.txt"), true);
            // The channel is kept so writes can be coordinated (e.g. locked) elsewhere.
            channel = os.getChannel();
            coverage = new PrintWriter(new BufferedWriter(new OutputStreamWriter(os)));
        } catch (IOException e) {
            throw KEMException.internalError("Could not write list of rules to coverage document.", e);
        }
    } else {
        channel = null;
        coverage = null;
    }
}
/**
 * Unparses a KORE term back to concrete syntax: reverse-expands macros, lifts
 * the term to a parse tree, disambiguates, inserts brackets, and formats with
 * color settings.
 *
 * NOTE(review): the {@code colorize} parameter is not read by this body — the
 * color choice comes from {@code options.color(...)}; confirm whether the
 * parameter is vestigial before removing it.
 *
 * @param mod the module providing syntax for unparsing
 * @param input the KORE term to render
 * @param colorize unused by this implementation (see note above)
 * @return the formatted concrete-syntax string
 */
private String unparseInternal(Module mod, K input, ColorSetting colorize) {
    // Reverse macro expansion so the output matches user-written syntax.
    ExpandMacros reverseMacros = new ExpandMacros(mod, files, kompileOptions, true);
    K expanded = reverseMacros.expand(input);
    ProductionReference tree =
            (ProductionReference) ParseInModule.disambiguateForUnparse(
                    mod, KOREToTreeNodes.apply(KOREToTreeNodes.up(mod, expanded), mod));
    return Formatter.format(
            new AddBrackets(mod).addBrackets(tree),
            options.color(tty.stdout, files.getEnv()));
}
/**
 * Copies compilation state from a previously serialized {@link DefinitionToOcaml}
 * into this instance, lazily constructing the pieces that are not serializable.
 *
 * NOTE: this method MUTATES {@code serialized} (filling in {@code expandMacros},
 * {@code convertDataStructure}, and {@code exitCodePattern} when null) so that
 * subsequent calls reuse the same instances.
 *
 * @param serialized the deserialized state to copy from (and patch in place)
 * @param def the compiled definition, used to build the missing pieces
 */
public void initialize(DefinitionToOcaml serialized, CompiledDefinition def) {
    mainModule = serialized.mainModule;
    collectionFor = serialized.collectionFor;
    filteredMapConstructors = serialized.filteredMapConstructors;
    matchThreadSet = serialized.matchThreadSet;
    rewriteThreadSet = serialized.rewriteThreadSet;
    makeStuck = serialized.makeStuck;
    makeUnstuck = serialized.makeUnstuck;
    functions = serialized.functions;
    anywhereKLabels = serialized.anywhereKLabels;
    options = serialized.options;
    constants = serialized.constants;
    realStepFunctions = serialized.realStepFunctions;
    // ExpandMacros is not serialized; build it once on first initialization.
    if (serialized.expandMacros == null) {
        serialized.expandMacros = new ExpandMacros(def.executionModule(), files, kompileOptions, false);
    }
    // Same lazy-build pattern for the data-structure-to-lookup converter.
    if (serialized.convertDataStructure == null) {
        serialized.convertDataStructure = new ConvertDataStructureToLookup(def.executionModule(), true);
    }
    expandMacros = serialized.expandMacros;
    convertDataStructure = serialized.convertDataStructure;
    threadCellExists = serialized.threadCellExists;
    if (serialized.exitCodePattern == null) {
        serialized.exitCodePattern = def.exitCodePattern;
    }
    exitCodePattern = serialized.exitCodePattern;
    sortHooks = userSortHooks;
    klabelsForEachPredicate = serialized.klabelsForEachPredicate;
}
// NOTE(review): fragment — the enclosing matching method's header and closing
// braces are outside this view; the trailing `match = ...` statement belongs to
// a different (unseen) branch of the same method. Do not read this as a
// complete block.
if (pattern.att().contains(Sort.class)) {
    Sort patternSort = pattern.att().get(Sort.class);
    // Bind the variable pattern to the subject only if at least one of the
    // subject's possible sorts fits under the pattern's declared sort.
    if (sort(subject, r).stream().anyMatch(s -> mod.subsorts().lessThanEq(s, patternSort))) {
        subst.put((KVariable)pattern, subject);
        return true;
        // (unreached here; part of the surrounding method's other branch)
        match = match && match(subst, p.klist().items().get(i), s.klist().items().get(i), r);
/**
 * Computes the set of sorts a term may have, for macro sort-checking.
 * Variables and tokens carry a single declared sort; for applications, the
 * candidate sorts are those of productions whose label and argument sorts are
 * compatible with the (recursively computed) sorts of the arguments.
 *
 * @param k the term to sort (must be a KVariable, KToken, or KApply)
 * @param r the rule being processed, for error reporting
 * @return the set of possible sorts of {@code k}
 * @throws KEMException on KLabel variables or unsupported term kinds
 */
private Set<Sort> sort(K k, Rule r) {
    if (k instanceof KVariable) {
        return Collections.singleton(k.att().get(Sort.class));
    }
    if (k instanceof KToken) {
        return Collections.singleton(((KToken) k).sort());
    }
    if (!(k instanceof KApply)) {
        throw KEMException.compilerError("Cannot compute macros with sort check on terms that are not KApply, KToken, or KVariable.", r);
    }
    KApply app = (KApply) k;
    if (app.klabel() instanceof KVariable) {
        throw KEMException.compilerError("Cannot compute macros with klabel variables.", r);
    }
    // Start from all productions for this label, then prune by arity and by
    // argument-sort compatibility (an argument fits if any of its possible
    // sorts is <= the production's declared nonterminal sort).
    Set<Production> candidates = new HashSet<>(mutable(mod.productionsFor().apply(app.klabel())));
    candidates.removeIf(p -> p.arity() != app.items().size());
    for (int i = 0; i < app.items().size(); i++) {
        final int argIdx = i;
        Set<Sort> argSorts = sort(app.items().get(argIdx), r);
        candidates.removeIf(p -> argSorts.stream().noneMatch(s -> mod.subsorts().lessThanEq(s, p.nonterminal(argIdx).sort())));
    }
    return candidates.stream().map(Production::sort).collect(Collectors.toSet());
}
/**
 * Runs a symbolic search from {@code initialConfiguration} for states matching
 * {@code pattern}, bounded by optional depth and solution-count limits.
 *
 * @param initialConfiguration the start configuration (KORE)
 * @param depth maximum rewrite depth; unbounded when absent
 * @param bound maximum number of solutions; unbounded when absent
 * @param pattern the pattern solutions must match
 * @param searchType which states qualify (e.g. final vs. all)
 * @return the search result as a KORE term
 */
@Override
public K search(K initialConfiguration, Optional<Integer> depth, Optional<Integer> bound, Rule pattern, SearchType searchType) {
    rewritingContext.stateLog.open("search-" + Integer.toString(Math.abs(initialConfiguration.hashCode())));
    TermContext ctx = TermContext.builder(rewritingContext).freshCounter(initCounterValue).build();
    KOREtoBackendKIL kil = new KOREtoBackendKIL(module, definition, ctx.global(), false);
    ctx.setKOREtoBackendKILConverter(kil);
    // Resolve semantic casts and expand macros before converting to backend KIL.
    ResolveSemanticCasts casts = new ResolveSemanticCasts(true);
    ExpandMacros macros = new ExpandMacros(module, files, kompileOptions, false);
    Term subject = kil.convert(macros.expand(casts.resolve(initialConfiguration))).evaluate(ctx);
    org.kframework.backend.java.kil.Rule compiledPattern =
            kil.convert(Optional.empty(), transformFunction(JavaBackend::convertKSeqToKApply, pattern));
    SymbolicRewriter rewriter = new SymbolicRewriter(rewritingContext, transitions, kil);
    // NEGATIVE_VALUE signals "unbounded" to the rewriter.
    K result = rewriter.search(subject, compiledPattern, bound.orElse(NEGATIVE_VALUE), depth.orElse(NEGATIVE_VALUE), searchType, ctx);
    rewritingContext.stateLog.close();
    return result;
}
// NOTE(review): fragment — these declarations sit inside an unseen enclosing
// method/constructor; they set up the module-level compilation passes.
// Rewrite data-structure patterns into explicit lookup operations.
ModuleTransformer convertLookups = ModuleTransformer.fromSentenceTransformer(convertDataStructure::convert, "convert data structures to lookups");
// Wrap bare K terms into KSequences where required by the backend.
ModuleTransformer liftToKSequence = ModuleTransformer.fromSentenceTransformer(new LiftToKSequence()::lift, "lift K into KSequence");
// Forward (non-reverse) macro expansion over the execution module's rules;
// the instance is kept on `this` for reuse, the transformer wraps it.
this.expandMacros = new ExpandMacros(def.executionModule(), files, kompileOptions, false);
ModuleTransformer expandMacros = ModuleTransformer.fromSentenceTransformer(this.expandMacros::expand, "expand macro rules");
// Replace literal int/float matches on the LHS with side-condition checks.
ModuleTransformer deconstructInts = ModuleTransformer.fromSentenceTransformer(new DeconstructIntegerAndFloatLiterals()::convert, "remove matches on integer literals in left hand side");
/**
 * Concretely executes {@code k} by rewriting until a normal form or the given
 * depth limit is reached.
 *
 * @param k the configuration to execute (KORE)
 * @param depth maximum number of rewrite steps; unbounded when absent
 * @return the rewriting result (final term plus step count)
 */
@Override
public RewriterResult execute(K k, Optional<Integer> depth) {
    rewritingContext.stateLog.open("execute-" + Integer.toString(Math.abs(k.hashCode())));
    TermContext ctx = TermContext.builder(rewritingContext).freshCounter(initCounterValue).build();
    KOREtoBackendKIL kil = new KOREtoBackendKIL(module, definition, ctx.global(), false);
    ctx.setKOREtoBackendKILConverter(kil);
    // Resolve semantic casts and expand macros before converting to backend KIL.
    ResolveSemanticCasts casts = new ResolveSemanticCasts(true);
    ExpandMacros macros = new ExpandMacros(module, files, kompileOptions, false);
    Term subject = kil.convert(macros.expand(casts.resolve(k))).evaluate(ctx);
    SymbolicRewriter rewriter = new SymbolicRewriter(rewritingContext, transitions, kil);
    // depth of -1 signals "unbounded" to the rewriter.
    RewriterResult result = rewriter.rewrite(new ConstrainedTerm(subject, ctx), depth.orElse(-1));
    rewritingContext.stateLog.close();
    return result;
}
/**
 * Appends to {@code sb} an OCaml binding that parses the initial configuration
 * from its binary KAST encoding.
 *
 * The repeated null assignments are deliberate: the configuration and its
 * intermediate encodings can be very large, so each reference is dropped as
 * soon as it is no longer needed to let the GC reclaim memory before the next
 * (also large) representation is built.
 *
 * @param k holder of the initial configuration; its {@code theConfig} field is
 *          cleared by this method
 * @param sb the OCaml source being generated
 */
private void ocamlTermInput(KRun.InitialConfiguration k, StringBuilder sb) {
    sb.append("let input = Lexer.parse_k_binary_string\n");
    K config = k.theConfig;
    // Clear the holder first so only the local reference keeps the term alive.
    k.theConfig = null;
    config = expandMacros.expand(config);
    byte[] binary = ToBinary.apply(config);
    // Term no longer needed once serialized to bytes.
    config = null;
    String str = enquoteString(binary);
    // Bytes no longer needed once quoted into a string literal.
    binary = null;
    sb.append(str);
    str = null;
    sb.append("\n");
}
// Forward-expand macros in the parsed term against the compiled module
// (fresh ExpandMacros per call; presumably fine since this is not a hot path
// — NOTE(review): confirm against caller frequency).
parsed = new ExpandMacros(compiledMod, files, def.kompileOptions, false).expand(parsed);
/**
 * The module transformation pipeline applied to proof-specification modules
 * for the Java backend: variable/cast resolution, configuration concretization,
 * rewrite normalization, macro expansion, and collection lowering.
 *
 * Fix: corrected the typo in the "resolve anonymous variables" pass name
 * (was "varaibles"); the name is the diagnostic label reported for the pass.
 *
 * @param def the compiled definition the specification is checked against
 * @return a transformation from a specification module to its compiled form
 */
@Override
public Function<Module, Module> specificationSteps(Definition def) {
    // Lower cell-collection terms to the backend's lookup representation.
    BiFunction<Module, K, K> convertCellCollections =
            (Module m, K k) -> new ConvertDataStructureToLookup(m, false).convert(k);
    return m -> ModuleTransformer.fromSentenceTransformer(new ResolveAnonVar()::resolve, "resolve anonymous variables")
            .andThen(ModuleTransformer.fromSentenceTransformer(s -> new ResolveSemanticCasts(kompileOptions.backend.equals(Backends.JAVA)).resolve(s), "resolve semantic casts"))
            .andThen(AddImplicitComputationCell::transformModule)
            .andThen(ConcretizeCells::transformModule)
            .andThen(ModuleTransformer.fromRuleBodyTransformer(RewriteToTop::bubbleRewriteToTopInsideCells, "bubble out rewrites below cells"))
            .andThen(AddBottomSortForListsWithIdenticalLabels.singleton())
            // Forward macro expansion, per sentence, against its own module.
            .andThen(ModuleTransformer.fromSentenceTransformer((mod, s) -> new ExpandMacros(mod, files, kompileOptions, false).expand(s), "expand macros"))
            .andThen(ModuleTransformer.fromKTransformerWithModuleInfo(convertCellCollections::apply, "convert cell to the underlying collections"))
            .andThen(ModuleTransformer.fromRuleBodyTransformer(JavaBackend::ADTKVariableToSortedVariable, "ADT.KVariable to SortedVariable"))
            .andThen(ModuleTransformer.fromRuleBodyTransformer(JavaBackend::convertKSeqToKApply, "kseq to kapply"))
            .andThen(ModuleTransformer.fromRuleBodyTransformer(NormalizeKSeq.self(), "normalize kseq"))
            .andThen(mod -> JavaBackend.markRegularRules(def, mod))
            .andThen(ModuleTransformer.fromSentenceTransformer(new AddConfigurationRecoveryFlags()::apply, "add refers_THIS_CONFIGURATION_marker"))
            .apply(m);
}
/**
 * The kompile pipeline steps specific to the Java backend, applied after the
 * generic {@link Kompile#defaultSteps} pipeline: rewrite normalization, macro
 * expansion, data-structure lowering, rule marking, and AC-to-A match
 * conversion, finishing with rule merging.
 *
 * @return the definition-to-definition transformation for this backend
 */
@Override
public Function<Definition, Definition> steps() {
    DefinitionTransformer convertDataStructureToLookup = DefinitionTransformer.fromSentenceTransformer((m, s) -> new ConvertDataStructureToLookup(m, false).convert(s), "convert data structures to lookups");
    return d -> DefinitionTransformer.fromRuleBodyTransformer(RewriteToTop::bubbleRewriteToTopInsideCells, "bubble out rewrites below cells")
            .andThen(DefinitionTransformer.fromSentenceTransformer(JavaBackend::convertListItemToNonFunction, "remove function attribute from ListItem production"))
            .andThen(DefinitionTransformer.fromSentenceTransformer(new NormalizeAssoc(KORE.c()), "normalize assoc"))
            .andThen(DefinitionTransformer.from(AddBottomSortForListsWithIdenticalLabels.singleton(), "add bottom sorts for lists"))
            // Forward macro expansion, per sentence, against its own module.
            .andThen(DefinitionTransformer.fromSentenceTransformer((m, s) -> new ExpandMacros(m, files, kompileOptions, false).expand(s), "expand macros"))
            // Run twice: macro expansion can reintroduce non-normalized assoc terms.
            .andThen(DefinitionTransformer.fromSentenceTransformer(new NormalizeAssoc(KORE.c()), "normalize assoc"))
            .andThen(convertDataStructureToLookup)
            .andThen(DefinitionTransformer.fromRuleBodyTransformer(JavaBackend::ADTKVariableToSortedVariable, "ADT.KVariable to SortedVariable"))
            .andThen(DefinitionTransformer.fromRuleBodyTransformer(JavaBackend::convertKSeqToKApply, "kseq to kapply"))
            .andThen(DefinitionTransformer.fromRuleBodyTransformer(NormalizeKSeq.self()::apply, "normalize kseq"))
            .andThen(JavaBackend::markRegularRules)
            .andThen(DefinitionTransformer.fromSentenceTransformer(new AddConfigurationRecoveryFlags(), "add refers_THIS_CONFIGURATION_marker"))
            .andThen(DefinitionTransformer.fromSentenceTransformer(JavaBackend::markSingleVariables, "mark single variables"))
            .andThen(DefinitionTransformer.from(new AssocCommToAssoc(), "convert AC matching to A matching"))
            .andThen(DefinitionTransformer.from(new MergeRules(), "merge rules into one rule with or clauses"))
            .apply(Kompile.defaultSteps(kompileOptions, kem, files).apply(d));
    // .andThen(KoreToMiniToKore::apply) // for serialization/deserialization test
}
// NOTE(review): fragment — these declarations sit inside an unseen enclosing
// method; they define kompile pipeline passes used later in that method.
// Generate isSort() predicate productions for each declared sort.
DefinitionTransformer generateSortPredicateSyntax = DefinitionTransformer.from(new GenerateSortPredicateSyntax()::gen, "adding sort predicate productions");
// Make every sort a subsort of KItem so generic K operations apply.
DefinitionTransformer subsortKItem = DefinitionTransformer.from(Kompile::subsortKItem, "subsort all sorts to KItem");
// Forward macro expansion, per sentence, against its own module.
DefinitionTransformer expandMacros = DefinitionTransformer.fromSentenceTransformer((m, s) -> new ExpandMacros(m, files, kompileOptions, false).expand(s), "expand macros");
// Replace !Var fresh-variable occurrences with fresh-constant generation.
Function1<Definition, Definition> resolveFreshConstants = d -> DefinitionTransformer.from(new ResolveFreshConstants(d, true)::resolve, "resolving !Var variables").apply(d);
// Attach the format attribute to the generatedTop configuration cell.
DefinitionTransformer generatedTopFormat = DefinitionTransformer.from(GeneratedTopFormat::resolve, "setting generatedTop format attribute");