diff --git a/CHANGES.md b/CHANGES.md index 0bf4aa14e..c6559c772 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -3,6 +3,16 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/), and this project adheres to [Semantic Versioning](https://semver.org/). +## Unreleased + +### Added + +- Add tactic `set`. + +### Fixed + +- Induction tactic. + ## 2.5.1 (2024-07-22) ### Added diff --git a/doc/Makefile.bnf b/doc/Makefile.bnf index e9e7d6a91..5c1bc9b62 100644 --- a/doc/Makefile.bnf +++ b/doc/Makefile.bnf @@ -59,6 +59,7 @@ lambdapi.bnf: Makefile.bnf ../src/parsing/lpParser.mly -e 's/REQUIRE/"require"/g' \ -e 's/RESOLVE/"resolve"/g' \ -e 's/REWRITE/"rewrite"/g' \ + -e 's/SET/"set"/g' \ -e 's/UNIF_RULE/"unif_rule"/g' \ -e 's/RULE/"rule"/g' \ -e 's/SIDE//g' \ diff --git a/doc/lambdapi.bnf b/doc/lambdapi.bnf index 3f794d123..546461b7a 100644 --- a/doc/lambdapi.bnf +++ b/doc/lambdapi.bnf @@ -146,6 +146,7 @@ QID ::= [UID "."]+ UID | "fail" | "generalize" | "have" ":" + | "set" "≔" | "induction" | "refine" | "reflexivity" diff --git a/doc/tactics.rst b/doc/tactics.rst index 021502b3d..3d02ab6cb 100644 --- a/doc/tactics.rst +++ b/doc/tactics.rst @@ -133,6 +133,13 @@ unification algorithms will then try to instantiate some of the metavariables. The new metavariables that cannot be solved are added as new goals. +.. _set: + +``set`` +------- + +``set x ≔ t`` extends the current context with ``x ≔ t``. + .. _simplify: ``simplify`` diff --git a/editors/emacs/lambdapi-smie.el b/editors/emacs/lambdapi-smie.el index 0802c6d5e..cd273e87b 100644 --- a/editors/emacs/lambdapi-smie.el +++ b/editors/emacs/lambdapi-smie.el @@ -20,6 +20,7 @@ "reflexivity" "remove" "rewrite" + "set" "simplify" "solve" "symmetry" @@ -98,12 +99,13 @@ Indent by `lambdapi-indent-basic' in proofs, and 0 otherwise." ("fail") ("focus" ident) ("generalize" ident) - ("have" sterm) + ("have" ident ":" sterm) ("induction") ("refine" sterm) ("reflexivity") ("remove" ident) ("rewrite" "[" rw-patt "]") + ("set" ident "≔" sterm) ("simplify") ("simplify" ident) ("solve") @@ -197,6 +199,7 @@ The default lexer is used because the syntax is primarily made of sexps." (`(:before . "reflexivity") `(column . ,lambdapi-indent-basic)) (`(:before . "remove") `(column . ,lambdapi-indent-basic)) (`(:before . "rewrite") `(column . ,lambdapi-indent-basic)) + (`(:before . "set") `(column . ,lambdapi-indent-basic)) (`(:before . "simplify") `(column . ,lambdapi-indent-basic)) (`(:before . "solve") `(column . ,lambdapi-indent-basic)) (`(:before . "symmetry") `(column . ,lambdapi-indent-basic)) @@ -222,7 +225,7 @@ The default lexer is used because the syntax is primarily made of sexps." (`(:after . ,(or "symbol" "inductive")) lambdapi-indent-basic) (`(:after . 
,(or "apply" "assume" "fail" "focus" "generalize" "have" "induction" "refine" "reflexivity" "remove" "rewrite" - "simplify" "solve" "symmetry" "why3")) + "set" "simplify" "solve" "symmetry" "why3")) lambdapi-indent-basic) ;; Toplevel diff --git a/editors/emacs/lambdapi-vars.el b/editors/emacs/lambdapi-vars.el index b51bd6810..25c542276 100644 --- a/editors/emacs/lambdapi-vars.el +++ b/editors/emacs/lambdapi-vars.el @@ -15,6 +15,7 @@ "reflexivity" "remove" "rewrite" + "set" "simplify" "solve" "symmetry" diff --git a/editors/vim/syntax/lambdapi.vim b/editors/vim/syntax/lambdapi.vim index 4fe2371e4..719e4157b 100644 --- a/editors/vim/syntax/lambdapi.vim +++ b/editors/vim/syntax/lambdapi.vim @@ -49,8 +49,8 @@ syntax keyword KeywordOK contained end syntax keyword KeywordOK contained fail syntax keyword KeywordOK contained flag syntax keyword KeywordOK contained focus -syntax keyword KeywordOK contained have syntax keyword KeywordOK contained generalize +syntax keyword KeywordOK contained have syntax keyword KeywordOK contained in syntax keyword KeywordOK contained induction syntax keyword KeywordOK contained inductive @@ -82,6 +82,7 @@ syntax keyword KeywordOK contained rule syntax keyword KeywordOK contained search syntax keyword KeywordOK contained search-query syntax keyword KeywordOK contained sequential +syntax keyword KeywordOK contained set syntax keyword KeywordOK contained simplify syntax keyword KeywordOK contained solve syntax keyword KeywordOK contained symbol @@ -116,8 +117,8 @@ syntax keyword KeywordKO contained end syntax keyword KeywordKO contained fail syntax keyword KeywordKO contained flag syntax keyword KeywordKO contained focus -syntax keyword KeywordKO contained have syntax keyword KeywordKO contained generalize +syntax keyword KeywordKO contained have syntax keyword KeywordKO contained in syntax keyword KeywordKO contained induction syntax keyword KeywordKO contained inductive @@ -149,6 +150,7 @@ syntax keyword KeywordKO contained rule syntax keyword KeywordKO contained search syntax keyword KeywordKO contained search-query syntax keyword KeywordKO contained sequential +syntax keyword KeywordKO contained set syntax keyword KeywordKO contained simplify syntax keyword KeywordKO contained solve syntax keyword KeywordKO contained symbol diff --git a/editors/vscode/syntaxes/lp.tmLanguage.json b/editors/vscode/syntaxes/lp.tmLanguage.json index 2ad60a20e..5a0a9eefd 100644 --- a/editors/vscode/syntaxes/lp.tmLanguage.json +++ b/editors/vscode/syntaxes/lp.tmLanguage.json @@ -25,7 +25,7 @@ ], "repository": { "commands": { - "match": "\\b(abort|admit|admitted|apply|as|assert|assertnot|associative|assume|begin|builtin|coerce_rule|commutative|compute|constant|debug|end|fail|flag|focus|generalize|have|in|induction|inductive|infix|injective|left|let|notation|off|on|open|opaque|postfix|prefix|print|private|proofterm|protected|prover|prover_timeout|quantifier|refine|reflexivity|remove|require|rewrite|rule|search|sequential|simplify|symbol|symmetry|try|type|TYPE|unif_rule|why3|with)\\b", + "match": "\\b(abort|admit|admitted|apply|as|assert|assertnot|associative|assume|begin|builtin|coerce_rule|commutative|compute|constant|debug|end|fail|flag|focus|generalize|have|in|induction|inductive|infix|injective|left|let|notation|off|on|open|opaque|postfix|prefix|print|private|proofterm|protected|prover|prover_timeout|quantifier|refine|reflexivity|remove|require|rewrite|rule|search|sequential|set|simplify|symbol|symmetry|try|type|TYPE|unif_rule|why3|with)\\b", "name": "keyword.control.lp" }, 
"comments": { @@ -44,7 +44,7 @@ }, "tactics": { - "match": "\\b(apply|assume|fail|focus|generalize|have|induction|refine|reflexivity|remove|rewrite|simplify|solve|symmetry|try|why3)\\b", + "match": "\\b(apply|assume|fail|focus|generalize|have|induction|refine|reflexivity|remove|rewrite|set|simplify|solve|symmetry|try|why3)\\b", "name": "keyword.control.tactics.lp" }, diff --git a/misc/lambdapi.tex b/misc/lambdapi.tex index ef83d82b9..09e2eb808 100644 --- a/misc/lambdapi.tex +++ b/misc/lambdapi.tex @@ -9,7 +9,7 @@ tabsize=2, basicstyle={\ttfamily\small\upshape}, backgroundcolor=\color{lightgrey}, - keywords={abort,admit,admitted,apply,as,assert,assertnot,associative,assume,begin,builtin,commutative,compute,constant,debug,end,fail,flag,focus,generalize,have,in,induction,inductive,infix,injective,left,let,notation,off,on,opaque,open,prefix,print,private,proofterm,protected,prover,prover_timeout,quantifier,refine,reflexivity,require,rewrite,right,rule,sequential,simplify,solve,symbol,symmetry,type,TYPE,unif_rule,verbose,why3,with}, + keywords={abort,admit,admitted,apply,as,assert,assertnot,associative,assume,begin,builtin,commutative,compute,constant,debug,end,fail,flag,focus,generalize,have,in,induction,inductive,infix,injective,left,let,notation,off,on,opaque,open,prefix,print,private,proofterm,protected,prover,prover_timeout,quantifier,refine,reflexivity,require,rewrite,right,rule,sequential,set,simplify,solve,symbol,symmetry,type,TYPE,unif_rule,verbose,why3,with}, sensitive=true, keywordstyle=\color{blue}, morecomment=[l]{//}, diff --git a/src/core/ctxt.ml b/src/core/ctxt.ml index e21d620ce..8e936b26d 100644 --- a/src/core/ctxt.ml +++ b/src/core/ctxt.ml @@ -28,7 +28,7 @@ let to_prod : ctxt -> term -> term * int = fun ctx t -> let b = Bindlib.bind_var x t in match v with | None -> _Prod (lift a) b, c + 1 - | Some v -> _LLet (lift a) (lift v) b, c + | Some v -> _LLet (lift a) (lift v) b, c + 1 in let t, c = List.fold_left f (lift t, 0) ctx in Bindlib.unbox t, c @@ -36,16 +36,17 @@ let to_prod : ctxt -> term -> term * int = fun ctx t -> (** [to_prod_box bctx t] is similar to [to_prod bctx t] but operates on boxed contexts and terms. *) let to_prod_box : bctxt -> tbox -> tbox * int = fun bctx t -> - let f (t, c) (x, a) = + let f (t,c) (x,a,v) = let b = Bindlib.bind_var x t in - (_Prod a b, c + 1) + match v with + | None -> _Prod a b, c + 1 + | Some v -> _LLet a v b, c + 1 in List.fold_left f (t, 0) bctx (** [box_context ctx] lifts context [ctx] to a boxed context. *) let box_context : ctxt -> bctxt = - List.filter_map - (fun (x, t, u) -> if u = None then Some (x, lift t) else None) + List.map (fun (x,t,u) -> (x,lift t,Option.map lift u)) (** [to_abst ctx t] builds a sequence of abstractions over the context [ctx], in the term [t]. *) diff --git a/src/core/env.ml b/src/core/env.ml index b16facf2e..73c68dd07 100644 --- a/src/core/env.ml +++ b/src/core/env.ml @@ -20,7 +20,7 @@ let empty : env = [] (** [add v a t env] extends the environment [env] by mapping the string [Bindlib.name_of v] to [(v,a,t)]. *) let add : tvar -> tbox -> tbox option -> env -> env = fun v a t env -> - (Bindlib.name_of v, (v, a, t)) :: env + (Bindlib.name_of v, (v,a,t)) :: env (** [find n env] returns the Bindlib variable associated to the variable name [n] in the environment [env]. If none is found, [Not_found] is raised. *) @@ -30,16 +30,15 @@ let find : string -> env -> tvar = fun n env -> (** [mem n env] returns [true] iff [n] is mapped to a variable in [env]. 
*) let mem : string -> env -> bool = List.mem_assoc -(** [to_prod env t] builds a sequence of products / let-bindings whose domains - are the variables of the environment [env] (from left to right), and whose - body is the term [t]. By calling [to_prod [(xn,an,None);⋯;(x1,a1,None)] t] - you obtain a term of the form [Πx1:a1,..,Πxn:an,t]. *) +(** [to_prod env t] builds a sequence of products/lets whose domains are the + variables of the environment [env] (from left to right), and whose body is + the term [t]. By calling [to_prod [(xn,an,None);⋯;(x1,a1,None)] t] you + obtain a term of the form [Πx1:a1,..,Πxn:an,t]. *) let to_prod_box : env -> tbox -> tbox = fun env t -> - let add_prod t (_,(x,a,u)) = - let b = Bindlib.bind_var x t in - match u with - | Some u -> _LLet a u b - | None -> _Prod a b + let add_prod t (_,(x,a,d)) = + match d with + | None -> _Prod a (Bindlib.bind_var x t) + | Some d -> _LLet a d (Bindlib.bind_var x t) in List.fold_left add_prod t env @@ -61,24 +60,21 @@ let to_abst : env -> tbox -> term = fun env t -> in Bindlib.unbox (List.fold_left add_abst t env) -(** [vars env] extracts the array of the {e not defined} Bindlib variables in - [env]. Note that the order is reversed: [vars [(xn,an);..;(x1,a1)] = - [|x1;..;xn|]]. *) +(** [vars env] extracts the array of the Bindlib variables in [env]. Note that + the order is reversed: [vars [(xn,an);..;(x1,a1)] = [|x1;..;xn|]]. *) let vars : env -> tvar array = fun env -> - let f (_, (x, _, u)) = if u = None then Some(x) else None in - Array.of_list (List.filter_rev_map f env) + Array.of_list (List.rev_map (fun (_,(x,_,_)) -> x) env) (** [appl t env] applies [t] to the variables of [env]. *) let appl : tbox -> env -> tbox = fun t env -> List.fold_right (fun (_,(x,_,_)) t -> _Appl t (_Vari x)) env t -(** [to_tbox env] extracts the array of the {e not defined} variables in [env] - and injects them in the [tbox] type. This is the same as [Array.map _Vari - (vars env)]. Note that the order is reversed: [to_tbox [(xn,an);..;(x1,a1)] - = [|x1;..;xn|]]. *) +(** [to_tbox env] extracts the array of the variables in [env] and injects + them in [tbox]. This is the same as [Array.map _Vari (vars env)]. Note + that the order is reversed: [to_tbox [(xn,an);..;(x1,a1)] = + [|x1;..;xn|]]. *) let to_tbox : env -> tbox array = fun env -> - let f (_, (x, _, u)) = if u = None then Some(_Vari x) else None in - Array.of_list (List.filter_rev_map f env) + Array.of_list (List.rev_map (fun (_,(x,_,_)) -> _Vari x) env) (** [to_ctxt e] converts an environment into a context. *) let to_ctxt : env -> ctxt = @@ -88,12 +84,14 @@ let to_ctxt : env -> ctxt = (** [match_prod c t f] returns [f a b] if [t] matches [Prod(a,b)] possibly after reduction. @raise [Invalid_argument] if [t] is not a product. 
*) -let match_prod : ctxt -> term -> (term -> tbinder -> 'a) -> 'a = fun c t f -> +let match_prod : ctxt -> term -> (term -> term option -> tbinder -> 'a) -> 'a + = fun c t f -> match unfold t with - | Prod(a,b) -> f a b + | Prod(a,b) -> f a None b + | LLet(a,d,b) -> f a (Some d) b | _ -> match Eval.whnf c t with - | Prod(a,b) -> f a b + | Prod(a,b) -> f a None b | _ -> invalid_arg __LOC__ (** [of_prod c s t] returns a tuple [(env,b)] where [b] is constructed from @@ -105,10 +103,11 @@ let match_prod : ctxt -> term -> (term -> tbinder -> 'a) -> 'a = fun c t f -> let of_prod : ctxt -> string -> term -> env * term = fun c s t -> let i = Stdlib.ref (-1) in let rec build_env env t = - try match_prod c t (fun a b -> + try match_prod c t (fun a d b -> let name = Stdlib.(incr i; s ^ string_of_int !i) in let x, b = LibTerm.unbind_name name b in - build_env (add x (lift a) None env) b) + build_env (add x (lift a) (Option.map lift d) env) b + ) with Invalid_argument _ -> env, t in build_env [] t @@ -123,9 +122,10 @@ let of_prod : ctxt -> string -> term -> env * term = fun c s t -> let of_prod_nth : ctxt -> int -> term -> env * term = fun c n t -> let rec build_env i env t = if i >= n then env, t - else match_prod c t (fun a b -> + else match_prod c t (fun a d b -> let x, b = Bindlib.unbind b in - build_env (i+1) (add x (lift a) None env) b) + build_env (i+1) (add x (lift a) (Option.map lift d) env) b + ) in build_env 0 [] t (** [of_prod_using c xs t] is similar to [of_prod s c n t] where [n = @@ -137,7 +137,8 @@ let of_prod_using : ctxt -> tvar array -> term -> env * term = fun c xs t -> let n = Array.length xs in let rec build_env i env t = if i >= n then env, t - else match_prod c t (fun a b -> - let env = add xs.(i) (lift a) None env in - build_env (i+1) env (Bindlib.subst b (mk_Vari(xs.(i))))) + else match_prod c t (fun a d b -> + let env = add xs.(i) (lift a) (Option.map lift d) env in + build_env (i+1) env (Bindlib.subst b (mk_Vari(xs.(i)))) + ) in build_env 0 [] t diff --git a/src/core/eval.ml b/src/core/eval.ml index 13b9f808b..8872d2777 100644 --- a/src/core/eval.ml +++ b/src/core/eval.ml @@ -122,9 +122,12 @@ let eq_modulo : (config -> term -> term) -> config -> term -> term -> bool = | [] -> () | (a,b)::l -> if Logger.log_enabled () then log_conv "eq: %a ≡ %a" term a term b; - let a = Config.unfold cfg a and b = Config.unfold cfg b in if a == b then eq cfg l else match a, b with + | Vari x, Vari y when Bindlib.eq_vars x y -> eq cfg l + | _ -> + let a = Config.unfold cfg a and b = Config.unfold cfg b in + match a, b with | LLet(_,t,u), _ -> let x,u = Bindlib.unbind u in eq {cfg with varmap = VarMap.add x t cfg.varmap} ((u,b)::l) @@ -526,20 +529,21 @@ let whnf : reducer = fun ?tags c t -> let whnf = time_reducer whnf -(** [simplify t] computes a beta whnf of [t] belonging to the set S such that: -- terms of S are in beta whnf normal format -- if [t] is a product, then both its domain and codomain are in S. *) -let rec simplify : term -> term = fun t -> +(** [simplify c t] computes a beta whnf of [t] in context [c] belonging to the + set S such that (1) terms of S are in beta whnf normal format, (2) if [t] + is a product, then both its domain and codomain are in S. 
*) +let simplify : ctxt -> term -> term = fun c -> let tags = [`NoRw; `NoExpand ] in - match get_args (whnf ~tags [] t) with - | Prod(a,b), _ -> - let x, b = Bindlib.unbind b in - mk_Prod (simplify a, bind x lift (simplify b)) - | h, ts -> add_args_map h (whnf ~tags []) ts + let rec simp t = + match get_args (whnf ~tags c t) with + | Prod(a,b), _ -> + let x, b = Bindlib.unbind b in mk_Prod (simp a, bind x lift (simp b)) + | h, ts -> add_args_map h (whnf ~tags c) ts + in simp let simplify = - let open Stdlib in let r = ref mk_Kind in fun t -> - Debug.(record_time Rewriting (fun () -> r := simplify t)); !r + let open Stdlib in let r = ref mk_Kind in fun c t -> + Debug.(record_time Rewriting (fun () -> r := simplify c t)); !r (** If [s] is a non-opaque symbol having a definition, [unfold_sym s t] replaces in [t] all the occurrences of [s] by its definition. *) diff --git a/src/core/eval.mli b/src/core/eval.mli index e942789e6..1220ce9de 100644 --- a/src/core/eval.mli +++ b/src/core/eval.mli @@ -65,10 +65,10 @@ val snf : ?dtree:(sym -> dtree) -> ?tags:rw_tag list -> ctxt -> term -> term context [c]. *) val hnf : ?tags:rw_tag list -> ctxt -> term -> term -(** [simplify t] computes a beta whnf of [t] belonging to the set S such that: - - terms of S are in beta whnf normal format - - if [t] is a product, then both its domain and codomain are in S. *) -val simplify : term -> term +(** [simplify c t] computes a beta whnf of [t] in context [c] belonging to the + set S such that (1) terms of S are in beta whnf normal format, (2) if [t] + is a product, then both its domain and codomain are in S. *) +val simplify : ctxt -> term -> term (** If [s] is a non-opaque symbol having a definition, [unfold_sym s t] replaces in [t] all the occurrences of [s] by its definition. *) diff --git a/src/core/infer.ml b/src/core/infer.ml index afa90382f..ac23cef03 100644 --- a/src/core/infer.ml +++ b/src/core/infer.ml @@ -15,18 +15,16 @@ type octxt = ctxt * bctxt let boxed = snd let classic = fst let extend (cctx, bctx) v ?def ty = - ((v, ty, def) :: cctx, if def <> None then bctx else (v, lift ty) :: bctx) + ((v, ty, def) :: cctx, (v, lift ty, Option.map lift def) :: bctx) let unbox = Bindlib.unbox (** Exception that may be raised by type inference. *) exception NotTypable -(** [unif pb c a b] solves the unification problem [c ⊢ a ≡ b]. Current - implementation collects constraints in {!val:constraints} then solves - them at the end of type checking. *) -let unif : problem -> octxt -> term -> term -> unit = - fun pb c a b -> +(** [unif p c a b] adds the constraint [c |- a==b] in [p] if [a] is not + convertible to [b]. *) +let unif : problem -> octxt -> term -> term -> unit = fun pb c a b -> if not (Eval.pure_eq_modulo (classic c) a b) then (* NOTE: eq_modulo is used because the unification algorithm counts on the fact that no constraint is added in some cases (see test @@ -34,8 +32,7 @@ let unif : problem -> octxt -> term -> term -> unit = [eq_modulo]. 
*) begin if Logger.log_enabled () then - log (Color.yel "add constraint %a") constr - (classic c, a, b); + log (Color.yel "add constraint %a") constr (classic c, a, b); pb := {!pb with to_solve = (classic c, a, b) :: !pb.to_solve} end @@ -177,8 +174,10 @@ and infer_aux : problem -> octxt -> term -> term * term * bool = let (tsi, cuf) = force pb c ts.(i) ai in ts.(i) <- tsi; Stdlib.(cu := !cu || cuf); - let b = Bindlib.subst b ts.(i) in - ref_esubst (i + 1) b + ref_esubst (i+1) (Bindlib.subst b tsi) + | LLet(_,d,b) -> + unif pb c ts.(i) d; + ref_esubst (i+1) (Bindlib.subst b d) | _ -> (* Meta type must be a product of arity greater or equal to the environment *) diff --git a/src/core/libMeta.ml b/src/core/libMeta.ml index a1e72ea3a..13aea9bd9 100644 --- a/src/core/libMeta.ml +++ b/src/core/libMeta.ml @@ -47,34 +47,32 @@ let set : problem -> meta -> tmbinder -> unit = fun p m v -> (** [make p ctx a] creates a fresh metavariable term of type [a] in the context [ctx], and adds it to [p]. *) let make : problem -> ctxt -> term -> term = fun p ctx a -> - let a, k = Ctxt.to_prod ctx a in + let a,k = Ctxt.to_prod ctx a in let m = fresh p a k in - let get_var (x,_,d) = if d = None then Some (mk_Vari x) else None in - mk_Meta(m, Array.of_list (List.filter_rev_map get_var ctx)) + mk_Meta(m, Array.of_list (List.rev_map (fun (x,_,_) -> mk_Vari x) ctx)) (** [bmake p bctx a] is the boxed version of {!make}: it creates a fresh {e boxed} metavariable in {e boxed} context [bctx] of {e boxed} type [a]. It is the same as [lift (make p c b)] (provided that [bctx] is boxed [c] and [a] is boxed [b]), but more efficient. *) let bmake : problem -> bctxt -> tbox -> tbox = fun p bctx a -> - let (a, k) = Ctxt.to_prod_box bctx a in + let a,k = Ctxt.to_prod_box bctx a in let m = fresh_box p a k in - let get_var (x, _) = _Vari x in - _Meta_full m (Array.of_list (List.rev_map get_var bctx)) + _Meta_full m (Array.of_list (List.rev_map (fun (x,_,_) -> _Vari x) bctx)) (** [make_codomain p ctx a] creates a fresh metavariable term of type [Type] in the context [ctx] extended with a fresh variable of type [a], and updates [p] with generated metavariables. *) let make_codomain : problem -> ctxt -> term -> tbinder = fun p ctx a -> let x = new_tvar "x" in - bind x lift (make p ((x, a, None) :: ctx) mk_Type) + bind x lift (make p ((x,a,None)::ctx) mk_Type) (** [bmake_codomain p bctx a] is [make_codomain p bctx a] but on boxed terms. *) let bmake_codomain : problem -> bctxt -> tbox -> tbinder Bindlib.box = fun p bctx a -> let x = new_tvar "x" in - let b = bmake p ((x, a) :: bctx) _Type in + let b = bmake p ((x,a,None)::bctx) _Type in Bindlib.bind_var x b (** [iter b f c t] applies the function [f] to every metavariable of [t] and, diff --git a/src/core/libTerm.ml b/src/core/libTerm.ml index da2793c24..39d3e7efe 100644 --- a/src/core/libTerm.ml +++ b/src/core/libTerm.ml @@ -98,20 +98,17 @@ let distinct_vars : ctxt -> term array -> tvar array option = fun ctx ts -> try Some (Array.map to_var ts) with Not_unique_var -> None (** If [ts] is not made of variables or function symbols prefixed by ['$'] - only, then [nl_distinct_vars ctx ts] returns [None]. Otherwise, it returns + only, then [nl_distinct_vars ts] returns [None]. Otherwise, it returns a pair [(vs, map)] where [vs] is an array of variables made of the linear variables of [ts] and fresh variables for the non-linear variables and the symbols prefixed by ['$'], and [map] records by which variable each linear symbol prefixed by ['$'] is replaced. 
- Variables defined in [ctx] are unfolded. - The symbols prefixed by ['$'] are introduced by [infer.ml] which converts metavariables into fresh symbols, and those metavariables are introduced by [sr.ml] which replaces pattern variables by metavariables. *) -let nl_distinct_vars - : ctxt -> term array -> (tvar array * tvar StrMap.t) option = - fun ctx ts -> +let nl_distinct_vars : term array -> (tvar array * tvar StrMap.t) option = + fun ts -> let exception Not_a_var in let open Stdlib in let vars = ref VarSet.empty (* variables already seen (linear or not) *) @@ -119,7 +116,7 @@ let nl_distinct_vars and patt_vars = ref StrMap.empty in (* map from pattern variables to actual Bindlib variables *) let rec to_var t = - match Ctxt.unfold ctx t with + match unfold t with | Vari(v) -> if VarSet.mem v !vars then nl_vars := VarSet.add v !nl_vars else vars := VarSet.add v !vars; diff --git a/src/core/term.ml b/src/core/term.ml index ae85b0d3b..57b2a3ca8 100644 --- a/src/core/term.ml +++ b/src/core/term.ml @@ -281,11 +281,8 @@ end important. *) type ctxt = (tvar * term * term option) list -(** Typing context with lifted terms. Used to optimise type checking and avoid - lifting terms several times. Definitions are not included because these - contexts are used to create meta variables types, which do not use [let] - definitions. *) -type bctxt = (tvar * tbox) list +(** Typing context with lifted terms. *) +type bctxt = (tvar * tbox * tbox option) list (** Type of unification constraints. *) type constr = ctxt * term * term @@ -507,9 +504,7 @@ let mk_Patt (i,s,ts) = Patt (i,s,ts) let mk_Wild = Wild let mk_Plac b = Plac b let mk_TRef x = TRef x - -let mk_LLet (a,t,u) = - if Bindlib.binder_constant u then Bindlib.subst u Kind else LLet (a,t,u) +let mk_LLet (a,t,u) = LLet (a,t,u) let mk_TEnv (te,ts) = match te with @@ -715,7 +710,7 @@ let _Plac : bool -> tbox = fun b -> let _TRef : term option ref -> tbox = fun r -> Bindlib.box (TRef(r)) -(** [_LLet t a u] lifts let binding [let x := t : a in u]. *) +(** [_LLet a t u] lifts let binding [let x : a := t in u]. *) let _LLet : tbox -> tbox -> tbinder Bindlib.box -> tbox = Bindlib.box_apply3 (fun a t u -> mk_LLet(a, t, u)) diff --git a/src/core/term.mli b/src/core/term.mli index f6ba19690..f17e722b8 100644 --- a/src/core/term.mli +++ b/src/core/term.mli @@ -260,11 +260,8 @@ end first). *) type ctxt = (tvar * term * term option) list -(** Typing context with lifted terms. Used to optimise type checking and avoid - lifting terms several times. Definitions are not included because these - contexts are used to create meta variables types, which do not use [let] - definitions. *) -type bctxt = (tvar * tbox) list +(** Typing context with lifted terms. *) +type bctxt = (tvar * tbox * tbox option) list (** Type of unification constraints. *) type constr = ctxt * term * term @@ -488,7 +485,7 @@ val _Plac : bool -> tbox should be the case that [!r] is [None]. *) val _TRef : term option ref -> tbox -(** [_LVal t a u] lifts val binding [val x := t : a in u]. *) +(** [_LLet a t u] lifts let binding [let x : a := t in u]. 
*) val _LLet : tbox -> tbox -> tbinder Bindlib.box -> tbox (** [_TE_Vari x] injects a term environment variable [x] into the {!type:tbox} diff --git a/src/core/unif.ml b/src/core/unif.ml index 8b96d18aa..de901a55c 100644 --- a/src/core/unif.ml +++ b/src/core/unif.ml @@ -39,10 +39,15 @@ let set_to_prod : problem -> meta -> unit = fun p m -> in context [c] where [x] is any term of type [a] if [x] can be applied to at least [List.length ts] arguments, and [None] otherwise. *) let rec type_app : ctxt -> term -> term list -> term option = fun c a ts -> - match Eval.whnf c a, ts with - | Prod(_,b), t::ts -> type_app c (Bindlib.subst b t) ts + match a, ts with | _, [] -> Some a - | _, _ -> None + | Prod(_,b), t::ts -> type_app c (Bindlib.subst b t) ts + | LLet(_,d,b), t::ts -> + assert (Eval.pure_eq_modulo c d t); type_app c (Bindlib.subst b d) ts + | _ -> + match Eval.whnf c a, ts with + | Prod(_,b), t::ts -> type_app c (Bindlib.subst b t) ts + | _ -> None (** [add_constr p c] adds the constraint [c] into [p.to_solve]. *) let add_constr : problem -> constr -> unit = fun p c -> @@ -92,7 +97,7 @@ let try_unif_rules : problem -> ctxt -> term -> term -> bool = be instantiated. It does not check whether the instantiation is closed though. *) let instantiable : ctxt -> meta -> term array -> term -> bool = - fun c m ts u -> nl_distinct_vars c ts <> None && not (LibMeta.occurs m c u) + fun c m ts u -> nl_distinct_vars ts <> None && not (LibMeta.occurs m c u) (** [instantiation c m ts u] tells whether, in a problem [m[ts]=u], [m] can be instantiated and returns the corresponding instantiation, simplified. It @@ -100,11 +105,11 @@ let instantiable : ctxt -> meta -> term array -> term -> bool = let instantiation : ctxt -> meta -> term array -> term -> tmbinder Bindlib.box option = fun c m ts u -> - match nl_distinct_vars c ts with + match nl_distinct_vars ts with | None -> None | Some(vs, map) -> if LibMeta.occurs m c u then None - else let u = Eval.simplify (Ctxt.to_let c (sym_to_var map u)) in + else let u = Eval.simplify c (sym_to_var map u) in Some (Bindlib.bind_mvar vs (lift u)) (** Checking type or not during meta instanciation. 
*) @@ -253,7 +258,7 @@ let imitate_lam : problem -> ctxt -> meta -> unit = fun p c m -> let x, a, env', b = match Eval.whnf c t with | Prod(a,b) -> of_prod a b - | Meta(n,ts) as t when nl_distinct_vars c ts <> None -> + | Meta(n,ts) as t when nl_distinct_vars ts <> None -> begin set_to_prod p n; match unfold t with @@ -406,10 +411,10 @@ let solve : problem -> unit = fun p -> imitate_prod p c m h1 h2 | Meta(m,ts), _ when imitate_lam_cond h1 ts1 - && nl_distinct_vars c ts <> None -> + && nl_distinct_vars ts <> None -> imitate_lam p c m; add_constr p (c,t1,t2) | _, Meta(m,ts) when imitate_lam_cond h2 ts2 - && nl_distinct_vars c ts <> None -> + && nl_distinct_vars ts <> None -> imitate_lam p c m; add_constr p (c,t1,t2) | _ -> @@ -460,10 +465,10 @@ let solve : problem -> unit = fun p -> imitate_prod p c m h1 h2 | Meta(m,ts), _ when imitate_lam_cond h1 ts1 - && nl_distinct_vars c ts <> None -> + && nl_distinct_vars ts <> None -> imitate_lam p c m; add_constr p (c,t1,t2) | _, Meta(m,ts) when imitate_lam_cond h2 ts2 - && nl_distinct_vars c ts <> None -> + && nl_distinct_vars ts <> None -> imitate_lam p c m; add_constr p (c,t1,t2) | Meta(m,ts), Symb s -> diff --git a/src/handle/proof.ml b/src/handle/proof.ml index 3a6516509..9d50ef05e 100644 --- a/src/handle/proof.ml +++ b/src/handle/proof.ml @@ -46,10 +46,11 @@ module Goal = struct Typ {goal_meta = m; goal_hyps; goal_type} (** [simpl f g] simplifies the goal [g] with the function [f]. *) - let simpl : (term -> term) -> goal -> goal = fun f g -> + let simpl : (ctxt -> term -> term) -> goal -> goal = fun f g -> match g with - | Typ gt -> Typ {gt with goal_type = f gt.goal_type} - | Unif (c,t,u) -> Unif (c, f t, f u) + | Typ gt -> + Typ {gt with goal_type = f (Env.to_ctxt gt.goal_hyps) gt.goal_type} + | Unif (c,t,u) -> Unif (c, f c t, f c u) (** [bindlib_ctxt g] computes a Bindlib context from a goal. *) let bindlib_ctxt : goal -> Bindlib.ctxt = fun g -> @@ -83,8 +84,10 @@ module Goal = struct in match g with | Typ gt -> - let elt ppf (s,(_,t,_)) = - out ppf "%a: %a" uid s term (Bindlib.unbox t) + let elt ppf (s,(_,t,u)) = + match u with + | None -> out ppf "%a: %a" uid s term (Bindlib.unbox t) + | Some u -> out ppf "%a ≔ %a" uid s term (Bindlib.unbox u) in hyps elt ppf gt.goal_hyps | Unif (c,_,_) -> diff --git a/src/handle/tactic.ml b/src/handle/tactic.ml index c14f55c5d..69224f8bc 100644 --- a/src/handle/tactic.ml +++ b/src/handle/tactic.ml @@ -198,10 +198,10 @@ let rec handle : | P_tac_query _ -> assert false (* done before *) (* Tactics that apply to both unification and typing goals: *) | P_tac_simpl None -> - {ps with proof_goals = Goal.simpl (Eval.snf []) g :: gs} + {ps with proof_goals = Goal.simpl Eval.snf g :: gs} | P_tac_simpl (Some qid) -> let s = Sig_state.find_sym ~prt:true ~prv:true ss qid in - {ps with proof_goals = Goal.simpl (Eval.unfold_sym s) g :: gs} + {ps with proof_goals = Goal.simpl (fun _ -> Eval.unfold_sym s) g :: gs} | P_tac_solve -> tac_solve pos ps | _ -> (* Tactics that apply to typing goals only: *) @@ -281,7 +281,7 @@ let rec handle : let p = new_problem() in let t = scope t in (* Generate the constraints for [t] to be of type [Type]. *) - let c = Env.to_ctxt gt.goal_hyps in + let c = Env.to_ctxt env in begin match Infer.check_noexn p c t mk_Type with | None -> fatal pos "%a is not of type Type." 
term t @@ -300,6 +300,30 @@ let rec handle : let u = Bindlib.unbox (_Meta m2 (Array.append ts [|_Meta m1 ts|])) in tac_refine pos ps gt gs p u end + | P_tac_set(id,t) -> + (* From a goal [e ⊢ ?[e]:a], generate a new goal [e,id:b ⊢ ?1[e,x]:a], + where [b] is the type of [t], and refine [?[e]] with [?1[e,t]]. *) + check id; + let p = new_problem() in + let t = scope t in + let c = Env.to_ctxt env in + begin + match Infer.infer_noexn p c t with + | None -> fatal pos "%a is not typable." term t + | Some (t,b) -> + let x = new_tvar id.elt in + let bt = lift t in + let e' = Env.add x (lift b) (Some bt) env in + let a = lift gt.goal_type in + let m = LibMeta.fresh p (Env.to_prod e' a) (List.length e') in + let u = _Meta m (Array.append (Env.to_tbox env) [|bt|]) in + (*tac_refine pos ps gt gs p (Bindlib.unbox u)*) + LibMeta.set p gt.goal_meta + (Bindlib.unbox (Bindlib.bind_mvar (Env.vars env) u)); + (*let g = Goal.of_meta m in*) + let g = Typ {goal_meta=m; goal_hyps=e'; goal_type=gt.goal_type} in + {ps with proof_goals = g :: gs} + end | P_tac_induction -> tac_induction pos ps gt gs | P_tac_refine t -> tac_refine pos ps gt gs (new_problem()) (scope t) | P_tac_refl -> @@ -325,11 +349,10 @@ let rec handle : | Unif _ -> assert false | Typ gt -> let k = - try List.pos (fun (s,_) -> s = id.elt) gt.goal_hyps + try List.pos (fun (s,_) -> s = id.elt) env with Not_found -> fatal id.pos "Unknown hypothesis." in let m = gt.goal_meta in - let env = gt.goal_hyps in let n = m.meta_arity - 1 in let a = cleanup !(m.meta_type) in (* cleanup necessary *) let b = LibTerm.codom_binder (n - k) a in @@ -349,7 +372,7 @@ let rec handle : (* Reorder [ids] wrt their positions. *) let n = gt.goal_meta.meta_arity - 1 in let id_pos id = - try id, n - List.pos (fun (s,_) -> s = id.elt) gt.goal_hyps + try id, n - List.pos (fun (s,_) -> s = id.elt) env with Not_found -> fatal id.pos "Unknown hypothesis." in let cmp (_,k1) (_,k2) = Stdlib.compare k2 k1 in @@ -373,7 +396,7 @@ let rec handle : let mt = mk_Appl(mk_Symb cfg.symb_P, add_args (mk_Symb cfg.symb_eq) [a; r; l]) in - let meta_term = LibMeta.make p (Env.to_ctxt gt.goal_hyps) mt in + let meta_term = LibMeta.make p (Env.to_ctxt env) mt in (* The proofterm is [eqind a r l M (λx,eq a l x) (refl a l)]. 
*) Rewrite.swap cfg a r l meta_term in diff --git a/src/lsp/lp_doc.ml b/src/lsp/lp_doc.ml index 4bd40cce8..df92ce330 100644 --- a/src/lsp/lp_doc.ml +++ b/src/lsp/lp_doc.ml @@ -65,7 +65,8 @@ let process_pstep (pstate,diags,logs) tac nb_subproofs = pstate, (tac_loc, 4, qres, goals) :: diags, logs | Tac_Error(loc,msg) -> let loc = option_default loc tac_loc in - pstate, (loc, 1, msg, None) :: diags, ((1, msg), loc) :: logs + let goals = Some (current_goals pstate) in + pstate, (loc, 1, msg, goals) :: diags, ((1, msg), loc) :: logs let process_proof pstate tacs logs = Pure.ProofTree.fold process_pstep (pstate,[],logs) tacs diff --git a/src/parsing/lpLexer.ml b/src/parsing/lpLexer.ml index 6c23af3c9..ef1e3432d 100644 --- a/src/parsing/lpLexer.ml +++ b/src/parsing/lpLexer.ml @@ -76,6 +76,7 @@ type token = | RULE | SEARCH | SEQUENTIAL + | SET | SIMPLIFY | SOLVE | SYMBOL @@ -249,6 +250,7 @@ let rec token lb = | "rule" -> RULE | "search" -> SEARCH | "sequential" -> SEQUENTIAL + | "set" -> SET | "simplify" -> SIMPLIFY | "solve" -> SOLVE | "symbol" -> SYMBOL diff --git a/src/parsing/lpParser.mly b/src/parsing/lpParser.mly index c25a151fc..c95a03c2d 100644 --- a/src/parsing/lpParser.mly +++ b/src/parsing/lpParser.mly @@ -69,6 +69,7 @@ %token RULE %token SEARCH %token SEQUENTIAL +%token SET %token SIMPLIFY %token SOLVE %token SYMBOL @@ -340,6 +341,7 @@ tactic: | FAIL { make_pos $sloc P_tac_fail } | GENERALIZE i=uid { make_pos $sloc (P_tac_generalize i) } | HAVE i=uid COLON t=term { make_pos $sloc (P_tac_have(i,t)) } + | SET i=uid ASSIGN t=term { make_pos $sloc (P_tac_set(i,t)) } | INDUCTION { make_pos $sloc P_tac_induction } | REFINE t=term { make_pos $sloc (P_tac_refine t) } | REFLEXIVITY { make_pos $sloc P_tac_refl } diff --git a/src/parsing/pretty.ml b/src/parsing/pretty.ml index f41b97ba2..4b8a02415 100644 --- a/src/parsing/pretty.ml +++ b/src/parsing/pretty.ml @@ -266,6 +266,7 @@ let rec tactic : p_tactic pp = fun ppf { elt; _ } -> | P_tac_fail -> out ppf "fail" | P_tac_generalize id -> out ppf "generalize %a" ident id | P_tac_have (id, t) -> out ppf "have %a: %a" ident id term t + | P_tac_set (id, t) -> out ppf "set %a ≔ %a" ident id term t | P_tac_induction -> out ppf "induction" | P_tac_query q -> query ppf q | P_tac_refine t -> out ppf "refine %a" term t diff --git a/src/parsing/syntax.ml b/src/parsing/syntax.ml index 3da081297..5ceca04f4 100644 --- a/src/parsing/syntax.ml +++ b/src/parsing/syntax.ml @@ -240,6 +240,7 @@ type p_tactic_aux = | P_tac_fail | P_tac_generalize of p_ident | P_tac_have of p_ident * p_term + | P_tac_set of p_ident * p_term | P_tac_induction | P_tac_query of p_query | P_tac_refine of p_term @@ -605,7 +606,8 @@ let fold_idents : ('a -> p_qident -> 'a) -> 'a -> p_command list -> 'a = | P_tac_assume idopts -> (add_idopts vs idopts, a) | P_tac_remove ids -> (List.fold_left (fun vs id -> StrSet.add id.elt vs) vs ids, a) - | P_tac_have(id,t) -> (StrSet.add id.elt vs, fold_term_vars vs a t) + | P_tac_have(id,t) + | P_tac_set(id,t) -> (StrSet.add id.elt vs, fold_term_vars vs a t) | P_tac_simpl (Some qid) -> (vs, f a qid) | P_tac_simpl None | P_tac_admit diff --git a/src/pure/pure.ml b/src/pure/pure.ml index 192f6e7cd..0ab9a9d16 100644 --- a/src/pure/pure.ml +++ b/src/pure/pure.ml @@ -101,8 +101,20 @@ let string_of_goal : Proof.goal -> goal = let open Print in let bctx = Proof.Goal.bindlib_ctxt g in let term = term_in bctx in - let env_elt (s,(_,t,_)) = s, to_string term (Bindlib.unbox t) in - let ctx_elt (x,a,_) = to_string var x, to_string term a in + let env_elt (s,(_,t,d)) = 
+ let t = to_string term (Bindlib.unbox t) in + s, + match d with + | None -> t + | Some d -> t^" ≔ "^to_string term (Bindlib.unbox d) + in + let ctx_elt (x,a,d) = + let a = to_string term a in + to_string var x, + match d with + | None -> a + | Some d -> a^" ≔ "^to_string term d + in match g with | Proof.Typ gt -> let meta = to_string meta gt.goal_meta in diff --git a/tests/OK/1026.lp b/tests/OK/1026.lp index 1feb037e1..b1dd297a1 100644 --- a/tests/OK/1026.lp +++ b/tests/OK/1026.lp @@ -16,13 +16,13 @@ builtin "eq" ≔ =; builtin "refl" ≔ eq_refl; builtin "eqind" ≔ ind_eq; -symbol set: Set; -symbol cset [T]: (τ T → Prop) → τ set; +symbol _set: Set; +symbol cset [T]: (τ T → Prop) → τ _set; symbol T: Set; symbol P: τ T → Prop; -symbol U: τ set; -symbol Q: τ set → Prop; +symbol U: τ _set; +symbol Q: τ _set → Prop; symbol th: π (cset (λ x, P x) = U) → π (Q U) → π (Q (cset (λ x, P x))) ≔ begin assume h1 h2; diff --git a/tests/OK/1101.lp b/tests/OK/1101.lp new file mode 100644 index 000000000..c428b9a0a --- /dev/null +++ b/tests/OK/1101.lp @@ -0,0 +1,73 @@ +constant symbol Set : TYPE; +injective symbol τ : Set → TYPE; // `t or \tau +builtin "T" ≔ τ; +constant symbol ↦ : Set → Set → Set; // \mapsto +notation ↦ infix right 20; +rule τ($a ↦ $b) ↪ τ $a → τ $b; + +constant symbol Prop : TYPE; +builtin "Prop" ≔ Prop; +injective symbol π : Prop → TYPE; // `p +builtin "P" ≔ π; + +constant symbol ⊤ : Prop; // \top +constant symbol top : π ⊤; +constant symbol ⊥ : Prop; // \bot +constant symbol ⊥ₑ [p] : π ⊥ → π p; +constant symbol ⇒ : Prop → Prop → Prop; // => +notation ⇒ infix right 5; +rule π ($p ⇒ $q) ↪ π $p → π $q; +symbol ¬ p ≔ p ⇒ ⊥; // ~~ or \neg +constant symbol ∧ : Prop → Prop → Prop; // /\ or \wedge +notation ∧ infix left 7; +constant symbol ∧ᵢ [p q] : π p → π q → π (p ∧ q); +symbol ∧ₑ₁ [p q] : π (p ∧ q) → π p; +symbol ∧ₑ₂ [p q] : π (p ∧ q) → π q; +constant symbol ∨ : Prop → Prop → Prop; // \/ or \vee +notation ∨ infix left 6; +constant symbol ∨ᵢ₁ [p q] : π p → π (p ∨ q); +constant symbol ∨ᵢ₂ [p q] : π q → π (p ∨ q); +symbol ∨ₑ [p q r] : π (p ∨ q) → (π p → π r) → (π q → π r) → π r; + +constant symbol = [a] : τ a → τ a → Prop; +notation = infix 10; +constant symbol eq_refl [a] (x:τ a) : π (x = x); +constant symbol ind_eq [a] [x y:τ a] : π (x = y) → Π p, π (p y) → π (p x); +builtin "eq" ≔ =; +builtin "refl" ≔ eq_refl; +builtin "eqind" ≔ ind_eq; + +symbol nat:Set; +symbol 0:τ nat; +symbol +:τ(nat ↦ nat ↦ nat); +notation + infix right 20; +rule $x + 0 ↪ $x; +symbol f:τ(nat ↦ (nat ↦ nat) ↦ nat); +symbol add_com a b : π(a + b = b + a); + +symbol test1 a b : π (f (a + b) (λ _, 0) = f (b + a) (λ _, 0)) +≔ begin + assume a b; + rewrite add_com; + reflexivity +end; + +symbol test2' a b : π (f (a + b) (λ _, 0) = f (b + a) (λ _, 0)) +≔ begin + assume a b; + simplify; + set p ≔ (a + b); + set g ≔ (a + b + 0 + 0); + //type p; + //compute p; + //compute g; + have foo: π (p = g) { + //simplify p; + //simplify q; + //compute p; + //compute g; + reflexivity; + }; + rewrite add_com; + reflexivity +end; diff --git a/tests/OK/Escaped.lp b/tests/OK/Escaped.lp index 84a910339..bf0a5eb63 100644 --- a/tests/OK/Escaped.lp +++ b/tests/OK/Escaped.lp @@ -8,5 +8,5 @@ symbol {|KIND|} : TYPE; symbol {|KIND.OF.BLUE|}: TYPE; // Escaped with some rules -symbol {|set|} : TYPE; -rule {|set|} ↪ {|KIND|}; +symbol {|_set|} : TYPE; +rule {|_set|} ↪ {|KIND|}; diff --git a/tests/dtrees.sh b/tests/dtrees.sh index 7ada1a09b..e86c02be6 100755 --- a/tests/dtrees.sh +++ b/tests/dtrees.sh @@ -41,11 +41,11 @@ if [ -z "$out" ]; then fi # Escaped 
identifier with rules -out="$(${LAMBDAPI} 'tests.OK.Escaped.{|set|}' 2>/dev/null)" +out="$(${LAMBDAPI} 'tests.OK.Escaped.{|_set|}' 2>/dev/null)" if [ "$?" = 1 ] || [ -z "$out" ]; then - ko 'tests.OK.Escaped.{|set|}' + ko 'tests.OK.Escaped.{|_set|}' else - ok 'tests.OK.Escaped.{|set|}' + ok 'tests.OK.Escaped.{|_set|}' fi # Ghost symbol diff --git a/tests/export_raw_dk.sh b/tests/export_raw_dk.sh index 3425632b2..05bcf28d7 100755 --- a/tests/export_raw_dk.sh +++ b/tests/export_raw_dk.sh @@ -52,7 +52,7 @@ do # "as" 729);; # "notation" - xor|Set|quant*|Prop|prefix|parametricCoercions|opaque|nat_id*|michael|max-suc-alg|lpparse|861|infix|infer|indrec|implicitArgs[34]|group|cr_qu|cp*|coercions|builtin_zero_succ|plus_ac|693|693_assume|679|665|655|655b|649_fo_27|595_and_elim|584_c_slow|579_or_elim_long|579_long_no_duplicate|359|328|245|245b|244|1026|706);; + xor|Set|quant*|Prop|prefix|parametricCoercions|opaque|nat_id*|michael|max-suc-alg|lpparse|861|infix|infer|indrec|implicitArgs[34]|group|cr_qu|cp*|coercions|builtin_zero_succ|plus_ac|693|693_assume|679|665|655|655b|649_fo_27|595_and_elim|584_c_slow|579_or_elim_long|579_long_no_duplicate|359|328|245|245b|244|1026|706|1101);; # "quantifier" 683|650|573|565|430);; # nested module name diff --git a/tests/regressions/hrs.expected b/tests/regressions/hrs.expected index 9de812644..14af77d9f 100644 --- a/tests/regressions/hrs.expected +++ b/tests/regressions/hrs.expected @@ -3,47 +3,40 @@ A : t -> t -> t L : t -> (t -> t) -> t P : t -> (t -> t) -> t B : t -> t -> (t -> t) -> t -tests_OK_boolean_B : t -tests_OK_boolean_bool_and : t -tests_OK_boolean_bool_impl : t -tests_OK_boolean_bool_neg : t -tests_OK_boolean_bool_or : t -tests_OK_boolean_bool_xor : t -tests_OK_boolean_false : t -tests_OK_boolean_true : t +tests_OK_group_1 : t +tests_OK_group_inv : t +tests_OK_group_⋅ : t ) (VAR $x : t $y : t -> t $z : t -$1_0 : t $2_0 : t $3_0 : t +$3_1 : t $4_0 : t +$4_1 : t +$4_2 : t +$5_0 : t +$6_0 : t $7_0 : t $8_0 : t $9_0 : t +$9_1 : t $10_0 : t -82 : t -83 : t -84 : t -85 : t -86 : t -87 : t +$10_1 : t ) (RULES A(L($x,$y),$z) -> $y($z), B($x,$z,$y) -> $y($z), -A(A(tests_OK_boolean_bool_and,tests_OK_boolean_true),$1_0) -> $1_0, -A(A(tests_OK_boolean_bool_and,tests_OK_boolean_false),$2_0) -> tests_OK_boolean_false, -A(A(tests_OK_boolean_bool_and,$3_0),tests_OK_boolean_true) -> $3_0, -A(A(tests_OK_boolean_bool_and,$4_0),tests_OK_boolean_false) -> tests_OK_boolean_false, -tests_OK_boolean_bool_impl -> L(tests_OK_boolean_B,\82.L(tests_OK_boolean_B,\83.A(A(tests_OK_boolean_bool_or,83),A(tests_OK_boolean_bool_neg,82)))), -A(tests_OK_boolean_bool_neg,tests_OK_boolean_true) -> tests_OK_boolean_false, -A(tests_OK_boolean_bool_neg,tests_OK_boolean_false) -> tests_OK_boolean_true, -A(A(tests_OK_boolean_bool_or,tests_OK_boolean_true),$7_0) -> tests_OK_boolean_true, -A(A(tests_OK_boolean_bool_or,tests_OK_boolean_false),$8_0) -> $8_0, -A(A(tests_OK_boolean_bool_or,$9_0),tests_OK_boolean_true) -> tests_OK_boolean_true, -A(A(tests_OK_boolean_bool_or,$10_0),tests_OK_boolean_false) -> $10_0, -tests_OK_boolean_bool_xor -> L(tests_OK_boolean_B,\84.L(tests_OK_boolean_B,\85.B(tests_OK_boolean_B,A(A(tests_OK_boolean_bool_and,84),A(tests_OK_boolean_bool_neg,85)),\86.B(tests_OK_boolean_B,A(A(tests_OK_boolean_bool_and,85),A(tests_OK_boolean_bool_neg,84)),\87.A(A(tests_OK_boolean_bool_or,86),87))))) +A(tests_OK_group_inv,tests_OK_group_1) -> tests_OK_group_1, +A(tests_OK_group_inv,A(tests_OK_group_inv,$2_0)) -> $2_0, +A(tests_OK_group_inv,A(A(tests_OK_group_⋅,$3_0),$3_1)) -> 
A(A(tests_OK_group_⋅,A(tests_OK_group_inv,$3_1)),A(tests_OK_group_inv,$3_0)), +A(A(tests_OK_group_⋅,A(A(tests_OK_group_⋅,$4_0),$4_1)),$4_2) -> A(A(tests_OK_group_⋅,$4_0),A(A(tests_OK_group_⋅,$4_1),$4_2)), +A(A(tests_OK_group_⋅,tests_OK_group_1),$5_0) -> $5_0, +A(A(tests_OK_group_⋅,$6_0),tests_OK_group_1) -> $6_0, +A(A(tests_OK_group_⋅,A(tests_OK_group_inv,$7_0)),$7_0) -> tests_OK_group_1, +A(A(tests_OK_group_⋅,$8_0),A(tests_OK_group_inv,$8_0)) -> tests_OK_group_1, +A(A(tests_OK_group_⋅,A(tests_OK_group_inv,$9_0)),A(A(tests_OK_group_⋅,$9_0),$9_1)) -> $9_1, +A(A(tests_OK_group_⋅,$10_0),A(A(tests_OK_group_⋅,A(tests_OK_group_inv,$10_0)),$10_1)) -> $10_1 ) diff --git a/tests/regressions/hrs.ml b/tests/regressions/hrs.ml index e87e0f209..cff2ca8d3 100644 --- a/tests/regressions/hrs.ml +++ b/tests/regressions/hrs.ml @@ -6,5 +6,5 @@ open Handle let () = Library.set_lib_root (Some "/tmp"); Timed.(Console.verbose := 0); - let sign = Compile.PureUpToSign.compile_file "../OK/boolean.lp" in + let sign = Compile.PureUpToSign.compile_file "../OK/group.lp" in Export.Hrs.sign Format.std_formatter sign diff --git a/tests/regressions/xtc.expected b/tests/regressions/xtc.expected index 7510feae8..dfe72a5d4 100644 --- a/tests/regressions/xtc.expected +++ b/tests/regressions/xtc.expected @@ -5,170 +5,162 @@ - tests.OK.boolean.bool_andtests.OK.boolean.true1_0 + tests.OK.group.invtests.OK.group.1 - 1_0 + tests.OK.group.1 - tests.OK.boolean.bool_andtests.OK.boolean.false2_0 + tests.OK.group.invtests.OK.group.inv2_0 - tests.OK.boolean.false + 2_0 - tests.OK.boolean.bool_and3_0tests.OK.boolean.true + tests.OK.group.invtests.OK.group.⋅3_03_1 - 3_0 + tests.OK.group.⋅tests.OK.group.inv3_1tests.OK.group.inv3_0 - tests.OK.boolean.bool_and4_0tests.OK.boolean.false + tests.OK.group.⋅tests.OK.group.⋅4_04_14_2 - tests.OK.boolean.false + tests.OK.group.⋅4_0tests.OK.group.⋅4_14_2 - tests.OK.boolean.bool_impl + tests.OK.group.⋅tests.OK.group.15_0 - 86tests.OK.boolean.B87tests.OK.boolean.Btests.OK.boolean.bool_or87tests.OK.boolean.bool_neg86 + 5_0 - tests.OK.boolean.bool_negtests.OK.boolean.true + tests.OK.group.⋅6_0tests.OK.group.1 - tests.OK.boolean.false + 6_0 - tests.OK.boolean.bool_negtests.OK.boolean.false + tests.OK.group.⋅tests.OK.group.inv7_07_0 - tests.OK.boolean.true + tests.OK.group.1 - tests.OK.boolean.bool_ortests.OK.boolean.true7_0 + tests.OK.group.⋅8_0tests.OK.group.inv8_0 - tests.OK.boolean.true + tests.OK.group.1 - tests.OK.boolean.bool_ortests.OK.boolean.false8_0 + tests.OK.group.⋅tests.OK.group.inv9_0tests.OK.group.⋅9_09_1 - 8_0 + 9_1 - tests.OK.boolean.bool_or9_0tests.OK.boolean.true + tests.OK.group.⋅10_0tests.OK.group.⋅tests.OK.group.inv10_010_1 - tests.OK.boolean.true - - - - - tests.OK.boolean.bool_or10_0tests.OK.boolean.false - - - 10_0 - - - - - tests.OK.boolean.bool_xor - - - 92tests.OK.boolean.B93tests.OK.boolean.B94tests.OK.boolean.B95tests.OK.boolean.Btests.OK.boolean.bool_or9495tests.OK.boolean.bool_and93tests.OK.boolean.bool_neg92tests.OK.boolean.bool_and92tests.OK.boolean.bool_neg93 + 10_1 + + $10_1 + tests.OK.group.G + $10_0 - tests.OK.boolean.B + tests.OK.group.G + + + $9_1 + tests.OK.group.G $9_0 - tests.OK.boolean.B + tests.OK.group.G $8_0 - tests.OK.boolean.B + tests.OK.group.G $7_0 - tests.OK.boolean.B + tests.OK.group.G + + + $6_0 + tests.OK.group.G + + + $5_0 + tests.OK.group.G + + + $4_2 + tests.OK.group.G + + + $4_1 + tests.OK.group.G $4_0 - tests.OK.boolean.B + tests.OK.group.G - $3_0 - tests.OK.boolean.B + $3_1 + tests.OK.group.G - $2_0 - tests.OK.boolean.B + $3_0 + tests.OK.group.G 
- $1_0 - tests.OK.boolean.B + $2_0 + tests.OK.group.G - tests.OK.boolean.bool_and - tests.OK.boolean.Btests.OK.boolean.Btests.OK.boolean.B - - - tests.OK.boolean.bool_impl - tests.OK.boolean.Btests.OK.boolean.Btests.OK.boolean.B - - - tests.OK.boolean.bool_neg - tests.OK.boolean.Btests.OK.boolean.B - - - tests.OK.boolean.bool_or - tests.OK.boolean.Btests.OK.boolean.Btests.OK.boolean.B - - - tests.OK.boolean.bool_xor - tests.OK.boolean.Btests.OK.boolean.Btests.OK.boolean.B + tests.OK.group.1 + tests.OK.group.G - tests.OK.boolean.false - tests.OK.boolean.B + tests.OK.group.inv + tests.OK.group.Gtests.OK.group.G - tests.OK.boolean.true - tests.OK.boolean.B + tests.OK.group.⋅ + tests.OK.group.Gtests.OK.group.Gtests.OK.group.G FULL - tests.OK.boolean.lp + tests.OK.group.lp diff --git a/tests/regressions/xtc.ml b/tests/regressions/xtc.ml index d9be44223..035e33bd3 100644 --- a/tests/regressions/xtc.ml +++ b/tests/regressions/xtc.ml @@ -4,5 +4,5 @@ open Handle let () = Library.set_lib_root (Some "/tmp"); Timed.(Console.verbose := 0); - let sign = Compile.PureUpToSign.compile_file "../OK/boolean.lp" in + let sign = Compile.PureUpToSign.compile_file "../OK/group.lp" in Export.Xtc.sign Format.std_formatter sign