diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..3283fd2d3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +_build/ +**/*.exe diff --git a/components/dedukti/deduktiImport.ml b/components/dedukti/deduktiImport.ml new file mode 100644 index 000000000..435a091c8 --- /dev/null +++ b/components/dedukti/deduktiImport.ml @@ -0,0 +1,25 @@ +(* TODO: Forse baseuri e' gia' in status *) +let rec eval_from_dedukti_stream ~asserted ~baseuri status buf = + try + let entry = Parsers.Parser.read buf in + Parsers.Entry.pp_entry Format.err_formatter entry; + let objs = ObjectConstruction.obj_of_entry status ~baseuri buf entry in + let check_and_add ((uri, _, _, _, _) as obj) = + HLog.message ("Tradotto!" ^ status#ppobj obj); + let status = NCicLibrary.add_obj status obj in + let xxaliases = GrafiteDisambiguate.aliases_for_objs status [ uri ] in + let mode = GrafiteAst.WithPreferences in + (* MATITA 1.0: fixme *) + let status = GrafiteEngine.eval_alias status (mode, xxaliases) in + status + in + (* TODO per fixare gli alias guardare matitaengine.ml da righe circa 230 235 *) + let status = + Option.fold ~none:status + ~some:(fun list -> + List.fold_left (fun _ obj -> check_and_add obj) status list) + objs + in + + eval_from_dedukti_stream ~asserted ~baseuri status buf + with End_of_file -> (asserted, status) diff --git a/components/dedukti/deduktiImport.mli b/components/dedukti/deduktiImport.mli new file mode 100644 index 000000000..72fc01bd3 --- /dev/null +++ b/components/dedukti/deduktiImport.mli @@ -0,0 +1,7 @@ +(* TODO *) +val eval_from_dedukti_stream : + asserted:'a -> + baseuri:string -> + (#GrafiteTypes.status as 'b) -> + Parsers.Parser.stream -> + 'a * 'b diff --git a/components/dedukti/dune b/components/dedukti/dune new file mode 100644 index 000000000..cb364015c --- /dev/null +++ b/components/dedukti/dune @@ -0,0 +1,7 @@ +(library + (name helm_dedukti) + (libraries helm_ng_library dedukti.parsers helm_ng_disambiguation helm_grafite_engine) + (wrapped 
false)) +(env + (_ + (flags (:standard -rectypes -w @A-52-4-34-37-45-9-44-48-6-32-20-58-7-57-3-68-69-70-50-42)))) diff --git a/components/dedukti/objectConstruction.ml b/components/dedukti/objectConstruction.ml new file mode 100644 index 000000000..503fadaf1 --- /dev/null +++ b/components/dedukti/objectConstruction.ml @@ -0,0 +1,404 @@ +let const_tbl = Hashtbl.create 0 + +let failwith_log mex = + HLog.error mex; + failwith mex + +let mkuri ~baseuri name ext = + NUri.uri_of_string (baseuri ^ "/" ^ name ^ "." ^ ext) + +let first (a, _, _) = a +let cic_cic = Kernel.Basic.mk_mident "cic" + +let cic_Term = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident "cic") + (Kernel.Basic.mk_ident "Term") + +let cic_lift = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident "cic") + (Kernel.Basic.mk_ident "lift") + +let cic_prod = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident "cic") + (Kernel.Basic.mk_ident "prod") + +let cic_Univ = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident "cic") + (Kernel.Basic.mk_ident "Univ") + +let cic_univ = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident "cic") + (Kernel.Basic.mk_ident "univ") + +let cic_type = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident "cic") + (Kernel.Basic.mk_ident "type") + +let cic_prop = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident "cic") + (Kernel.Basic.mk_ident "prop") + +let cic_z = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident "cic") + (Kernel.Basic.mk_ident "z") + +let cic_succ = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident "cic") + (Kernel.Basic.mk_ident "s") + +let rec calc_univ_dept = function + | Kernel.Term.Const (_, name) when Kernel.Basic.name_eq name cic_z -> 0 + | Kernel.Term.App (Kernel.Term.Const (_, f_name), a, []) + when Kernel.Basic.name_eq f_name cic_succ -> + 1 + calc_univ_dept a + | _ -> failwith_log "Error loading universe dept" + +let make_type_n_uri term = + let univ_dept = calc_univ_dept term in + if univ_dept >= 0 && univ_dept <= 5 then + NUri.uri_of_string (Printf.sprintf 
"cic:/matita/pts/Type%d.univ" univ_dept) + else + failwith_log + (Format.sprintf "Univers number must be between 0 and 5. Got %d" univ_dept) + +let rec construct_debrujin index = NCic.Rel (index + 1) + +and construct_type term = + NCic.Sort (NCic.Type [ (`Type, make_type_n_uri term) ]) + +and construct_prop = NCic.Sort NCic.Prop + +and construct_const ~baseuri:_ name = + match Hashtbl.find_opt const_tbl name with + | Some reference -> NCic.Const reference + (* It should not happen; the reference is bogus *) + | None -> + failwith_log + ("name " + ^ Kernel.Basic.string_of Kernel.Basic.pp_name name + ^ " not found in const table") + +and construct_sort = function + | Kernel.Term.App (Kernel.Term.Const (_, name), a1, []) + when Kernel.Basic.name_eq name cic_type -> + construct_type a1 + | Kernel.Term.Const (_, name) when Kernel.Basic.name_eq name cic_prop -> + construct_prop + | _ -> assert false + +and construct_appl ~baseuri f a1 args = + match (f, args) with + | Kernel.Term.Const (_, name), [ t ] when Kernel.Basic.name_eq name cic_Term + -> + construct_term ~baseuri t + | ( Kernel.Term.Const (_, name), + [ _; _; Kernel.Term.Lam (_, ident, Some typ, body) ] ) + when Kernel.Basic.name_eq name cic_prod -> + construct_prod ~baseuri (Kernel.Basic.string_of_ident ident) typ body + | Kernel.Term.Const (_, name), [ _; a ] + when Kernel.Basic.name_eq name cic_lift -> + construct_term ~baseuri a + | Kernel.Term.Const (_, name), [ _; _; Kernel.Term.Lam (_, _, None, _) ] + when Kernel.Basic.name_eq name cic_prod -> + assert false + | Kernel.Term.Const (_, name), [] + when Kernel.Basic.name_eq name cic_univ + || Kernel.Basic.name_eq name cic_Univ -> + construct_sort a1 + | Kernel.Term.Const (_, name), _ + when Kernel.Basic.mident_eq (Kernel.Basic.md name) cic_cic -> + assert false + | _ -> + let translator = construct_term ~baseuri in + let t = List.map translator (f :: a1 :: args) in + NCic.Appl t + +and construct_lambda ~baseuri binder typ body = + let translator = 
construct_term ~baseuri in + let typ' = translator typ in + let body' = translator body in + NCic.Lambda (binder, typ', body') + +and construct_prod ~baseuri binder typ body = + let translator = construct_term ~baseuri in + let typ' = translator typ in + let body' = translator body in + NCic.Prod (binder, typ', body') + +and construct_term ~baseuri = function + | Kernel.Term.DB (_, _, i) -> construct_debrujin i + | Kernel.Term.Const (_, name) -> construct_const ~baseuri name + | Kernel.Term.App (f, a, args) -> construct_appl ~baseuri f a args + | Kernel.Term.Lam (_, ident, Some typ, body) -> + construct_lambda ~baseuri (Kernel.Basic.string_of_ident ident) typ body + | Kernel.Term.Lam (_, _, None, _) -> assert false + | Kernel.Term.Pi (_, ident, typ, body) -> + construct_prod ~baseuri (Kernel.Basic.string_of_ident ident) typ body + | Kernel.Term.Kind -> assert false + | Kernel.Term.Type _ -> assert false + +let construct_constant_kind ~baseuri typ body ident = + let typ = construct_term ~baseuri typ in + let body = Option.map (construct_term ~baseuri) body in + let attrs = (`Implied, `Axiom, `Regular) in + NCic.Constant ([], ident, body, typ, attrs) + +let construct_constant status ~baseuri ident typ body = + let str_ident = Kernel.Basic.string_of_ident ident in + let name = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident (Filename.basename baseuri)) + ident + in + let uri = mkuri ~baseuri str_ident "con" in + let obj_kind = construct_constant_kind ~baseuri typ body str_ident in + let height = + NCicTypeChecker.height_of_obj_kind status uri ~subst:[] obj_kind + in + let reference = + NReference.reference_of_spec uri + (if body <> None then NReference.Def height else NReference.Decl) + in + assert (not (Hashtbl.mem const_tbl name)); + Hashtbl.add const_tbl name reference; + (uri, height, [], [], obj_kind) + +let rec extract_idents_from_pattern = function + | Kernel.Rule.Var (_, ident, _, []) -> [ Kernel.Basic.string_of_ident ident ] + | Kernel.Rule.Var (_, ident, _, 
pat_list) -> + let str_ident = Kernel.Basic.string_of_ident ident in + let others = + List.flatten + (List.map (fun pat -> extract_idents_from_pattern pat) pat_list) + in + str_ident :: others + | Kernel.Rule.Pattern (_, _, []) -> [] + | Kernel.Rule.Pattern (_, _, pat_list) -> + List.flatten + (List.map (fun pat -> extract_idents_from_pattern pat) pat_list) + | Kernel.Rule.Lambda (_, ident, pattern) -> + let str_ident = Kernel.Basic.string_of_ident ident in + str_ident :: extract_idents_from_pattern pattern + | Kernel.Rule.Brackets _ -> [] + +let construct_fixpoint_body ~baseuri (rule : 'a Kernel.Rule.rule) typ recno = + let rec aux ~baseuri (rule : 'a Kernel.Rule.rule) idents typ recno recindex = + match (typ, idents) with + | NCic.Prod (_, source, target), h :: t when recindex < recno -> + NCic.Lambda (h, source, aux ~baseuri rule t target recno (recindex + 1)) + | NCic.Prod (_, source, _), [ h ] when recindex >= recno -> + let body = construct_term ~baseuri rule.Kernel.Rule.rhs in + NCic.Lambda (h, source, body) + | _, [] -> failwith_log "Not enough names when parsing fixpoint" + | _ -> assert false + in + let idents = extract_idents_from_pattern rule.Kernel.Rule.pat in + aux ~baseuri rule idents typ recno 0 + +let construct_fixpoint_function ~baseuri (typ_entry, body_entry, attrs) = + let name, recno = attrs in + match (typ_entry, body_entry) with + | Parsers.Entry.Decl (_, _, _, _, typ), Parsers.Entry.Rules (_, rule_list) -> + let typ' = construct_term ~baseuri typ in + let body' = + construct_fixpoint_body ~baseuri (List.hd rule_list) typ' recno + in + ([], name, recno, typ', body') + | _ -> failwith_log "Malformed error reconstructing fixpoint " + +let name_of_decl ~baseuri decl = + match decl with + | Parsers.Entry.Decl (_, ident, _, _, _) -> + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident (Filename.basename baseuri)) + ident + | _ -> failwith_log "Cant generate a name from this type of entry" + +let mkuri_from_decl ~baseuri decl ext = + match decl with + 
| Parsers.Entry.Decl (_, ident, _, _, _) -> + let str_ident = Kernel.Basic.string_of_ident ident in + mkuri ~baseuri str_ident ext + | _ -> failwith_log "Cant generate an uri from this type of entry" + +let construct_fixpoint status ~baseuri fixpoint_list = + let names = + List.map + (fun (typ, _, (_, leftno)) -> (name_of_decl ~baseuri typ, leftno)) + fixpoint_list + in + let uri = mkuri_from_decl ~baseuri (first (List.nth fixpoint_list 0)) "con" in + List.iteri + (fun i (name, leftno) -> + let reference = + NReference.reference_of_spec uri (NReference.Fix (i, leftno, 0)) + in + if Hashtbl.mem const_tbl name then + failwith_log + ("Fixpoint " + ^ Kernel.Basic.string_of Kernel.Basic.pp_name name + ^ "already registed in const table") + else Hashtbl.add const_tbl name reference) + names; + let functions = + List.map (construct_fixpoint_function ~baseuri) fixpoint_list + in + let f_attr = (`Implied, `Axiom, `Regular) in + let obj_kind = NCic.Fixpoint (true, functions, f_attr) in + HLog.warn ("XXX " ^ status#ppobj (uri, 999, [], [], obj_kind)); + let height = + NCicTypeChecker.height_of_obj_kind status uri ~subst:[] obj_kind + in + Some [ (uri, height, [], [], obj_kind) ] + +let construct_inductive_constructor ~baseuri block_uri indtyno leftno consno = + function + | Parsers.Entry.Decl (_, ident, _, _, term) -> + let t = construct_term ~baseuri term in + let name = + Kernel.Basic.mk_name + (Kernel.Basic.mk_mident (Filename.basename baseuri)) + ident + in + let ident = Kernel.Basic.string_of_ident ident in + let reference = + NReference.reference_of_spec block_uri + (NReference.Con (indtyno, consno + 1, leftno)) + in + if Hashtbl.mem const_tbl name then + failwith_log + ("Inductive constructor " + ^ Kernel.Basic.string_of Kernel.Basic.pp_name name + ^ "already registed in const table") + else Hashtbl.add const_tbl name reference; + ([], ident, t) + | _ -> failwith_log "Malformed inductive type constructor" + +let construct_inductive_type ~baseuri block_uri leftno 
indtypno + (typ, conss, attrs) = + match typ with + | Parsers.Entry.Decl (_, _, _, _, typ_term) -> + let name, _ = attrs in + let typ' = construct_term ~baseuri typ_term in + let conss' = + List.mapi + (construct_inductive_constructor ~baseuri block_uri indtypno leftno) + conss + in + ([], name, typ', conss') + | _ -> assert false + +let construct_inductive status ~baseuri leftno types = + let names = List.map (fun (typ, _, _) -> name_of_decl ~baseuri typ) types in + let uri = mkuri_from_decl ~baseuri (first (List.nth types 0)) "ind" in + List.iteri + (fun i name -> + let reference = + NReference.reference_of_spec uri (NReference.Ind (true, i, leftno)) + in + if Hashtbl.mem const_tbl name then + failwith_log + ("Inductive type " + ^ Kernel.Basic.string_of Kernel.Basic.pp_name name + ^ "already registed in const table") + else Hashtbl.add const_tbl name reference) + names; + + let i_attr = (`Implied, `Regular) in + let types' = List.mapi (construct_inductive_type ~baseuri uri leftno) types in + let obj_kind = NCic.Inductive (true, leftno, types', i_attr) in + let height = + NCicTypeChecker.height_of_obj_kind status uri ~subst:[] obj_kind + in + (uri, height, [], [], obj_kind) + +let split_match_const match_const = + let rec extract_cases entry leftno = + match entry with + | NCic.Prod (_, typ, oth) when leftno > 0 -> + let cases, ind = extract_cases oth (leftno - 1) in + (typ :: cases, ind) + | NCic.Prod (_, typ, _) -> ([], typ) + | _ -> assert false + in + match match_const with + | NCic.Prod (_, rt, cases) -> + let return_type = rt in + let leftno = 2 in + (* TODO *) + let cases, ind = extract_cases cases leftno in + (return_type, cases, ind) + | _ -> assert false + +let construct_match status ~baseuri = function + | Parsers.Entry.Decl (_, ident, _, _, typ) -> + construct_constant status ~baseuri ident typ None + (* TODO *) + (* let ident' = Kernel.Basic.string_of_ident ident in *) + (* let uri = mkuri ~baseuri ident' "FIXMEMORE" in *) + (* let typ' = 
construct_term ~baseuri typ in *) + (* (* let ret_typ, cases, ind = split_match_const typ' in *) *) + (* let reference = NReference.reference_of_string ("cic:/" ^ ident' ^ "#dec") in *) + (* (* let match_term = NCic.Match(reference, ret_typ, ind, cases) in *) *) + (* let attrs = (`Implied, `Axiom, `Regular) in *) + (* let obj_kind = NCic.Constant([], ident', None (*Some match_term*), typ', attrs) in *) + (* let height = NCicTypeChecker.height_of_obj_kind status uri ~subst:[] obj_kind in *) + (* (uri, height, [], [], obj_kind) *) + | _ -> failwith_log "match const must be a declaration" + +let rec read_until_end_pragma pragma_name buf = + try + match Parsers.Parser.read buf with + | Parsers.Entry.Pragma (_, str) + when PragmaParsing.is_valid_export_pragma str + && String.starts_with "PRAGMA END" str + && PragmaParsing.pragma_name str = pragma_name -> + HLog.debug ("END of pragka " ^ str); + [] + | _ as entry -> entry :: read_until_end_pragma pragma_name buf + with End_of_file -> failwith_log ("PRAGMA '" ^ pragma_name ^ "'not closed") + +let handle_pragma_block status ~baseuri buf pragma_string = + let pragma_name = PragmaParsing.pragma_name pragma_string in + let entries = read_until_end_pragma pragma_name buf in + match PragmaParsing.parse_block pragma_name pragma_string entries with + | Some export_pragma -> ( + match export_pragma with + | PragmaParsing.GeneratedPragma -> None + | PragmaParsing.FixpointPragma fixpoint_list -> + construct_fixpoint status ~baseuri fixpoint_list + | PragmaParsing.InductivePragma (leftno, types, match_const) -> + let ind = construct_inductive status ~baseuri leftno types in + HLog.debug "QUAUAUAUAUUAUAAUUA"; + Some (ind :: List.map (construct_match status ~baseuri) match_const)) + | _ -> failwith "Unable to parse pragma block" + +let obj_of_entry status ~baseuri buf = function + | Parsers.Entry.Def (_, ident, _, _, Some typ, body) -> + Some [ construct_constant status ~baseuri ident typ (Some body) ] + | Parsers.Entry.Def (_, _, _, 
_, None, _) -> assert false + | Parsers.Entry.Decl (_, ident, _, _, typ) -> + Some [ construct_constant status ~baseuri ident typ None ] + | Parsers.Entry.Pragma (_, str) -> + if + PragmaParsing.is_valid_export_pragma str + && String.starts_with "PRAGMA BEGIN" str + then handle_pragma_block status ~baseuri buf str + else ( + HLog.warn ("Found unknow pragma " ^ str); + None) + | Parsers.Entry.Rules (_, _) -> + HLog.warn "Ignoring found rewriting rule"; + None + | _ -> + HLog.message "NOT IMPLEMENTED (other)"; + None (*TODO*) diff --git a/components/dedukti/objectConstruction.mli b/components/dedukti/objectConstruction.mli new file mode 100644 index 000000000..7b680776a --- /dev/null +++ b/components/dedukti/objectConstruction.mli @@ -0,0 +1,6 @@ +val obj_of_entry : + #NCic.status -> + baseuri:string -> + Parsers.Parser.stream -> + Parsers.Entry.entry -> + NCic.obj list option diff --git a/components/dedukti/pragmaParsing.ml b/components/dedukti/pragmaParsing.ml new file mode 100644 index 000000000..ff40d51b2 --- /dev/null +++ b/components/dedukti/pragmaParsing.ml @@ -0,0 +1,242 @@ +(* name recno *) +type fp_pragma_attrs = string * int + +(* name cons name list *) +type ind_pragma_attrs = string * string list + +type export_pragma = + | GeneratedPragma + (* type body attrs *) + | FixpointPragma of + (Parsers.Entry.entry * Parsers.Entry.entry * fp_pragma_attrs) list + (* leftno type constructors attrs match const entry *) + | InductivePragma of + int + * (Parsers.Entry.entry * Parsers.Entry.entry list * ind_pragma_attrs) list + * Parsers.Entry.entry list + +let generated_pragma = "GENERATED" +let fixpoint_pragma = "FIXPOINT" +let fixpoint_body_pragma = "FIXPOINT_BODY" +let inductive_pragma = "INDUCTIVE" +let match_pragma = "MATCH" +let name_attr = "NAME" +let ref_attr = "REF" +let leftno_attr = "LEFTNO" +let recno_regex = Str.regexp {|.*RECNO:[a-zA-Z0-9_]+=[0-9]+|} +let body_regex = Str.regexp {|.*BODY:[a-zA-Z0-9_]+=[a-zA-Z0-9_]+|} +let cons_regex = Str.regexp 
{|.*CONS:[a-zA-Z0-9_]+=[a-zA-Z0-9_]+|} + +let pragma_name_regex = + Str.regexp + {|PRAGMA\( BEGIN\| END\)? \([A-Za-z_]+\)\(( \|[A-Z])+(:[A-Za-z0-9]+)*=([a-zA-Z0-9_]+)( )*\)*|} + +let failwith_log mex = + HLog.error mex; + failwith mex + +(** Given a string of type 'PRAGMA [ATTR=...]' returns the `NAME` part *) +let pragma_name pragma_str = + if Str.string_match pragma_name_regex pragma_str 0 then + Str.matched_group 2 pragma_str + else failwith "Unable to get name of pragma '" ^ pragma_str ^ "'" + +let is_valid_export_pragma pragma_str = + Str.string_match pragma_name_regex pragma_str 0 + +let filter_matching list regex = + List.filter (fun s -> Str.string_match regex s 0) list + +(** [parse_attr_named str] when s is in the form `:=` returns + [(KEY, NAME, VALUE)] + *) +let parse_attr_named str = + let pattern = + Str.regexp {|\([A-Z_]+\):\([a-zA-Z0-9_]+\)=\([a-zA-Z0-9_]+\)|} + in + if Str.string_match pattern str 0 then + let key = Str.matched_group 1 str in + let name = Str.matched_group 2 str in + let value = Str.matched_group 3 str in + (key, name, value) + else failwith_log ("Cannot extract attributes from " ^ str) + +(** [parse_attr str] when s is in the form `=` returns [(KEY, VALUE)] *) +let parse_attr str = + let pattern = Str.regexp {|\([A-Z_]+\)=\([a-zA-Z0-9_]+\)|} in + if Str.string_match pattern str 0 then + let key = Str.matched_group 1 str in + let value = Str.matched_group 2 str in + (key, value) + else failwith_log ("Cannot extract attributes from " ^ str) + +let parse_attr_by_key key str = + let pattern = Str.regexp ({|.*|} ^ key ^ {|=\([a-zA-Z0-9_]+\)|}) in + if Str.string_match pattern str 0 then Some (Str.matched_group 1 str) + else None + +(** [find_snd_by_fst fst list] find all seconds members of a list of paris which have + the first member equal to [fst]*) +let rec find_snd_by_fst fst = function + | [] -> [] + | (k, v) :: t when k = fst -> v :: find_snd_by_fst fst t + | _ :: t -> find_snd_by_fst fst t + +(* [find_decl_with_name name 
entries] finds the [Decl] entry inside the [entries] list which has + an ident that matches with [name]. *) +let rec find_decl_with_name name entries = + match entries with + | [] -> None + | (Parsers.Entry.Decl (_, ident, _, _, _) as e) :: _ + when name = Kernel.Basic.string_of_ident ident -> + Some e + | _ :: t -> find_decl_with_name name t + +(** [construct_fp_attr recnos names] construct a [fp_pragma_attrs list] by + coupling togheter names and attributes *) +let rec construct_fp_attr recnos = function + | [] -> [] + | name :: tail -> ( + let recno_list = find_snd_by_fst name recnos in + match recno_list with + | [ recno ] -> + (name, int_of_string recno) :: construct_fp_attr recnos tail + | [] -> + failwith_log + ("fixpoint pragma without RECNO attr for name '" ^ name ^ "'") + | _ -> + failwith_log + ("fixpoint pragma with too many RECNO attributes for name '" ^ name + ^ "'")) + +(** [parse_fp_attrs pragma_str] return a [fp_pragma_attrs] with the attributes + found parsing [pragma_str] *) +let parse_fp_attrs pragma_str = + let splitted = String.split_on_char ' ' pragma_str in + let names_opt = List.map (parse_attr_by_key name_attr) splitted in + let names = List.map Option.get (List.filter Option.is_some names_opt) in + let recnos = filter_matching splitted recno_regex in + let recnos' = + List.map + (fun r -> + let _, n, v = parse_attr_named r in + (n, v)) + recnos + in + construct_fp_attr recnos' names + +let rec construct_fixpoint_pragma attributes entries = + (* Given a list of strings find the one matching REF attribute regex and return the value*) + let rec find_ref_attr list = + match list with + | [] -> None + | h :: t -> + let ref_val_opt = parse_attr_by_key ref_attr h in + if Option.is_some ref_val_opt then ref_val_opt else find_ref_attr t + in + (* Find the entry of the list which holds the body of the fixpoint referenced by the name *) + let rec find_body_entry name entries = + match entries with + | [] -> None + | Parsers.Entry.Pragma (_, str) :: 
e :: t + when pragma_name str = fixpoint_body_pragma -> ( + let splitted = String.split_on_char ' ' str in + let ref_opt = find_ref_attr splitted in + match ref_opt with + | Some ref_val when ref_val = name -> Some e + | _ -> find_body_entry name (e :: t)) + | _ :: t -> find_body_entry name t + in + match attributes with + | [] -> [] + | ((name, _) as attr) :: tail -> ( + let typ = find_decl_with_name name entries in + let body = find_body_entry name entries in + match (typ, body) with + | Some t, Some b -> (t, b, attr) :: construct_fixpoint_pragma tail entries + | None, _ -> failwith_log "Missing type while constructing fixpoint" + | _, None -> failwith_log "Missing body while constructing fixpoint") + +let parse_fixpoint_pragma pragma_str entries = + let attributes = parse_fp_attrs pragma_str in + FixpointPragma (construct_fixpoint_pragma attributes entries) + +let rec construct_ind_attr cons = function + | [] -> [] + | name :: tail -> + let cons_values = find_snd_by_fst name cons in + (name, cons_values) :: construct_ind_attr cons tail + +let parse_ind_attrs pragma_str = + let splitted = String.split_on_char ' ' pragma_str in + let names_opt = List.map (parse_attr_by_key name_attr) splitted in + let names = List.map Option.get (List.filter Option.is_some names_opt) in + let cons = filter_matching splitted cons_regex in + let cons = + List.map + (fun c -> + let _, n, v = parse_attr_named c in + (n, v)) + cons + in + construct_ind_attr cons names + +let construct_ind_pragma leftno attributes entries = + let find_type_entry name entries = + match find_decl_with_name name entries with + | Some typ -> typ + | None -> + failwith_log + ("Unable to find type entry for inductive with name '" ^ name ^ "'") + in + let find_cons_entry entries cons = + match find_decl_with_name cons entries with + | Some e -> e + | None -> + failwith_log + ("Unable to find constructor entry for constructor '" ^ cons ^ "'") + in + let rec construct_ind_types attributes entries = + match 
attributes with + | [] -> [] + | ((name, conss) as att) :: t -> + let type_entry = find_type_entry name entries in + let cons_entries = List.map (find_cons_entry entries) conss in + (type_entry, cons_entries, att) :: construct_ind_types t entries + in + let types = construct_ind_types attributes entries in + (leftno, types) + +let rec construct_match_pragma entries = + match entries with + | Parsers.Entry.Pragma (_, str) + :: (Parsers.Entry.Decl (_, _, _, _, _) as match_const) + :: t + when pragma_name str = match_pragma -> + match_const :: construct_match_pragma t + | _ :: t -> construct_match_pragma t + | [] -> [] + +let parse_inductive_pragma pragma_str entries = + match parse_attr_by_key leftno_attr pragma_str with + | None -> + failwith_log + ("Unable to find 'LEFTNO' attribute in inductive pragma with value: '" + ^ pragma_str ^ "'") + | Some value -> + let leftno = int_of_string value in + let attrs = parse_ind_attrs pragma_str in + construct_ind_pragma leftno attrs entries + +let parse_block name pragma_str entries = + if name = generated_pragma then Some GeneratedPragma + else if name = fixpoint_pragma then + Some (parse_fixpoint_pragma pragma_str entries) + else if name = inductive_pragma then + let match_const = construct_match_pragma entries in + let leftno, types = parse_inductive_pragma pragma_str entries in + Some (InductivePragma (leftno, types, match_const)) + else ( + HLog.message + ("Found uknown pragma block beginning with '" ^ pragma_str ^ "'"); + None) diff --git a/components/dedukti/pragmaParsing.mli b/components/dedukti/pragmaParsing.mli new file mode 100644 index 000000000..cd819d50c --- /dev/null +++ b/components/dedukti/pragmaParsing.mli @@ -0,0 +1,23 @@ +(* name recno *) +type fp_pragma_attrs = string * int + +(* name cons name *) +type ind_pragma_attrs = string * string list + +type export_pragma = + | GeneratedPragma + (* type body attrs *) + | FixpointPragma of + (Parsers.Entry.entry * Parsers.Entry.entry * fp_pragma_attrs) list + (* 
leftno type constructors attrs match const entry *) + | InductivePragma of + int + * (Parsers.Entry.entry * Parsers.Entry.entry list * ind_pragma_attrs) list + * Parsers.Entry.entry list + +val pragma_name : string -> string + +val parse_block : + string -> string -> Parsers.Entry.entry list -> export_pragma option + +val is_valid_export_pragma : string -> bool diff --git a/components/grafite_engine/grafiteEngine.mli b/components/grafite_engine/grafiteEngine.mli index ae2c9e814..e0dc82eb8 100644 --- a/components/grafite_engine/grafiteEngine.mli +++ b/components/grafite_engine/grafiteEngine.mli @@ -31,3 +31,6 @@ val eval_ast : include_paths:string list -> ?do_heavy_checks:bool -> GrafiteTypes.status -> GrafiteAst.statement disambiguator_input -> GrafiteTypes.status + +val eval_alias : (#GrafiteTypes.status as 'a) -> + GrafiteAst.inclusion_mode * (DisambiguateTypes.domain_item * GrafiteAst.alias_spec) list -> 'a diff --git a/components/ng_extraction/.depend b/components/ng_extraction/.depend index 144c13d55..2f01f5aa8 100644 --- a/components/ng_extraction/.depend +++ b/components/ng_extraction/.depend @@ -30,3 +30,9 @@ ocamlExtraction.cmi : ocamlExtractionTable.cmi ocamlExtractionTable.cmo : miniml.cmo coq.cmi ocamlExtractionTable.cmi ocamlExtractionTable.cmx : miniml.cmx coq.cmx ocamlExtractionTable.cmi ocamlExtractionTable.cmi : miniml.cmo coq.cmi +dedukti.cmo : dedukti.cmi +dedukti.cmx : dedukti.cmi +deduktiPrint.cmo : dedukti.cmi deduktiPrint.cmi +deduktiPrint.cmx : dedukti.cmx deduktiPrint.cmi +deduktiExtraction.cmo : deduktiPrint.cmi dedukti.cmi deduktiExtraction.cmi +deduktiExtraction.cmx : deduktiPrint.cmx dedukti.cmx deduktiExtraction.cmi diff --git a/components/ng_extraction/.depend.opt b/components/ng_extraction/.depend.opt index a527ad0c4..51de5e65f 100644 --- a/components/ng_extraction/.depend.opt +++ b/components/ng_extraction/.depend.opt @@ -59,3 +59,9 @@ ocamlExtractionTable.cmx : \ ocamlExtractionTable.cmi : \ miniml.cmx \ coq.cmi +dedukti.cmo : 
dedukti.cmi +dedukti.cmx : dedukti.cmi +deduktiPrint.cmo : dedukti.cmi deduktiPrint.cmi +deduktiPrint.cmx : dedukti.cmx deduktiPrint.cmi +deduktiExtraction.cmo : deduktiPrint.cmi dedukti.cmi deduktiExtraction.cmi +deduktiExtraction.cmx : deduktiPrint.cmx dedukti.cmx deduktiExtraction.cmi diff --git a/components/ng_extraction/dedukti.ml b/components/ng_extraction/dedukti.ml new file mode 100644 index 000000000..e96a5b3e8 --- /dev/null +++ b/components/ng_extraction/dedukti.ml @@ -0,0 +1,111 @@ +type comment = string +type var = string +type constname = string +type pragma = string +type modname = string +type const = modname * constname +type sort = Type | Kind + +type term = + | Var of var + | Const of const + | Sort of sort + | Prod of var * term * term + | Lam of var * term * term + | App of term * term + +let prods bs a = List.fold_right (fun (x, b) a -> Prod (x, b, a)) bs a +let lams bs m = List.fold_right (fun (x, b) m -> Lam (x, b, m)) bs m +let apps m ns = List.fold_left (fun m n -> App (m, n)) m ns +let theory_modname = "cic" +let theory_const c args = apps (Const (theory_modname, c)) args +let univ_term s = theory_const "univ" [ s ] +let succ_sort s = theory_const "succ" [ s ] +let lift_term s1 s2 a = theory_const "lift" [ s1; s2; a ] + +let is_sort ty = + match ty with App (Const (_, s), _) when s = "Univ" -> true | _ -> false + +let extract_sort ty = + match ty with App (Const (_, s), t) when s = "Univ" -> t | _ -> assert false + +let rec is_sort_product ty = + match ty with + | App (Const (_, s), _) when s = "Univ" || s = "univ" -> true + | App (App (Const (_, _), _), a) -> is_prod_product a + | _ -> false + +and is_prod_product ty = + match ty with + | App (App (App (App (Const (_, s), _), _), _), Lam (_, _, ty)) + when s = "prod" -> + is_sort_product ty + | _ -> false + +let get_sort_product ty = + match ty with App (App (Const (_, _), s), _) -> s | _ -> assert false + +let extract_type ty = + match ty with App (App (Const (_, _), _), ty) -> ty | _ -> 
assert false + +let app_bindings m bs = + let translate_var x = + let ty = List.assoc x bs in + if is_sort ty then + let ty' = extract_sort ty in + lift_term ty' ty' (Var x) + else if is_sort_product ty then Var x + else Var x + in + let xs = fst (List.split bs) in + apps m (List.map translate_var xs) + +type pattern = + | PVar of var + | PConst of const + | PLam of var * pattern * pattern + | PApp of pattern * pattern + | PGuard of term + +let plams bs m = List.fold_right (fun (x, b) m -> PLam (x, PGuard b, m)) bs m +let papps m ns = List.fold_left (fun m n -> PApp (m, n)) m ns + +let papp_bindings m bs = + let xs = fst (List.split bs) in + papps m (List.map (fun x -> PVar x) xs) + +type context = (var * term) list + +let app_context m context = + (* Contexts are stored in reverse order. *) + app_bindings m (List.rev context) + +let papp_context m context = + (* Contexts are stored in reverse order. *) + papp_bindings m (List.rev context) + +type command = Require of modname + +type entry = + | StcDeclaration of constname * term + | DefDeclaration of constname * term + | Definition of constname * term option * term + | RewriteRule of context * pattern * term + | Command of command + | Comment of string + | Pragma of pragma + +type signature = entry list + +let dk_keywords = + [ + "Type"; + "defac"; + "defacu"; + "private"; + "require"; + "assert"; + "thm"; + "def"; + "injective"; + ] diff --git a/components/ng_extraction/dedukti.mli b/components/ng_extraction/dedukti.mli new file mode 100644 index 000000000..6aa7d9291 --- /dev/null +++ b/components/ng_extraction/dedukti.mli @@ -0,0 +1,70 @@ +(** Syntax of Dedukti **) + +type comment = string +type var = string +type constname = string +type pragma = string + +type modname = string +(** Names of modules **) + +type const = modname * constname +(** Constants are qualified by the name of the module in which they are defined. 
**) + +type sort = Type | Kind + +type term = + | Var of var + | Const of const + | Sort of sort + | Prod of var * term * term + | Lam of var * term * term + | App of term * term + +val prods : (var * term) list -> term -> term +(** Shortcuts for n-ary term constructors **) + +val lams : (var * term) list -> term -> term +val apps : term -> term list -> term +val app_bindings : term -> (var * term) list -> term + +(** Left-hand side of rewrite rules **) +type pattern = + | PVar of var + | PConst of const + | PLam of var * pattern * pattern + | PApp of pattern * pattern + | PGuard of term + +val plams : (var * term) list -> pattern -> pattern +(** Shortcuts for n-ary term constructors **) + +val papps : pattern -> pattern list -> pattern +val papp_bindings : pattern -> (var * term) list -> pattern + +type context = (var * term) list +(** Context of rewrite rules + WARNING: Contexts are stored in reverse order. **) + +val app_context : term -> context -> term +(** Shortcut for applying a term to the variables of a context **) + +val papp_context : pattern -> context -> pattern + +(** Commands such as module name declaration, evaluation, ... **) +type command = Require of modname + +type entry = + | StcDeclaration of constname * term + | DefDeclaration of constname * term + | Definition of constname * term option * term + | RewriteRule of context * pattern * term + | Command of command + | Comment of comment + | Pragma of pragma + +type signature = entry list +(** Content of a dedukti file + WARNING: Signatures are stored in reverse order. 
**) + +val dk_keywords : string list diff --git a/components/ng_extraction/deduktiExtraction.ml b/components/ng_extraction/deduktiExtraction.ml new file mode 100644 index 000000000..f7687f324 --- /dev/null +++ b/components/ng_extraction/deduktiExtraction.ml @@ -0,0 +1,1292 @@ +module U = NUri +module C = NCic +module R = NReference +module P = NCicPp +module D = Dedukti +module F = Format + +let pp ?(ctx = []) fmt term = + Format.fprintf fmt "%s@." ((new P.status)#ppterm ctx [] [] term) + +(* TODO commented for test scopes *) +(* let begin_gen = D.Pragma "BEGIN GENERATED." *) +(* let end_gen = D.Pragma "END GENERATED." *) +let begin_gen = D.Pragma "." +let end_gen = D.Pragma "." + +(**** Utilities ****) + +exception NotImplemented of string + +let not_implemented str = raise (NotImplemented str) + +(* Find a fresh variant of the string [name] such that the function + [avoid] returns false. **) +let fresh avoid name = + let rec try_variant i = + let variant = Format.sprintf "%s%d" name i in + if avoid variant then try_variant (i + 1) else variant + in + if avoid name then try_variant 1 else name + +(**** Global context ****) + +(** Uris with the same base uri are extracted to the same Dedukti module. + To keep track of the mapping, we use a hash table that maps base uris + to module names and another from module names to module signatures. + REMINDER : uris are represented as an abstract datatype while base uris + are reparesented as strings. **) + +(** Map from baseuris to dedukti module names **) +let baseuri_table : (string, D.modname) Hashtbl.t = Hashtbl.create 100 + +(** Map from dedukti module names to module signatures *) +let modules_table : (D.modname, D.signature) Hashtbl.t = Hashtbl.create 100 + +(** Add an entry to the signature of the module. **) +let add_entry modname entry = + let signature = Hashtbl.find modules_table modname in + Hashtbl.replace modules_table modname (entry :: signature) + +(** Escape prefix and slashes in the base uri string. 
**)
let escape_baseuri baseuri =
  (* Strip the "cic:/" scheme prefix so the rest can become a Dedukti
     module name. *)
  let cic_start = Str.string_match (Str.regexp "cic:/\\(.*\\)") baseuri 0 in
  let name =
    if not cic_start then
      (* BUG FIX: the warning was a raw string containing an unapplied
         "%s", so the offending baseuri was never shown.  Build the
         message explicitly instead. *)
      let () =
        HLog.warn ("baseuri " ^ baseuri ^ " does not begin with cic:/")
      in
      baseuri
    else Str.matched_group 1 baseuri
  in
  (* Slashes are not legal in Dedukti module names; replace them. *)
  Str.global_replace (Str.regexp "/") "_" name

(** Generate a fresh Dedukti module name from the given base uri string. **)
let fresh_modname baseuri =
  let avoid modname' = Hashtbl.mem modules_table modname' in
  fresh avoid (escape_baseuri baseuri)

(** Return the Dedukti module name for [baseuri], minting and
    registering a fresh module (with its header comment) the first time
    this base uri is seen. **)
let translate_baseuri baseuri =
  try Hashtbl.find baseuri_table baseuri
  with Not_found ->
    let modname' = fresh_modname baseuri in
    let () =
      Hashtbl.add modules_table modname'
        [ D.Comment "This file was automatically generated from Matita." ]
    in
    let () = Hashtbl.add baseuri_table baseuri modname' in
    modname'

(** A global Matita constant can be uniquely represented by a base uri
    (the name of the module) and a name (the name of the constant).
    Each pair is mapped to a unique Dedukti constant.
    To keep track of the mapping, we use a hash table that maps these pairs
    to Dedukti constants and another to represent the set of used constants. **)

(** Map from references to dedukti constants **)
let reference_table : (string * string, D.const) Hashtbl.t = Hashtbl.create 100

(** Set of dedukti constants **)
let constants_table : (D.const, unit) Hashtbl.t = Hashtbl.create 100

(** Suffix an underscore to names that clash with Dedukti keywords. **)
let escape_keyword name =
  if List.mem name D.dk_keywords then name ^ "_" else name

(** Escape illegal names. 
**)
(* "_" alone is not a usable identifier; keyword clashes are handled by
   [escape_keyword]. *)
let escape_name name = if name = "_" then "__" else escape_keyword name

(* Mint a Dedukti constant (module, name) for a Matita constant,
   uniquified against every constant already registered in the target
   module. *)
let fresh_const (baseuri, name) =
  let modname' = translate_baseuri baseuri in
  let avoid constname' = Hashtbl.mem constants_table (modname', constname') in
  let constname' = fresh avoid (escape_name name) in
  (modname', constname')

(* Memoized translation of a Matita (baseuri, name) pair to its Dedukti
   constant; the first request records the constant in both tables. *)
let translate_const (baseuri, name) =
  try Hashtbl.find reference_table (baseuri, name)
  with Not_found ->
    let const' = fresh_const (baseuri, name) in
    let () = Hashtbl.add constants_table const' () in
    let () = Hashtbl.add reference_table (baseuri, name) const' in
    const'

let generate_body_const_name name = Format.sprintf "%s_body" name

(** Return the name of the body function associated to the fixpoint. **)
let translate_body_const (baseuri, name) =
  translate_const (baseuri, generate_body_const_name name)

(* Encode the recursive-argument index of one fixpoint function as a
   pragma attribute, e.g. "RECNO:f=2". *)
let gen_fixpoint_attrs (_, name, recno, _, _) =
  Format.sprintf "RECNO:%s=%d" name recno

(* Build the BEGIN/END pragma pair bracketing a block of (mutually
   recursive) fixpoint definitions. *)
let create_fixpoint_pragmas functions =
  let attrs_str = List.map gen_fixpoint_attrs functions in
  let attrs = String.concat " " attrs_str in
  let names_str =
    List.map (fun (_, name, _, _, _) -> "NAME=" ^ name) functions
  in
  let names = String.concat " " names_str in
  ( D.Pragma (Format.sprintf "BEGIN FIXPOINT %s %s." names attrs),
    D.Pragma "END FIXPOINT." )

(* Pragma marking the body function of a fixpoint. *)
let create_fixpoint_body_pragma name =
  D.Pragma (Format.sprintf "FIXPOINT_BODY REF=%s." name)

(* A global Matita universe is mapped to a Dedukti constant.
   Since the universe constraints can change during the translation, universes
   are translated to a separate module. *)

(** Map from universe names to dedukti constants *)
let universe_table : (string, D.const) Hashtbl.t = Hashtbl.create 100

(** Map from dedukti universe constants to lesser dedukti universe constants *)
let constraints_table : (D.const, D.const list) Hashtbl.t = Hashtbl.create 100

(** Add the constraint c1 < c2 between the two dedukti universe constants. 
**) +let add_constraint c1 c2 = + let constraints = Hashtbl.find constraints_table c2 in + Hashtbl.replace constraints_table c2 (c1 :: constraints) + +let univs_modname = "univs" + +let fresh_univ name = + let modname' = univs_modname in + let avoid constname' = Hashtbl.mem constraints_table (modname', constname') in + let constname' = fresh avoid (escape_name name) in + (modname', constname') + +let translate_univ_uri (_, name) = + try Hashtbl.find universe_table name + with Not_found -> + let const' = fresh_univ name in + let () = Hashtbl.add constraints_table const' [] in + let () = Hashtbl.add universe_table name const' in + const' + +(**** Local context ****) + +(** Not every Matita variable is mapped to a Dedukti variable. Let-in variables + are translated to applied global constants, i.e. Dedukti terms. + We need to keep track of 3 things: + 1. The current Matita local context, needed for type-checking. + 2. The current Dedukti local context, needed for fresh-name generation + and for lambda-lifting. + 3. The mapping between Matita local variables and Dedukti terms, needed + for the translation. **) + +type context = { cic : C.context; dk : D.context; map : D.term list } + +let empty_context = { cic = []; dk = []; map = [] } + +(** Generate a Dedukti variable name from the given name that is fresh in + the given Dedukti context. 
**) +let fresh_var context name = + let avoid name' = List.mem_assoc name' context in + fresh avoid (escape_name name) + +(**** CIC constants ****) + +let theory_modname = "cic" +let theory_const c args = D.apps (D.Const (theory_modname, c)) args +let nat_type = theory_const "Nat" [] +let zero_nat = theory_const "z" [] +let succ_nat i = theory_const "s" [ i ] +let max_nat i j = theory_const "m" [ i; j ] +let sort_type = theory_const "Sort" [] +let prop_sort = theory_const "prop" [] +let type_sort i = theory_const "type" [ i ] +let succ_sort s = theory_const "succ" [ s ] +let next_sort s = theory_const "next" [ s ] +let rule_sort s1 s2 = theory_const "rule" [ s1; s2 ] +let max_sort s1 s2 = theory_const "max" [ s1; s2 ] +let univ_type s = theory_const "Univ" [ s ] +let term_type s a = theory_const "Term" [ s; a ] +let univ_term s = theory_const "univ" [ s ] +let join_term s1 s2 a b = theory_const "join" [ s1; s2; a; b ] +let cast_term s1 s2 a = theory_const "lift" [ s1; s2; a ] +let prod_term s1 s2 a b = theory_const "prod" [ s1; s2; a; b ] + +type univ = Prop | Type of int + +module UMSet = Set.Make (struct + type t = D.constname * univ + + let compare = compare +end) + +let translated_match = ref UMSet.empty + +module UFSet = Set.Make (struct + type t = D.constname * univ + + let compare = compare +end) + +let translated_filter = ref UFSet.empty + +let pp_univ fmt u = + match u with + | Prop -> Format.fprintf fmt "Prop" + | Type i -> Format.fprintf fmt "Type%d" i + +let string_of_pp pp u = Format.asprintf "%a" pp u + +let term_of_univ u = + let rec type_univ i = + if i = 0 then zero_nat else succ_nat (type_univ (i - 1)) + in + match u with Prop -> prop_sort | Type i -> type_sort (type_univ i) + +let back_to_sort s = + let to_algebra i = + [ + (`Type, U.uri_of_string (Format.sprintf "cic:/matita/pts/Type%d.univ" i)); + ] + in + match s with Prop -> C.Prop | Type i -> C.Type (to_algebra i) + +let univ_of_string u = + let str_cprop = Str.regexp "CProp*" in + let 
str_type = Str.regexp "Type*" in
  (* Parse the trailing universe index.  FIX: the previous
     [Str.last_chars u 1] read only the final character, so a
     multi-digit index such as "Type12" was mis-parsed; scan the whole
     trailing run of digits instead.  Names without a trailing digit
     still raise [Failure _] via [int_of_string], as before. *)
  let n =
    let len = String.length u in
    let rec first_digit i =
      if i > 0 && u.[i - 1] >= '0' && u.[i - 1] <= '9' then first_digit (i - 1)
      else i
    in
    let start = first_digit len in
    int_of_string (String.sub u start (len - start))
  in
  (* CProp universes are collapsed onto the Type hierarchy.  (The former
     [if n = 0 then Type 0 else Type n] was equivalent to [Type n].) *)
  if Str.string_match str_cprop u 0 then Type n
  else if Str.string_match str_type u 0 then Type n
  else failwith "I don't know that universe"

(* Successor on the [univ] representation. *)
let succ s = match s with Prop -> Type 0 | Type i -> Type (i + 1)

(* Least upper bound of two universes.  NOTE: intentionally shadows the
   term-level [max_sort] builder defined above; this one computes on the
   [univ] representation. *)
let max_sort s s' =
  match (s, s') with
  | Prop, Prop -> Prop
  | Type i, Prop | Prop, Type i -> Type i
  | Type i, Type j -> Type (max i j)

(* Inverse of [term_of_univ]: read a [univ] back out of its Dedukti
   encoding (prop, or type applied to s (s ... z)). *)
let back_to_univ t =
  let rec to_nat t =
    match t with
    | D.Const (_, z) when z = "z" -> 0
    | D.App (D.Const (_, s), t) when s = "s" -> 1 + to_nat t
    | _ -> assert false
  in
  match t with
  | D.Const (_, s) when s = "prop" -> Prop
  | D.App (D.Const (_, t), s) when t = "type" -> Type (to_nat s)
  | _ -> assert false

(* PTS product rule on the [univ] representation (shadows the
   term-level [rule_sort] builder defined above). *)
let rule_sort s s' =
  match (s, s') with
  | _, Prop -> Prop
  | Prop, _ -> s'
  | Type i, Type j -> Type (max i j)

(* Least upper bound of a list of universes; defaults to [Type 0] for
   the empty list. *)
let rec max_sorts sorts =
  match sorts with
  | [] -> Type 0
  | [ s ] -> s
  | s :: ss -> max_sort s (max_sorts ss)

(**** Sorts ****)

(* Translate one component of a CIC universe expression. *)
let translate_level i =
  let univ_algebra, uri = i in
  let name = U.name_of_uri uri in
  match univ_algebra with
  | `Type | `CProp -> univ_of_string name
  | `Succ -> succ (univ_of_string name)

(* A CIC universe is the max of its components. *)
let translate_univ u =
  let sorts' = List.map translate_level u in
  max_sorts sorts'

(* CIC sort to internal [univ] representation. *)
let translate_sort' s =
  match s with C.Prop -> Prop | C.Type u -> translate_univ u

(* CIC sort to its Dedukti term encoding. *)
let translate_sort s =
  match s with
  | C.Prop -> prop_sort
  | C.Type u -> term_of_univ (translate_univ u)

let inductive_registry = Hashtbl.create 81
let string_of_univ univ = string_of_pp pp_univ univ

(** Return the name of the match function associated to the inductive type. 
**)
let translate_match_const (baseuri, name) univ =
  let univ' = translate_sort' univ in
  let univ_name = string_of_univ univ' in
  translate_const (baseuri, Format.sprintf "match_%s_%s" name univ_name)

(** Return the name of the filter function associated to the inductive type. **)
let translate_filter_const (baseuri, name) univ =
  let univ' = translate_sort' univ in
  (* Consistency: use [string_of_univ] like [translate_match_const]
     above ([string_of_univ] is defined as exactly
     [string_of_pp pp_univ], so behavior is unchanged). *)
  let univ_name = string_of_univ univ' in
  translate_const (baseuri, Format.sprintf "filter_%s_%s" name univ_name)

(* Record the constraint u1 < u2 in [constraints_table].  Only atomic
   `Type universes may appear in a constraint. *)
let translate_constraint u1 u2 =
  match (u1, u2) with
  | [ (`Type, uri1) ], [ (`Type, uri2) ] ->
      let baseuri1 = U.baseuri_of_uri uri1 in
      let baseuri2 = U.baseuri_of_uri uri2 in
      let name1 = U.name_of_uri uri1 in
      let name2 = U.name_of_uri uri2 in
      let c1' = translate_univ_uri (baseuri1, name1) in
      let c2' = translate_univ_uri (baseuri2, name2) in
      add_constraint c1' c2'
  | _ ->
      (* There should be no constraints between other shapes of universes. *)
      assert false

(** Topologically sort and return the universes according to the constraints. **)
let sorted_universes () =
  (* Keep track of universes being traversed to avoid cycles. *)
  let visiting = ref [] in
  (* The resulting list of sorted universes, in reverse order. *)
  let sorted = ref [] in
  (* Topologically insert the universe in the sorted list. *)
  let rec insert univ =
    if List.mem univ !sorted then (* Universe has already been added *)
      ()
    else if List.mem univ !visiting then
      (* There is a cycle in the constraints *)
      failwith "Cycle detected between universes"
    else
      (* Recursively insert the smaller universes first. *)
      let smaller_univs = Hashtbl.find constraints_table univ in
      visiting := univ :: !visiting;
      List.iter insert smaller_univs;
      visiting := List.tl !visiting;
      sorted := univ :: !sorted
  in
  Hashtbl.iter (fun univ _ -> insert univ) constraints_table;
  (* Reverse the sorted list to obtain the correct order. 
*)
  List.rev !sorted

(** Compute the signature of the universe module from the stored constraints. **)
let univs_signature () =
  let signature =
    [ D.Comment "This file was automatically generated from Matita." ]
  in
  let sorted_univs = sorted_universes () in
  (* NB: this local [add_entry] deliberately shadows the module-level
     [add_entry]; it folds one universe definition onto the signature
     accumulator.  Each universe is defined as the max of the
     successors of the universes recorded as smaller than it
     ([max_sorts []] is [Type 0] when there are none). *)
  let add_entry signature u =
    let vs = Hashtbl.find constraints_table u in
    let vs = List.map (fun (_, n) -> succ (univ_of_string n)) vs in
    let mvs = term_of_univ (max_sorts vs) in
    D.Definition (snd u, Some sort_type, mvs) :: signature
  in
  List.fold_left add_entry signature sorted_univs

(**** Terms and types ****)

(* Decompose the Dedukti encoding of a CIC type into (sort, encoded
   type): accepts either [Term s a] (a type [a] in sort [s]) or
   [Univ s] (the universe [s] itself, which is returned as living in
   [succ s]).  Any other shape prints a debug dump and fails the
   assertion. *)
let of_term t =
  match t with
  | D.App (D.App (D.Const (_, t), s), a) when t = "Term" -> (back_to_univ s, a)
  | D.App (D.Const (_, t), a) when t = "Univ" ->
      let s = back_to_univ a in
      (succ s, univ_term a)
  | _ ->
      Format.printf "debug term:%a@." DeduktiPrint.print_term t;
      assert false

(* Rebuild a (reversed) telescope of bindings [l] into nested encoded
   products over the conclusion [a], recomputing the sort of each
   product with [rule_sort]. *)
let rec to_cic_prods l a =
  match l with
  | [] -> a
  | (x, t) :: l ->
      let s', a = of_term a in
      let s, t' = of_term t in
      let ss = term_of_univ s in
      let ss' = term_of_univ s' in
      to_cic_prods l
        (term_type
           (term_of_univ (rule_sort s s'))
           (prod_term ss ss' t' (D.Lam (x, t, a))))

(** The translation of terms and types is parameterized by:
    - The baseuri of the current Matita object, used to compute the name
      of the current context.
    - A status object, needed for typechecking and evaluation. 
**) +module type INFO = sig + val baseuri : string + val status : C.status +end + +module Translation (I : INFO) = struct + let translate_reference reference = + let (R.Ref (uri, _)) = reference in + let baseuri = U.baseuri_of_uri uri in + let name = NCicPp.r2s I.status true reference in + translate_const (baseuri, name) + + let lift i term = NCicSubstitution.lift I.status i term + let subst term1 term2 = NCicSubstitution.subst I.status term1 term2 + + let are_convertible context term1 term2 = + NCicReduction.are_convertible I.status ~metasenv:[] ~subst:[] + ~test_eq_only:true context.cic term1 term2 + + let whd context term = NCicReduction.whd I.status ~subst:[] context.cic term + + let split_prods context n term = + NCicReduction.split_prods I.status ~subst:[] context.cic n term + + let type_of context term = + NCicTypeChecker.typeof I.status ~metasenv:[] ~subst:[] context.cic term + + let rec split_ind_arity context bindings arity = + match whd context arity with + | C.Prod (x, a, b) -> + let context = { context with cic = (x, C.Decl a) :: context.cic } in + split_ind_arity context ((x, a) :: bindings) b + | C.Sort s -> (List.rev bindings, s) + | _ -> failwith "not an inductive arity" + + let rec split_cons_type context bindings ty = + match whd context ty with + | C.Prod (y, b, c) -> + let context = { context with cic = (y, C.Decl b) :: context.cic } in + split_cons_type context ((y, b) :: bindings) c + | C.Appl (C.Const (R.Ref (_, R.Ind _)) :: args) -> (List.rev bindings, args) + | C.Const (R.Ref (_, R.Ind _)) -> (List.rev bindings, []) + | _ -> failwith "invalid constructor type" + + let get_inductive_type uri = + let _, _, inductives, _, i = + NCicEnvironment.get_checked_indtys I.status uri + in + List.nth inductives i + + let sort_of context term = + match term with + | C.Sort C.Prop -> C.Type [] + | C.Sort (C.Type t) -> + let u = translate_univ t in + back_to_sort (succ u) + | _ -> ( + match whd context (type_of context term) with + | C.Sort s -> s + | _ 
-> failwith "not a sort") + + (** Split list into left and right parameters or arguments **) + let rec split_list i left right = + match (i, right) with + | 0, _ -> (List.rev left, right) + | n, x :: right when n > 0 -> split_list (i - 1) (x :: left) right + | _ -> raise (Invalid_argument "split_list") + + let rec is_sort b = + match b with C.Sort _ -> true | C.Prod (_, _, b) -> is_sort b | _ -> false + + and translate_term context term = + match term with + | C.Rel i -> List.nth context.map (i - 1) + | C.Meta _ -> + (* There should be no unresolved meta-variables at this point. *) + assert false + | C.Appl [] -> + (* There should be no empty list in an application. *) + assert false + | C.Appl (m :: ns) -> + let a = type_of context m in + let m' = translate_term context m in + let ns' = translate_args context ns a in + D.apps m' ns' + | C.Prod (x, a, b) -> + let s1 = sort_of context a in + let s1' = translate_sort s1 in + let a' = translate_term context a in + let a'' = translate_type context a in + let x' = fresh_var context.dk x in + let context_x = + { + cic = (x, C.Decl a) :: context.cic; + dk = (x', a'') :: context.dk; + map = D.Var x' :: context.map; + } + in + let s2 = sort_of context_x b in + let s2' = translate_sort s2 in + let b' = translate_term context_x b in + prod_term s1' s2' a' (D.Lam (x', a'', b')) + | C.Lambda (x, a, m) -> + let a'' = translate_type context a in + let x' = fresh_var context.dk x in + let context_x = + { + cic = (x, C.Decl a) :: context.cic; + dk = (x', a'') :: context.dk; + map = D.Var x' :: context.map; + } + in + let m' = translate_term context_x m in + D.Lam (x', a'', m') + | C.LetIn (x, a, n, m) -> + let a' = translate_type context a in + let n' = translate_term context n in + let c' = fresh_const (I.baseuri, Format.sprintf "let_%s" x) in + let () = Hashtbl.add constants_table c' () in + let lifted_a' = to_cic_prods context.dk a' in + let lifted_n' = D.lams (List.rev context.dk) n' in + let () = + add_entry (fst c') 
(D.Definition (snd c', Some lifted_a', lifted_n')) + in + let applied_c' = D.app_context (D.Const c') context.dk in + let context_x = + { + context with + cic = (x, C.Def (n, a)) :: context.cic; + map = applied_c' :: context.map; + } + in + translate_term context_x m + | C.Const reference -> + let const' = translate_reference reference in + D.Const const' + | C.Sort s -> + let s' = translate_sort s in + univ_term s' + | C.Implicit _ -> + (* There should be no implicits at this point. *) + assert false + | C.Match + ( (R.Ref (uri, R.Ind (_, _, leftno)) as ind_ref), + return_type, + matched, + cases ) -> + let ind_baseuri = U.baseuri_of_uri uri in + let ind_name = U.name_of_uri uri in + let ind_args = + match whd context (type_of context matched) with + | C.Appl (C.Const (R.Ref (_, R.Ind _)) :: args) -> args + | C.Const (R.Ref (_, R.Ind _)) -> [] + | _ -> failwith "invalid matched type" + in + (* Need the type of the parameters because the arguments might have + a strict subtype. *) + let _, _, ind_arity, _ = get_inductive_type ind_ref in + let return_sort = + match + snd (split_prods context (-1) (type_of context return_type)) + with + | C.Sort s -> s + | _ -> failwith "invalid return type" + in + let match_const' = + translate_match_const (ind_baseuri, ind_name) return_sort + in + let return_type' = translate_term context return_type in + let cases' = List.map (translate_term context) cases in + let ind_args' = translate_args context ind_args ind_arity in + let left_ind_args', right_ind_args' = split_list leftno [] ind_args' in + let matched' = translate_term context matched in + D.apps (D.Const match_const') + (left_ind_args' @ [ return_type' ] @ cases' @ right_ind_args' + @ [ matched' ]) + | C.Match _ -> failwith "invalid match" + + and translate_type context ty = + match ty with + | C.Prod (x, a, b) -> + let a'' = translate_type context a in + let x' = fresh_var context.dk x in + let context_x = + { + cic = (x, C.Decl a) :: context.cic; + dk = (x', a'') :: 
context.dk; + map = D.Var x' :: context.map; + } + in + let b'' = translate_type context_x b in + to_cic_prods [ (x', a'') ] b'' + | C.Sort s -> + let s' = translate_sort s in + univ_type s' + | _ -> + let s = sort_of context ty in + let s' = translate_sort s in + let ty' = translate_term context ty in + term_type s' ty' + + (** Translate a term according to the given type. If the type does not + correspond to the minimal type of the term, a coercion is added. **) + and translate_term_as context term ty = + let minimal_ty = type_of context term in + if are_convertible context minimal_ty ty then + match whd context ty with + | C.Sort C.Prop -> translate_term context term + | C.Sort s -> + let s' = translate_sort s in + let term' = translate_term context term in + cast_term s' s' term' + | _ -> translate_term context term + else translate_cast context term ty + + (** Add a coercion to life a term to the given type. **) + and translate_cast context term ty = + let apply m n = + match m with C.Appl ms -> C.Appl (ms @ [ n ]) | _ -> C.Appl [ m; n ] + in + match whd context ty with + | C.Prod (x, a, b) -> + let a'' = translate_type context a in + let x' = fresh_var context.dk x in + let context_x = + { + cic = (x, C.Decl a) :: context.cic; + dk = (x', a'') :: context.dk; + map = D.Var x' :: context.map; + } + in + let mx = apply (lift 1 term) (C.Rel 1) in + let mx' = translate_cast context_x mx b in + D.Lam (x', a'', mx') + | C.Sort s2 -> + let s1 = sort_of context term in + let s1' = translate_sort s1 in + let s2' = translate_sort s2 in + let term' = translate_term context term in + cast_term s1' s2' term' + | _ -> assert false + + (** Translate the arguments of an application according to the type + of the applied function. 
**) + and translate_args context terms ty = + match terms with + | [] -> [] + | n :: ns -> + let a, b = + match whd context ty with + | C.Prod (_, a, b) -> (a, b) + | _ -> failwith "Left term of an application should have product type" + in + let n' = translate_term_as context n a in + let ns' = translate_args context ns (subst n b) in + n' :: ns' + + let translate_binding (context, (x, a)) : context * (D.var * D.term) = + let a'' = translate_type context a in + let x' = fresh_var context.dk x in + let context_x = + { + cic = (x, C.Decl a) :: context.cic; + dk = (x', a'') :: context.dk; + map = D.Var x' :: context.map; + } + in + (context_x, (x', a'')) + + let rec translate_bindings context (bindings : (string * C.term) list) + translated : context * (D.var * D.term) list = + match bindings with + | (x, a) :: bindings -> + let context_x, (x', a'') = translate_binding (context, (x, a)) in + translate_bindings context_x bindings ((x', a'') :: translated) + | [] -> (context, List.rev translated) + + let translate_declaration name ty = + Format.eprintf "%s@." name; + let const' = translate_const (I.baseuri, name) in + let ty' = translate_type empty_context ty in + add_entry (fst const') (D.StcDeclaration (snd const', ty')) + + let translate_definition name ty body = + Format.eprintf "%s@." name; + let ty' = translate_type empty_context ty in + let body' = translate_term empty_context body in + let const' = translate_const (I.baseuri, name) in + add_entry (fst const') (D.Definition (snd const', Some ty', body')) + + let translate_constructor _ (_, name, ty) = translate_declaration name ty + + (** Translate the match elimination scheme for the given inductive type. + + For the inductive type + + ind : p_1 : C_1 -> ... -> p_k : C_k -> x_1 : A_1 -> ... -> x_n : A_n -> ind_sort + c_1 : p_1 : C_1 -> ... -> p_k : C_k -> y_1_1 : B_1_1 -> ... -> y_1_n1 : B_1_n1 -> I p_1 ... p_k M_1_1 ... M_1_n + ... + c_m : p_1 : C_1 -> ... -> p_k : C_k -> y_m_1 : B_m_1 -> ... 
-> y_m_nm : B_m_nm -> I p_1 ... p_k M_m_1 ... M_m_n + + the match scheme looks is + + match_ind : + p_1 : ||C_1|| -> ... -> p_k : ||C_k|| -> + return_sort : Sort -> + return_type : ( + x_1 : ||A_1|| -> ... -> x_n : ||A_n|| -> + T ind_sort (ind p_1 ... p_k x_1 ... x_n) -> U return_sort) -> + case_1 : ( + y_1_1 : ||B_1_1|| -> ... -> y_1_n1 : ||B_1_n1|| -> + T return_sort (return_type |M_1_1| ... |M_1_n| (c_1 p_1 ... p_k y_1_1 ... y_1_n1))) -> + ... + case_m : ( + y_m_1 : ||B_m_1|| -> ... -> y_m_nm : ||B_m_nm|| -> + T return_sort (return_type |M_m_1| ... |M_m_n| (c_m p_1 ... p_k y_1_1 y_m_1 ... y_m_nm))) -> + x_1 : ||A_1|| -> ... -> x_n : ||A_n|| -> + z : T ind_sort (ind p_1 ... p_k x_1 ... x_n) -> + T return_sort (return_type x_1 ... x_n z) + + with the rewrite rules + + [...] match_ind p_1 ... p_k y_1_1 {return_sort} return_type + case_1 ... case_m {|M_1_1|} ... {|M_1_n1|} + (c_1 p_1 ... p_k y_1_1 y_1_1 ... y_1_n1) --> + case_1 y_1_1 ... y_1_n1 + ... + [...] match_ind p_1 ... p_k y_1_1 {return_sort} return_type + case_1 ... case_m {|M_m_1|} ... {|M_m_nm|} + (c_m p_1 ... p_k y_1_1 y_m_1 ... y_m_nm) --> + case_m y_m_1 ... y_m_nm + **) + + let create_match_pragmas sort = + let univ_str = string_of_univ (translate_sort' sort) in + (D.Pragma ("BEGIN MATCH SORT=" ^ univ_str ^ "."), D.Pragma "END MATCH.") + + let translate_match_scheme leftno ind_name arity constructors sort = + (* Format.printf "size match set: %d@." (UMSet.cardinal !translated_match); *) + (* Format.printf "size match: %s %a@." 
ind_name pp_univ sort'; *) + (* Extract (p_i : C_i), (x_i : A_i), s_ind, (y_i_j : B_i_j), and M_i_j *) + let context = empty_context in + let left_ind_params, right_ind_params, ind_sort = + let ind_params, ind_sort = split_ind_arity context [] arity in + let left_params, right_ind_params = split_list leftno [] ind_params in + (left_params, right_ind_params, ind_sort) + in + let cons_info (_, cons_name, cons_type) = + let cons_params, cons_args = split_cons_type context [] cons_type in + let _, right_cons_params = split_list leftno [] cons_params in + let _, right_cons_args = split_list leftno [] cons_args in + (cons_name, right_cons_params, right_cons_args) + in + let cons_infos = List.map cons_info constructors in + (* Translate the name of the inductive type and its constructors. *) + let ind_const' = translate_const (I.baseuri, ind_name) in + let ind_sort' = translate_sort ind_sort in + let ind' = D.Const ind_const' in + (* Translate left parameters *) + let context, left_params' = translate_bindings context left_ind_params [] in + (* Translate match_ind *) + let match_const' = translate_match_const (I.baseuri, ind_name) sort in + (* Translate return_type *) + let return_type_name' = fresh_var context.dk "return_type" in + let sort' = translate_sort sort in + let _, right_ind_params' = translate_bindings context right_ind_params [] in + let quant_var_name' = fresh_var context.dk "z" in + let quant_var_type' = + term_type ind_sort' + (D.app_bindings ind' (left_params' @ right_ind_params')) + in + let return_type_type' = + to_cic_prods + (List.rev (right_ind_params' @ [ (quant_var_name', quant_var_type') ])) + (univ_type sort') + in + let context = + { context with dk = (return_type_name', return_type_type') :: context.dk } + in + let return_type' = D.Var return_type_name' in + (* Translate cases *) + let translate_case (cons_name, right_cons_params, right_cons_args) = + let cons_const' = translate_const (I.baseuri, cons_name) in + let cons' = D.Const cons_const' 
in + let case_name' = + fresh_var context.dk (Format.sprintf "case_%s" cons_name) + in + let context, right_cons_params' = + translate_bindings context right_cons_params [] + in + let right_cons_args' = + List.map (translate_term context) right_cons_args + in + let case_type' = + to_cic_prods + (List.rev right_cons_params') + (term_type sort' + (D.apps return_type' + (right_cons_args' + @ [ D.app_bindings cons' (left_params' @ right_cons_params') ]))) + in + (case_name', case_type') + in + let rec translate_cases context cons_infos translated = + match cons_infos with + | cons_info :: cons_infos -> + let case_name', case_type' = translate_case cons_info in + let context = + { context with dk = (case_name', case_type') :: context.dk } + in + let case' = D.Var case_name' in + translate_cases context cons_infos (case' :: translated) + | [] -> (context, List.rev translated) + in + let context, cases' = translate_cases context cons_infos [] in + (* Context common to the declaration and the rewrite rules *) + let common_context = context in + (* Translate conclusion *) + let context, right_ind_params' = + translate_bindings context right_ind_params [] + in + let quant_var_name' = fresh_var context.dk "z" in + let quant_var_type' = + term_type ind_sort' + (D.app_bindings ind' (left_params' @ right_ind_params')) + in + (* let context = { + context with + dk = (quant_var_name', quant_var_type') :: context.dk; + } in *) + let quant_var' = D.Var quant_var_name' in + let conclusion = + to_cic_prods + (List.rev (right_ind_params' @ [ (quant_var_name', quant_var_type') ])) + (term_type sort' + (D.App (D.app_bindings return_type' right_ind_params', quant_var'))) + in + (* Declare the match function *) + let match_type' = to_cic_prods common_context.dk conclusion in + let begin_pragma, end_pragma = create_match_pragmas sort in + add_entry (fst match_const') begin_pragma; + add_entry (fst match_const') + (D.DefDeclaration (snd match_const', match_type')); + (* Rewrite rules of the 
match function *) + let match_ind' = D.PConst match_const' in + let translate_rule i (cons_name, right_cons_params, right_cons_args) = + let cons_const' = translate_const (I.baseuri, cons_name) in + let cons' = D.PConst cons_const' in + let context = common_context in + let context, right_cons_params' = + translate_bindings context right_cons_params [] + in + let right_cons_args' = + List.map (translate_term context) right_cons_args + in + let left_pattern' = + D.papps + (D.papp_context match_ind' common_context.dk) + (List.map (fun m -> D.PGuard m) right_cons_args' + @ [ D.papp_bindings cons' (left_params' @ right_cons_params') ]) + in + let case' = List.nth cases' i in + let right_term' = D.app_bindings case' right_cons_params' in + add_entry (fst match_const') + (D.RewriteRule (context.dk, left_pattern', right_term')) + in + List.iteri translate_rule cons_infos; + add_entry (fst match_const') end_pragma + + (** A filter is similar to a match in that it blocks the application of + a function until a constructor is passed as an argument. It does not + reduce on variables for example. However only one uniform case + is given for all the constructors + + For the inductive type + + ind : x_1 : A_1 -> ... -> x_n : A_n -> ind_sort + c_1 : y_1_1 : B_1_1 -> ... -> y_1_n1 : B_1_n1 -> I p_1 ... p_k M_1_1 ... M_1_n + ... + c_m : y_m_1 : B_m_1 -> ... -> y_m_nm : B_m_nm -> I p_1 ... p_k M_m_1 ... M_m_n + + the filter scheme is + + filter_ind : + x_1 : ||A_1|| -> ... -> x_n : ||A_n|| -> + return_sort : Sort -> + return_type : (T ind_sort (ind x_1 ... x_n) -> U return_sort) -> + return : ( + z : T ind_sort (ind x_1 ... x_n) -> + T return_sort (return_type x_1 ... x_n z)) -> + z : T ind_sort (ind x_1 ... x_n) -> + T return_sort (return_type x_1 ... x_n z) + + with the rewrite rules + + [y_1_1 : ||B_1_1||, ..., y_1_n1 : ||B_1_n1||] + filter_ind {|M_1_1|} ... {|M_1_n|} return_sort return_type return (c_1 y_1_1 ... y_1_n1) --> + return (c_1 y_1_1 ... y_1_n1) + [...] 
+ filter_ind {|M_m_1|} ... {|M_m_n|} return_sort return_type return (c_m y_m_1 ... y_m_nm) --> + return (c_m y_m_1 ... y_m_nm) + **) + + let translate_filter_scheme _ ind_name arity constructors sort = + let sort' = translate_sort' sort in + let context = empty_context in + let ind_params, ind_sort = split_ind_arity context [] arity in + let cons_info (_, cons_name, cons_type) = + let cons_params, cons_args = split_cons_type context [] cons_type in + (cons_name, cons_params, cons_args) + in + let cons_infos = List.map cons_info constructors in + (* Translate the name of the inductive type and its constructors. *) + let ind_const' = translate_const (I.baseuri, ind_name) in + let ind_sort' = translate_sort ind_sort in + let ind' = D.Const ind_const' in + (* Translate inductive parameters *) + let context, ind_params' = translate_bindings context ind_params [] in + (* Translate filter_ind *) + let filter_const' = translate_filter_const (I.baseuri, ind_name) in + (* Translate return_type *) + let return_type_name' = fresh_var context.dk "return_type" in + let quant_var_name' = fresh_var context.dk "z" in + let quant_var_type' = + term_type ind_sort' (D.app_bindings ind' ind_params') + in + let return_type_type' = + to_cic_prods + [ (quant_var_name', quant_var_type') ] + (univ_type (term_of_univ sort')) + in + let context = + { context with dk = (return_type_name', return_type_type') :: context.dk } + in + let return_type' = D.Var return_type_name' in + (* Translate return *) + let return_term_name' = fresh_var context.dk "return" in + let quant_var_name' = fresh_var context.dk "z" in + let quant_var_type' = + term_type ind_sort' (D.app_bindings ind' ind_params') + in + let return_term_type' = + to_cic_prods + [ (quant_var_name', quant_var_type') ] + (term_type (term_of_univ sort') + (D.App (return_type', D.Var quant_var_name'))) + in + let context = + { context with dk = (return_term_name', return_term_type') :: context.dk } + in + (* let return_term' = D.Var 
return_term_name' in *) + let quant_var_name' = fresh_var context.dk "z" in + let quant_var_type' = + term_type ind_sort' (D.app_bindings ind' ind_params') + in + let conclusion' = + to_cic_prods + [ (quant_var_name', quant_var_type') ] + (term_type (term_of_univ sort') + (D.App (return_type', D.Var quant_var_name'))) + in + (* Declare the filter function *) + let filter_type' = to_cic_prods context.dk conclusion' in + add_entry + (fst @@ filter_const' sort) + (D.DefDeclaration (snd @@ filter_const' sort, filter_type')); + (* Rewrite rules of the match function *) + let filter_ind' = D.PConst (filter_const' sort) in + let translate_rule _ (cons_name, cons_params, cons_args) sort = + let cons_const' = translate_const (I.baseuri, cons_name) in + let context = empty_context in + let context, cons_params' = translate_bindings context cons_params [] in + let cons_args' = List.map (translate_term context) cons_args in + (* Translate return_type *) + let return_type_name' = fresh_var context.dk "return_type" in + let quant_var_name' = fresh_var context.dk "z" in + let quant_var_type' = term_type ind_sort' (D.apps ind' cons_args') in + let return_type_type' = + to_cic_prods + [ (quant_var_name', quant_var_type') ] + (univ_type ind_sort') + in + let context = + { + context with + dk = (return_type_name', return_type_type') :: context.dk; + } + in + let return_type' = D.Var return_type_name' in + (* Translate return *) + let return_term_name' = fresh_var context.dk "return" in + let quant_var_name' = fresh_var context.dk "z" in + let quant_var_type' = term_type ind_sort' (D.apps ind' cons_args') in + let return_term_type' = + to_cic_prods + [ (quant_var_name', quant_var_type') ] + (term_type ind_sort' (D.App (return_type', D.Var quant_var_name'))) + in + let context = + { + context with + dk = (return_term_name', return_term_type') :: context.dk; + } + in + let return_term' = D.Var return_term_name' in + let left_pattern' = + D.papps filter_ind' + (List.map (fun m -> 
D.PGuard m) cons_args' + @ [ + D.PVar return_type_name'; + D.PVar return_term_name'; + D.papp_bindings (D.PConst cons_const') cons_params'; + ]) + in + let right_term' = + D.App (return_term', D.app_bindings (D.Const cons_const') cons_params') + in + add_entry + (fst @@ filter_const' sort) + (D.RewriteRule (context.dk, left_pattern', right_term')) + in + List.iteri (fun i x -> translate_rule i x sort) cons_infos + + let gen_inductive_attrs (_, name, _, constructors) = + let constr_names = + List.map + (fun (_, cname, _) -> Format.sprintf "CONS:%s=%s" name cname) + constructors + in + let constr = String.concat " " constr_names in + Format.sprintf "NAME=%s %s" name constr + + let create_inductive_pragmas leftno types = + let leftno = Format.sprintf "LEFTNO=%d" leftno in + let attrs = List.map gen_inductive_attrs types in + let attrs' = String.concat " " attrs in + ( D.Pragma (Format.sprintf "BEGIN INDUCTIVE %s %s." leftno attrs'), + D.Pragma "END INDUCTIVE." ) + + let translate_inductive leftno ((_, name, ty, constructors) as ind) = + (* Format.printf "translate inductive: %s@." 
name; *) + Hashtbl.add inductive_registry name (leftno, ind); + translate_declaration name ty; + List.iter (translate_constructor leftno) constructors; + let univs = + let rec types n = if n = 0 then [ Type 0 ] else Type n :: types (n - 1) in + [ Prop ] @ types 5 + in + let univs = List.map back_to_sort univs in + List.iter (translate_match_scheme leftno name ty constructors) univs; + List.iter (translate_filter_scheme leftno name ty constructors) univs + + (* translate_match_scheme leftno name ty constructors; + translate_filter_scheme leftno name ty constructors *) + + let translate_inductives leftno types = + List.iter (translate_inductive leftno) types + + let get_inductive_arguments context ty = + match whd context ty with + | C.Appl (C.Const (R.Ref (uri, R.Ind _)) :: args) -> (uri, args) + | C.Const (R.Ref (uri, R.Ind _)) -> (uri, []) + | _ -> failwith "not an inductive type" + + (** Translate one recursive function definition. + + For the inductive type + + ind : x_1 : A_1 -> ... -> x_n : A_n -> sort_ind. + ... + + the recursive function + + f : w_1 : D_1 -> ... -> w_k : D_k -> z : ind M_1 ... M_n -> R = M. + + where R : s is translated as + + f : w_1 : ||D_1|| -> ... -> w_k : ||D_k|| -> z : ||ind M_1 ... M_n|| -> ||R||. + f_body : w_1 : ||D_1|| -> ... -> w_k : ||D_k|| -> z : ||ind M_1 ... M_n|| -> ||R||. + + with the rewrite rules + + [w_1 : ||D_1||, ..., w_k : ||D_k||, z : ||ind M_1 ... M_n||] + f w_1 ... w_k z --> filter_ind |M_1| ... |M_n| |s| (z : ||ind M_1 ... M_n|| => |R|) (f_body w_1 ... w_k) z. + [w_1 : ||D_1||, ..., w_k : ||D_k||, z : ||ind M_1 ... M_n||] + f_body w_1 ... w_k z --> |M|. + **) + let translate_fixpoint (_, name, recno, ty, body) = + (* Format.printf "Dedukti Fixpoint: %s@." name; + Format.printf "Body: %s@." 
(new P.status#ppterm [] [] [] body); *) + let rec split_fixpoint recno params ty body = + match (ty, body) with + | C.Prod (x, a, b), C.Lambda (_, _, m) when recno = 0 -> + (List.rev params, (x, a), b, m) + | C.Prod (x, a, b), C.Lambda (_, _, m) when recno > 0 -> + split_fixpoint (recno - 1) ((x, a) :: params) b m + | _ -> failwith "invalid fixpoint" + in + let context = empty_context in + let params, rec_param, return_type, return = + split_fixpoint recno [] ty body + in + let ind_uri, ind_args = get_inductive_arguments context (snd rec_param) in + let fun_const' = translate_const (I.baseuri, name) in + (* Format.printf "new name: %s@." (snd fun_const'); *) + let fun_body' = translate_body_const (I.baseuri, name) in + let filter_ind' = + translate_filter_const (U.baseuri_of_uri ind_uri, U.name_of_uri ind_uri) + in + let context, params' = translate_bindings context params [] in + let ind_args' = List.map (translate_term context) ind_args in + let context, rec_param' = translate_binding (context, rec_param) in + let return_sort = sort_of context return_type in + let return_type' = translate_term context return_type in + let return_type'' = translate_type context return_type in + let fun_const_type'' = + to_cic_prods (List.rev (params' @ [ rec_param' ])) return_type'' + in + let () = + add_entry (fst fun_const') + (D.DefDeclaration (snd fun_const', fun_const_type'')) + in + let fun_body_type'' = + to_cic_prods (List.rev (params' @ [ rec_param' ])) return_type'' + in + let () = + add_entry (fst fun_body') + (D.DefDeclaration (snd fun_body', fun_body_type'')) + in + (* Must translate return AFTER declaring the functions otherwise some + let definitions referring to the functions inside the body will + be lifted before the function declarations, and therefore not + typecheck. 
*) + let return' = translate_term context return in + let left_fun_pattern' = + D.papp_bindings (D.PConst fun_const') (params' @ [ rec_param' ]) + in + let right_fun_term' = + D.apps + (D.Const (filter_ind' return_sort)) + (ind_args' + @ [ + D.Lam (fst rec_param', snd rec_param', return_type'); + D.app_bindings (D.Const fun_body') params'; + D.Var (fst rec_param'); + ]) + in + let () = + add_entry (fst fun_const') + (D.RewriteRule (context.dk, left_fun_pattern', right_fun_term')) + in + let left_body_pattern' = + D.papp_bindings (D.PConst fun_body') (params' @ [ rec_param' ]) + in + let right_body_term' = return' in + let () = + add_entry (fst fun_body') (create_fixpoint_body_pragma (snd fun_const')) + in + let () = + add_entry (fst fun_body') + (D.RewriteRule (context.dk, left_body_pattern', right_body_term')) + in + () + + let translate_fixpoints funs = List.iter translate_fixpoint funs + + (** Translate a Matita object. There are 4 kinds of objects: + - Constant declarations (i.e. axioms) + - Constant definitions + - Fixpoints definitions (i.e. top-level recursive functions) + - Inductive type definitions **) + let translate_obj_kind obj_kind = + let modname = translate_baseuri I.baseuri in + match obj_kind with + | C.Constant (_, name, None, ty, (att, _, _)) when att = `Generated -> + add_entry modname begin_gen; + translate_declaration name ty; + add_entry modname end_gen + | C.Constant (_, name, None, ty, _) -> + HLog.debug + (Format.sprintf "Dedukti: Translating constant declaration %s" name); + (* The relevance argument is irrelevant for our purposes (no pun intended). 
*) + translate_declaration name ty + | C.Constant (_, name, Some body, ty, _) -> + HLog.debug + (Format.sprintf "Dedukti: Translating constant definition %s" name); + (* Hack for prop irrelevance *) + let problematic = + [] + (* "lemmaK"; "eq_sigma_true"; "Vector_eq"; "vec_expand"; "vector_nil"; + "change_vec_cons_tail"; "pmap_vec_cons"; "pmap_change"; + "while_trans_false"; "sem_obj_to_cfg"; "sem_cfg_to_obj"; *) + (* "le_fact_10"; *) + in + if List.mem name problematic then translate_declaration name ty + else translate_definition name ty body + | C.Fixpoint (is_recursive, funs, (attr, _, _)) when attr = `Generated -> + HLog.debug (Format.sprintf "Dedukti: Translating fixpoint definitions"); + if not is_recursive then not_implemented "co-recursive fixpoint"; + add_entry modname begin_gen; + translate_fixpoints funs; + add_entry modname end_gen + | C.Fixpoint (is_recursive, funs, _) -> + HLog.debug (Format.sprintf "Dedukti: Translating fixpoint definitions"); + (* The boolean [is_recursive] indicates if the functions are recursive + (when true) or co-recursive (when false). + The [f_attr] argument is not needed by the kernel. *) + if not is_recursive then not_implemented "co-recursive fixpoint"; + let begin_pragma, end_pragma = create_fixpoint_pragmas funs in + add_entry modname begin_pragma; + translate_fixpoints funs; + add_entry modname end_pragma + | C.Inductive (is_inductive, leftno, types, (attr, _)) + when attr = `Generated -> + if not is_inductive then not_implemented "co-inductive type"; + add_entry modname begin_gen; + translate_inductives leftno types; + add_entry modname end_gen + | C.Inductive (is_inductive, leftno, types, _) -> + HLog.debug (Format.sprintf "Dedukti: Translating inductive definitions"); + (* The boolean [is_inductive] indicates if the types are inductive + (when true) or co-inductive (when false).dedukti + The [leftno] indicates the number of left parameters. + The [i_attr] argument is not needed by the kernel. 
*) + if not is_inductive then not_implemented "co-inductive type"; + let begin_pragma, end_pragma = create_inductive_pragmas leftno types in + add_entry modname begin_pragma; + translate_inductives leftno types; + add_entry modname end_pragma +end + +(** Extraction entry-points **) + +let extraction_enabled () = + let safe_get_bool name default = + try Helm_registry.get_bool name + with Helm_registry.Key_not_found _ -> default + in + safe_get_bool "extract_dedukti" false + +(** This function is called every time an object is added to the library. **) +let extract_obj status obj = + if extraction_enabled () then ( + let uri, _, metasenv, subst, obj_kind = obj in + (* The depth is the maximum of the depth of the objects occurring + in the definition plus one, used for reduction strategies. + It is equal to 0 if the object does not have a body (e.g. an axiom). *) + HLog.message + (Format.sprintf "Dedukti: Extracting object %s" (U.string_of_uri uri)); + (* There should be no unresolved meta-variables at this point. *) + assert (List.length metasenv = 0); + assert (List.length subst = 0); + let module I = struct + let status = status + let baseuri = U.baseuri_of_uri uri + end in + let module T = Translation (I) in + T.translate_obj_kind obj_kind; + HLog.message + (Format.sprintf "Dedukti: Done extracting object %s" (U.string_of_uri uri))) + +(** This function is called every time a constraint is added to the library. **) +let extract_constraint _ u1 u2 = + if extraction_enabled () then ( + HLog.message (Format.sprintf "Dedukti: Extracting universe constraint"); + translate_constraint u1 u2) + +let basedir = "."
+let filename_of_modname modname = Format.sprintf "%s.dk" modname + +let filepath_of_modname modname = + let filename = filename_of_modname modname in + Filename.concat basedir filename + +let output_module modname signature = + let filepath = filepath_of_modname modname in + let out_channel = open_out filepath in + let formatter = F.formatter_of_out_channel out_channel in + F.fprintf formatter "%a@." DeduktiPrint.print_signature signature; + close_out out_channel + +let output_modules () = + if extraction_enabled () then ( + HLog.message (Format.sprintf "Dedukti: Writing files"); + Hashtbl.iter output_module modules_table; + HLog.message (Format.sprintf "Dedukti: Writing universes"); + output_module univs_modname (univs_signature ())) diff --git a/components/ng_extraction/deduktiExtraction.mli b/components/ng_extraction/deduktiExtraction.mli new file mode 100644 index 000000000..f6361f42e --- /dev/null +++ b/components/ng_extraction/deduktiExtraction.mli @@ -0,0 +1,12 @@ +(** Extraction of Matita proofs to Dedukti **) + +val extract_obj : NCic.status -> NCic.obj -> unit +(** Extract a single object and store it in the corresponding module signature. **) + +val extract_constraint : NCic.status -> NCic.universe -> NCic.universe -> unit +(** Register the constraint between two universes. **) + +val output_modules : unit -> unit +(** Write all the extracted modules and universes to files. + Universe constraints can change during the translation, which is why + we should delay the output until all modules have been translated.
**) diff --git a/components/ng_extraction/deduktiPrint.ml b/components/ng_extraction/deduktiPrint.ml new file mode 100644 index 000000000..55459dc8a --- /dev/null +++ b/components/ng_extraction/deduktiPrint.ml @@ -0,0 +1,111 @@ +module D = Dedukti +module F = Format + +let print_var out var = F.fprintf out "%s" var +let print_constname out constname = F.fprintf out "%s" constname +let print_modname out modname = F.fprintf out "%s" modname + +let print_const out (modname, constname) = + F.fprintf out "%a.%a" print_modname modname print_constname constname + +let print_sort out sort = + match sort with D.Type -> F.fprintf out "Type" | D.Kind -> assert false + +let rec print_abs_term out term = + match term with + | D.Prod (x, a, b) -> + F.fprintf out "@[%a :@;<1 2>%a ->@ %a@]" print_var x print_app_term a + print_abs_term b + | D.Lam (x, a, m) -> + F.fprintf out "@[%a :@;<1 2>%a =>@ %a@]" print_var x print_app_term a + print_abs_term m + | _ -> F.fprintf out "%a" print_app_term term + +and print_app_term out term = + match term with + | D.App (m, n) -> + F.fprintf out "@[%a@;<1 2>%a@]" print_app_term m print_atomic_term n + | _ -> F.fprintf out "%a" print_atomic_term term + +and print_atomic_term out term = + match term with + | D.Var x -> F.fprintf out "%a" print_var x + | D.Const c -> F.fprintf out "%a" print_const c + | D.Sort s -> F.fprintf out "%a" print_sort s + | D.Prod _ | D.Lam _ | D.App _ -> F.fprintf out "(%a)" print_abs_term term + +let print_term out term = print_abs_term out term + +let rec print_abs_pattern out pattern = + match pattern with + | D.PLam (x, a, m) -> + F.fprintf out "@[%a :@;<1 2>%a =>@ %a@]" print_var x print_app_pattern a + print_abs_pattern m + | _ -> F.fprintf out "%a" print_app_pattern pattern + +and print_app_pattern out pattern = + match pattern with + | D.PApp (m, n) -> + F.fprintf out "@[%a@;<1 2>%a@]" print_app_pattern m print_atomic_pattern n + | _ -> F.fprintf out "%a" print_atomic_pattern pattern + +and print_atomic_pattern 
out pattern = + match pattern with + | D.PVar x -> F.fprintf out "%a" print_var x + | D.PConst c -> F.fprintf out "%a" print_const c + | D.PGuard m -> F.fprintf out "(%a)" print_abs_term m + | D.PLam _ | D.PApp _ -> F.fprintf out "(%a)" print_abs_pattern pattern + +let rec print_pattern out pattern = + (* Because Dedukti does not allow referring to the head constant by its fully + qualified name (module_name.constant_name), we have to hack around it. *) + match pattern with + | D.PApp (m, n) -> + F.fprintf out "@[%a@;<1 2>%a@]" print_pattern m print_atomic_pattern n + | D.PConst (_, constname) -> print_constname out constname + | _ -> failwith "Invalid pattern" + +let rec print_context out = function + | [] -> () + | [ (x, _) ] -> F.fprintf out "@[%a@]" print_var x + | (x, _) :: g -> + (* Contexts are stored in reverse order. *) + F.fprintf out "@[%a,@ %a@]" print_context g print_var x + +let print_command out = function + | D.Require modname -> F.fprintf out "#REQUIRE %a" print_modname modname + +let print_comment out comment = F.fprintf out "(; %s ;)" comment +let print_pragma out pragma = F.fprintf out "#PRAGMA %s" pragma + +let print_entry out entry = + match entry with + | D.StcDeclaration (name, ty) -> + F.fprintf out "@[%a :@;<1 2>%a.@]" print_constname name print_term ty + | D.DefDeclaration (name, ty) -> + F.fprintf out "def @[%a :@;<1 2>%a.@]" print_constname name print_term ty + | D.Definition (name, None, body) -> + F.fprintf out "def @[%a :=@;<1 2>%a.@]" print_constname name print_term + body + | D.Definition (name, Some ty, body) -> + F.fprintf out "def @[%a :@;<1 2>%a@;<1 2>:=@;<1 2>%a.@]" print_constname + name print_term ty print_term body + | D.RewriteRule (context, left_pattern, right_term) -> + F.fprintf out "@[[ %a ]@;<1 2>%a -->@;<1 2>%a.@]" print_context context + print_pattern left_pattern print_term right_term + | D.Command command -> F.fprintf out "%a." 
print_command command + | D.Comment comment -> F.fprintf out "%a" print_comment comment + | D.Pragma pragma -> F.fprintf out "%a" print_pragma pragma + +let print_signature out signature = + (* Signatures are stored in reverse order. *) + (* Reverse first and print one at a time to avoid stack overflow. *) + let entries = List.rev signature in + let rec print_entries out entries = + match entries with + | [] -> () + | entry :: entries -> + F.fprintf out "@[%a@.@.@]" print_entry entry; + print_entries out entries + in + print_entries out entries diff --git a/components/ng_extraction/deduktiPrint.mli b/components/ng_extraction/deduktiPrint.mli new file mode 100644 index 000000000..6cf70ed18 --- /dev/null +++ b/components/ng_extraction/deduktiPrint.mli @@ -0,0 +1,5 @@ +(** Pretty-printer for Dedukti **) + +val print_term : Format.formatter -> Dedukti.term -> unit +val print_entry : Format.formatter -> Dedukti.entry -> unit +val print_signature : Format.formatter -> Dedukti.signature -> unit diff --git a/components/ng_kernel/nCicReduction.ml b/components/ng_kernel/nCicReduction.ml index 131ccad07..744ad3809 100644 --- a/components/ng_kernel/nCicReduction.ml +++ b/components/ng_kernel/nCicReduction.ml @@ -337,7 +337,7 @@ let alpha_eq (status:#NCic.status) ~test_lambda_source aux test_eq_only metasenv ;; (* t1, t2 must be well-typed *) -let are_convertible status ~metasenv ~subst = +let are_convertible status ~metasenv ~subst ?(test_eq_only=false) = let rec aux test_eq_only context t1 t2 = let alpha_eq status test_eq_only = alpha_eq status ~test_lambda_source:false aux test_eq_only metasenv subst @@ -393,7 +393,7 @@ let are_convertible status ~metasenv ~subst = in convert_machines test_eq_only (put_in_whd (0,[],t1,[]) (0,[],t2,[])) in - aux false + aux test_eq_only ;; let alpha_eq status metasenv subst = diff --git a/components/ng_kernel/nCicReduction.mli b/components/ng_kernel/nCicReduction.mli index f53cb5dc8..4423ee673 100644 --- 
a/components/ng_kernel/nCicReduction.mli +++ b/components/ng_kernel/nCicReduction.mli @@ -25,7 +25,7 @@ val set_get_relevance : val are_convertible : #NCic.status -> metasenv:NCic.metasenv -> subst:NCic.substitution -> - NCic.context -> NCic.term -> NCic.term -> bool + ?test_eq_only:bool -> NCic.context -> NCic.term -> NCic.term -> bool (* performs head beta/(delta)/cast reduction; the default is to not perform diff --git a/components/ng_kernel/nCicTypeChecker.ml b/components/ng_kernel/nCicTypeChecker.ml index 24217d6ae..520a60c45 100644 --- a/components/ng_kernel/nCicTypeChecker.ml +++ b/components/ng_kernel/nCicTypeChecker.ml @@ -1244,6 +1244,8 @@ let height_of_obj_kind status uri ~subst = (List.fold_left (fun l (_,_,_,ty,bo) -> let bo = debruijn status uri iflno [] ~subst bo in + HLog.warn ("ZZZ " ^ status#ppterm ~context:[] ~subst:[] ~metasenv:[] ty); + HLog.warn ("YYY " ^ status#ppterm ~context:[] ~subst:[] ~metasenv:[] bo); ty::bo::l ) [] ifl) | NCic.Constant (_,_,Some bo,ty,_) -> height_of_term status [bo;ty] @@ -1316,8 +1318,8 @@ let typecheck_obj status (uri,height,metasenv,subst,kind) = let rec enum_from k = function [] -> [] | v::tl -> (k,v)::enum_from (k+1) tl in - guarded_by_destructors status r_uri r_len - ~subst ~metasenv context (enum_from (x+2) kl) m + let _fixme = guarded_by_destructors status r_uri r_len + ~subst ~metasenv context (enum_from (x+2) kl) in ignore m end else match returns_a_coinductive status ~subst [] ty with | None -> diff --git a/components/ng_library/dune b/components/ng_library/dune index bc4fe3bce..174408103 100644 --- a/components/ng_library/dune +++ b/components/ng_library/dune @@ -1,6 +1,6 @@ (library (name helm_ng_library) - (libraries helm_ng_refiner helm_registry helm_library) + (libraries helm_ng_refiner helm_registry helm_library helm_ng_extraction) (wrapped false)) (env (_ diff --git a/components/ng_library/nCicLibrary.ml b/components/ng_library/nCicLibrary.ml index 1852673f8..7a59813da 100644 --- 
a/components/ng_library/nCicLibrary.ml +++ b/components/ng_library/nCicLibrary.ml @@ -338,6 +338,7 @@ let aliases_of uri = if NUri.eq uri' uri then Some nref else None) !local_aliases ;; + let add_obj status ((u,_,_,_,_) as orig_obj) = NCicEnvironment.check_and_add_obj status orig_obj; storage := (`Obj (u,orig_obj))::!storage; @@ -370,6 +371,7 @@ let add_obj status ((u,_,_,_,_) as orig_obj) = ) il) in local_aliases := references @ !local_aliases; + DeduktiExtraction.extract_obj (status :> NCic.status) orig_obj; status#set_timestamp (!storage,!local_aliases) ;; @@ -383,6 +385,7 @@ let add_constraint status ~acyclic u1 u2 = (prerr_endline "CANNOT ADD A CONSTRAINT TWICE"; assert false); NCicEnvironment.add_lt_constraint ~acyclic u1 u2; storage := (`Constr (u1,u2)) :: !storage; + DeduktiExtraction.extract_constraint (status :> NCic.status) u1 u2; status#set_timestamp (!storage,!local_aliases) ;; diff --git a/dune-project b/dune-project index 93439a075..9f5635a90 100644 --- a/dune-project +++ b/dune-project @@ -40,9 +40,12 @@ (lablgtk3-sourceview3 (>= 3.1.3)) dune-build-info dune-site - (camlp5 (>= 8.00.04))) + (camlp5 (>= 8.00.04)) + dedukti) (tags ("interactive theorem proving" "calculus of constructions")) (sites (share myshare))) +(using menhir 2.1) + ; See the complete stanza docs at https://dune.readthedocs.io/en/stable/dune-files.html#dune-project diff --git a/matita.opam b/matita.opam index 9baf341ee..9a8196976 100644 --- a/matita.opam +++ b/matita.opam @@ -24,6 +24,7 @@ depends: [ "dune-build-info" "dune-site" "camlp5" {>= "8.00.04"} + "dedukti" "odoc" {with-doc} ] build: [ diff --git a/matita/dune b/matita/dune index 12123e8c6..0cb779d32 100644 --- a/matita/dune +++ b/matita/dune @@ -1,7 +1,7 @@ (library (name matita_cli) (wrapped false) - (libraries helm_grafite_engine lablgtk3-sourceview3 dune-build-info dune-site) + (libraries helm_grafite_engine lablgtk3-sourceview3 dune-build-info dune-site helm_dedukti) (modules mysites buildTimeConf matitaTypes 
matitaMiscCli applyTransformation matitaEngine matitaExcPp matitaInit)) diff --git a/matita/lib/basics/core_notation.ma b/matita/lib/basics/core_notation.ma index 0a1c3fd80..77e44c201 100644 --- a/matita/lib/basics/core_notation.ma +++ b/matita/lib/basics/core_notation.ma @@ -111,6 +111,9 @@ for @{ 'congruent $n $m $p }. (* pairs, projections *******************************************************) +notation "hvbox(\langle term 19 a, break term 19 b\rangle)" +with precedence 90 for @{ 'pair $a $b}. + notation "hvbox(x break \times y)" with precedence 70 for @{ 'product $x $y}. @@ -226,6 +229,10 @@ for @{ 'divide $a $b }. notation "- term 65 a" with precedence 65 for @{ 'uminus $a }. +notation "a !" + non associative with precedence 80 +for @{ 'fact $a }. + notation "\sqrt a" non associative with precedence 65 for @{ 'sqrt $a }. @@ -256,24 +263,71 @@ for @{ 'iff $a $b }. notation "hvbox(\Omega \sup term 90 A)" non associative with precedence 90 for @{ 'powerset $A }. - notation > "hvbox(\Omega ^ term 90 A)" non associative with precedence 90 for @{ 'powerset $A }. +notation < "hvbox({ ident i | term 19 p })" with precedence 90 +for @{ 'subset (\lambda ${ident i} : $nonexistent . $p)}. + +notation > "hvbox({ ident i | term 19 p })" with precedence 90 +for @{ 'subset (\lambda ${ident i}. $p)}. + +notation < "hvbox({ ident i ∈ term 19 s | term 19 p })" with precedence 90 +for @{ 'comprehension $s (\lambda ${ident i} : $nonexistent . $p)}. + +notation > "hvbox({ ident i ∈ term 19 s | term 19 p })" with precedence 90 +for @{ 'comprehension $s (\lambda ${ident i}. $p)}. + notation "hvbox(a break ∈ b)" non associative with precedence 45 for @{ 'mem $a $b }. notation "hvbox(a break ∉ b)" non associative with precedence 45 for @{ 'notmem $a $b }. +notation "hvbox(a break ≬ b)" non associative with precedence 45 +for @{ 'overlaps $a $b }. (* \between *) + +notation "hvbox(a break ⊆ b)" non associative with precedence 45 +for @{ 'subseteq $a $b }. 
(* \subseteq *) + notation "hvbox(a break ∩ b)" left associative with precedence 60 for @{ 'intersects $a $b }. (* \cap *) notation "hvbox(a break ∪ b)" left associative with precedence 55 for @{ 'union $a $b }. (* \cup *) +notation "hvbox({ term 19 a })" with precedence 90 for @{ 'singl $a}. + (* other notations **********************************************************) +notation "hvbox(a break \approx b)" non associative with precedence 45 + for @{ 'napart $a $b}. + +notation "hvbox(a break # b)" non associative with precedence 45 + for @{ 'apart $a $b}. + +notation "hvbox(a break \circ b)" + left associative with precedence 60 +for @{ 'compose $a $b }. + +notation < "↓ \ensp a" with precedence 60 for @{ 'downarrow $a }. +notation > "↓ a" with precedence 60 for @{ 'downarrow $a }. + +notation "hvbox(U break ↓ V)" non associative with precedence 60 for @{ 'fintersects $U $V }. + +notation "↑a" with precedence 60 for @{ 'uparrow $a }. + +notation "hvbox(a break ↑ b)" with precedence 60 for @{ 'funion $a $b }. + +notation < "term 76 a \sup term 90 b" non associative with precedence 75 for @{ 'exp $a $b}. +notation > "a \sup term 90 b" non associative with precedence 75 for @{ 'exp $a $b}. +notation > "a ^ term 90 b" non associative with precedence 75 for @{ 'exp $a $b}. +notation "s \sup (-1)" non associative with precedence 75 for @{ 'invert $s }. +notation > "s ^ (-1)" non associative with precedence 75 for @{ 'invert $s }. +notation < "s \sup (-1) x" non associative with precedence 90 for @{ 'invert_appl $s $x}. + +notation "| term 19 C |" with precedence 70 for @{ 'card $C }. + notation "\naturals" non associative with precedence 90 for @{'N}. notation "\rationals" non associative with precedence 90 for @{'Q}. notation "\reals" non associative with precedence 90 for @{'R}. 
diff --git a/matita/lib/basics/deqsets.ma b/matita/lib/basics/deqsets.ma index c17db5dc0..fd71131d7 100644 --- a/matita/lib/basics/deqsets.ma +++ b/matita/lib/basics/deqsets.ma @@ -1,12 +1,12 @@ (* - ||M|| This file is part of HELM, an Hypertextual, Electronic - ||A|| Library of Mathematics, developed at the Computer Science - ||T|| Department of the University of Bologna, Italy. - ||I|| - ||T|| - ||A|| - \ / This file is distributed under the terms of the - \ / GNU General Public License Version 2 + ||M|| This file is part of HELM, an Hypertextual, Electronic + ||A|| Library of Mathematics, developed at the Computer Science + ||T|| Department of the University of Bologna, Italy. + ||I|| + ||T|| + ||A|| + \ / This file is distributed under the terms of the + \ / GNU General Public License Version 2 V_______________________________________________________________ *) include "basics/types.ma". @@ -14,39 +14,39 @@ include "basics/bool.ma". (****** DeqSet: a set with a decidbale equality ******) -record DeqSet : Type[1] ≝ { +record DeqSet : Type[1] ≝ { carr :> Type[0]; eqb: carr → carr → bool; eqb_true: ∀x,y. (eqb x y = true) ↔ (x = y) }. - + notation "a == b" non associative with precedence 45 for @{ 'eqb $a $b }. interpretation "eqb" 'eqb a b = (eqb ? a b). -notation "\P H" non associative with precedence 90 - for @{(proj1 … (eqb_true ???) $H)}. +notation "\P H" non associative with precedence 90 + for @{(proj1 … (eqb_true ???) $H)}. + +notation "\b H" non associative with precedence 90 + for @{(proj2 … (eqb_true ???) $H)}. -notation "\b H" non associative with precedence 90 - for @{(proj2 … (eqb_true ???) $H)}. - -notation < "𝐃" non associative with precedence 90 +notation < "𝐃" non associative with precedence 90 for @{'bigD}. interpretation "DeqSet" 'bigD = (mk_DeqSet ???). - -lemma eqb_false: ∀S:DeqSet.∀a,b:S. + +lemma eqb_false: ∀S:DeqSet.∀a,b:S. (eqb ? a b) = false ↔ a ≠ b. 
-#S #a #b % #H +#S #a #b % #H [@(not_to_not … not_eq_true_false) #H1 (\P Heq) // @@ -103,16 +103,16 @@ qed. definition DeqOption ≝ λA:DeqSet. mk_DeqSet (option A) (eq_option A) (eq_option_true A). - -unification hint 0 ≔ C; + +unification hint 0 ≔ C; T ≟ carr C, X ≟ DeqOption C -(* ---------------------------------------- *) ⊢ +(* ---------------------------------------- *) ⊢ option T ≡ carr X. -unification hint 0 ≔ T,a1,a2; +unification hint 0 ≔ T,a1,a2; X ≟ DeqOption T -(* ---------------------------------------- *) ⊢ +(* ---------------------------------------- *) ⊢ eq_option T a1 a2 ≡ eqb X a1 a2. @@ -130,20 +130,20 @@ qed. definition DeqProd ≝ λA,B:DeqSet. mk_DeqSet (A×B) (eq_pairs A B) (eq_pairs_true A B). - -unification hint 0 ≔ C1,C2; + +unification hint 0 ≔ C1,C2; T1 ≟ carr C1, T2 ≟ carr C2, X ≟ DeqProd C1 C2 -(* ---------------------------------------- *) ⊢ +(* ---------------------------------------- *) ⊢ T1×T2 ≡ carr X. -unification hint 0 ≔ T1,T2,p1,p2; +unification hint 0 ≔ T1,T2,p1,p2; X ≟ DeqProd T1 T2 -(* ---------------------------------------- *) ⊢ +(* ---------------------------------------- *) ⊢ eq_pairs T1 T2 p1 p2 ≡ eqb X p1 p2. -example hint2: ∀b1,b2. +example hint2: ∀b1,b2. 〈b1,true〉==〈false,b2〉=true → 〈b1,true〉=〈false,b2〉. #b1 #b2 #H @(\P H) qed. @@ -154,17 +154,17 @@ definition eq_sum ≝ match p1 with [ inl a1 ⇒ match p2 with [ inl a2 ⇒ a1 == a2 | inr b2 ⇒ false ] - | inr b1 ⇒ match p2 with + | inr b1 ⇒ match p2 with [ inl a2 ⇒ false | inr b2 ⇒ b1 == b2 ] ]. lemma eq_sum_true: ∀A,B:DeqSet.∀p1,p2:A+B. eq_sum A B p1 p2 = true ↔ p1 = p2. -#A #B * - [#a1 * - [#a2 normalize % +#A #B * + [#a1 * + [#a2 normalize % [#eqa >(\P eqa) // | #H destruct @(\b ?) //] - |#b2 normalize % #H destruct + |#b2 normalize % #H destruct ] |#b1 * [#a2 normalize % #H destruct @@ -176,48 +176,48 @@ qed. definition DeqSum ≝ λA,B:DeqSet. mk_DeqSet (A+B) (eq_sum A B) (eq_sum_true A B). 
- -unification hint 0 ≔ C1,C2; + +unification hint 0 ≔ C1,C2; T1 ≟ carr C1, T2 ≟ carr C2, X ≟ DeqSum C1 C2 -(* ---------------------------------------- *) ⊢ +(* ---------------------------------------- *) ⊢ T1+T2 ≡ carr X. -unification hint 0 ≔ T1,T2,p1,p2; +unification hint 0 ≔ T1,T2,p1,p2; X ≟ DeqSum T1 T2 -(* ---------------------------------------- *) ⊢ +(* ---------------------------------------- *) ⊢ eq_sum T1 T2 p1 p2 ≡ eqb X p1 p2. (* sigma *) -definition eq_sigma ≝ +definition eq_sigma ≝ λA:DeqSet.λP:A→Prop.λp1,p2:Σx:A.P x. - match p1 with - [mk_Sig a1 h1 ⇒ - match p2 with + match p1 with + [mk_Sig a1 h1 ⇒ + match p2 with [mk_Sig a2 h2 ⇒ a1==a2]]. - + (* uses proof irrelevance *) -lemma eq_sigma_true: ∀A:DeqSet.∀P.∀p1,p2:Σx.P x. - eq_sigma A P p1 p2 = true ↔ p1 = p2. +axiom eq_sigma_true: ∀A:DeqSet.∀P.∀p1,p2:Σx.P x. + eq_sigma A P p1 p2 = true ↔ p1 = p2. (* #A #P * #a1 #Ha1 * #a2 #Ha2 % - [normalize #eqa generalize in match Ha1; >(\P eqa) // + [normalize #eqa generalize in match Ha1; >(\P eqa) // |#H >H @(\b ?) // ] -qed. +qed. *) definition DeqSig ≝ λA:DeqSet.λP:A→Prop. mk_DeqSet (Σx:A.P x) (eq_sigma A P) (eq_sigma_true A P). (* -unification hint 0 ≔ C,P; +unification hint 0 ≔ C,P; T ≟ carr C, X ≟ DeqSig C P -(* ---------------------------------------- *) ⊢ +(* ---------------------------------------- *) ⊢ Σx:T.P x ≡ carr X. -unification hint 0 ≔ T,P,p1,p2; +unification hint 0 ≔ T,P,p1,p2; X ≟ DeqSig T P -(* ---------------------------------------- *) ⊢ +(* ---------------------------------------- *) ⊢ eq_sigma T P p1 p2 ≡ eqb X p1 p2. *) diff --git a/matita/lib/basics/finset.ma b/matita/lib/basics/finset.ma index 24a5e16ce..df8d0a87c 100644 --- a/matita/lib/basics/finset.ma +++ b/matita/lib/basics/finset.ma @@ -9,7 +9,7 @@ \ / GNU General Public License Version 2 V_______________________________________________________________ *) -include "basics/deqlist.ma". +include "basics/lists/listb.ma". 
(****** DeqSet: a set with a decidable equality ******) @@ -270,184 +270,3 @@ f a = b → memb ? 〈a,b〉 (graph_enum A B f) = true. #A #B #f #a #b #eqf @memb_filter_l [@(\b eqf)] @enum_prod_complete // qed. - -(* FinFun *) - -definition enum_fun_raw: ∀A,B:DeqSet.list A → list B → list (list (DeqProd A B)) ≝ - λA,B,lA,lB.foldr A (list (list (DeqProd A B))) - (λa.compose ??? (λb. cons ? 〈a,b〉) lB) [[]] lA. - -lemma enum_fun_raw_cons: ∀A,B,a,lA,lB. - enum_fun_raw A B (a::lA) lB = - compose ??? (λb. cons ? 〈a,b〉) lB (enum_fun_raw A B lA lB). -// -qed. - -definition is_functional ≝ λA,B:DeqSet.λlA:list A.λl: list (DeqProd A B). - map ?? (fst A B) l = lA. - -definition carr_fun ≝ λA,B:FinSet. - DeqSig (DeqList (DeqProd A B)) (is_functional A B (enum A)). - -definition carr_fun_l ≝ λA,B:DeqSet.λl. - DeqSig (DeqList (DeqProd A B)) (is_functional A B l). - -lemma compose_spec1 : ∀A,B,C:DeqSet.∀f:A→B→C.∀a:A.∀b:B.∀lA:list A.∀lB:list B. - a ∈ lA = true → b ∈ lB = true → ((f a b) ∈ (compose A B C f lA lB)) = true. -#A #B #C #f #a #b #lA elim lA - [normalize #lB #H destruct - |#a1 #tl #Hind #lB #Ha #Hb cases (orb_true_l ?? Ha) #Hcase - [>(\P Hcase) normalize @memb_append_l1 @memb_map // - |@memb_append_l2 @Hind // - ] - ] -qed. - -lemma compose_cons: ∀A,B,C.∀f:A→B→C.∀l1,l2,a. - compose A B C f (a::l1) l2 = - (map ?? (f a) l2)@(compose A B C f l1 l2). -// qed. - -lemma compose_spec2 : ∀A,B,C:DeqSet.∀f:A→B→C.∀c:C.∀lA:list A.∀lB:list B. - c ∈ (compose A B C f lA lB) = true → - ∃a,b.a ∈ lA = true ∧ b ∈ lB = true ∧ c = f a b. -#A #B #C #f #c #lA elim lA - [normalize #lB #H destruct - |#a1 #tl #Hind #lB >compose_cons #Hc cases (memb_append … Hc) #Hcase - [lapply(memb_map_to_exists … Hcase) * #b * #Hb #Hf - %{a1} %{b} /3/ - |lapply(Hind ? Hcase) * #a2 * #b * * #Ha #Hb #Hf %{a2} %{b} % // % // - @orb_true_r2 // - ] - ] -qed. - -definition compose2 ≝ - λA,B:DeqSet.λa:A.λl. compose B (carr_fun_l A B l) (carr_fun_l A B (a::l)) - (λb,tl. mk_Sig ?? (〈a,b〉::(pi1 … tl)) ?). 
-normalize @eq_f @(pi2 … tl) -qed. - -let rec Dfoldr (A:Type[0]) (B:list A → Type[0]) - (f:∀a:A.∀l.B l → B (a::l)) (b:B [ ]) (l:list A) on l : B l ≝ - match l with [ nil ⇒ b | cons a l ⇒ f a l (Dfoldr A B f b l)]. - -definition empty_graph: ∀A,B:DeqSet. carr_fun_l A B []. -#A #B %{[]} // qed. - -definition enum_fun: ∀A,B:DeqSet.∀lA:list A.list B → list (carr_fun_l A B lA) ≝ - λA,B,lA,lB.Dfoldr A (λl.list (carr_fun_l A B l)) - (λa,l.compose2 ?? a l lB) [empty_graph A B] lA. - -lemma mem_enum_fun: ∀A,B:DeqSet.∀lA,lB.∀x:carr_fun_l A B lA. - pi1 … x ∈ map ?? (pi1 … ) (enum_fun A B lA lB) = true → - x ∈ enum_fun A B lA lB = true . -#A #B #lA #lB #x @(memb_map_inj - (DeqSig (DeqList (DeqProd A B)) - (λx0:DeqList (DeqProd A B).is_functional A B lA x0)) - (DeqList (DeqProd A B)) (pi1 …)) -* #l1 #H1 * #l2 #H2 #Heq lapply H1 lapply H2 >Heq // -qed. - -lemma enum_fun_cons: ∀A,B,a,lA,lB. - enum_fun A B (a::lA) lB = - compose ??? (λb,tl. mk_Sig ?? (〈a,b〉::(pi1 … tl)) ?) lB (enum_fun A B lA lB). -// -qed. - -lemma map_map: ∀A,B,C.∀f:A→B.∀g:B→C.∀l. - map ?? g (map ?? f l) = map ?? (g ∘ f) l. -#A #B #C #f #g #l elim l [//] -#a #tl #Hind normalize @eq_f @Hind -qed. - -lemma map_compose: ∀A,B,C,D.∀f:A→B→C.∀g:C→D.∀l1,l2. - map ?? g (compose A B C f l1 l2) = compose A B D (λa,b. g (f a b)) l1 l2. -#A #B #C #D #f #g #l1 elim l1 [//] -#a #tl #Hind #l2 >compose_cons >compose_cons (enum_fun_cons A B a tl lB) >enum_fun_raw_cons >map_compose -cut (∀lB2. compose B (Σx:DeqList (DeqProd A B).is_functional A B tl x) - (DeqList (DeqProd A B)) - (λa0:B - .λb:Σx:DeqList (DeqProd A B).is_functional A B tl x - .〈a,a0〉 - ::pi1 (list (A×B)) (λx:DeqList (DeqProd A B).is_functional A B tl x) b) lB - (enum_fun A B tl lB2) - =compose B (list (A×B)) (list (A×B)) (λb:B.cons (A×B) 〈a,b〉) lB - (enum_fun_raw A B tl lB2)) - [#lB2 elim lB - [normalize // - |#b #tlb #Hindb >compose_cons in ⊢ (???%); >compose_cons - @eq_f2 [map_map // |@Hindb]]] -#Hcut @Hcut -qed. - -lemma uniqueb_compose: ∀A,B,C:DeqSet.∀f,l1,l2. 
- (∀a1,a2,b1,b2. f a1 b1 = f a2 b2 → a1 = a2 ∧ b1 = b2) → - uniqueb ? l1 = true → uniqueb ? l2 = true → - uniqueb ? (compose A B C f l1 l2) = true. -#A #B #C #f #l1 #l2 #Hinj elim l1 // -#a #tl #Hind #HuA #HuB >compose_cons @uniqueb_append - [@(unique_map_inj … HuB) #b1 #b2 #Hb1b2 @(proj2 … (Hinj … Hb1b2)) - |@Hind // @(andb_true_r … HuA) - |#c #Hc lapply(memb_map_to_exists … Hc) * #b * #Hb2 #Hfab % #Hc - lapply(compose_spec2 … Hc) * #a1 * #b1 * * #Ha1 #Hb1 H - normalize #H1 destruct (H1) - ] - ] -qed. - -lemma enum_fun_unique: ∀A,B:DeqSet.∀lA,lB. - uniqueb ? lA = true → uniqueb ? lB = true → - uniqueb ? (enum_fun A B lA lB) = true. -#A #B #lA elim lA - [#lB #_ #ulB // - |#a #tlA #Hind #lB #uA #uB lapply (enum_fun_cons A B a tlA lB) #H >H - @(uniqueb_compose B (carr_fun_l A B tlA) (carr_fun_l A B (a::tlA))) - [#b1 #b2 * #l1 #funl1 * #l2 #funl2 #H1 destruct (H1) /2/ - |// - |@(Hind … uB) @(andb_true_r … uA) - ] - ] -qed. - -lemma enum_fun_complete: ∀A,B:FinSet.∀l1,l2. - (∀x:A. memb A x l1 = true) → - (∀x:B. memb B x l2 = true) → - ∀x:carr_fun_l A B l1. memb ? x (enum_fun A B l1 l2) = true. -#A #B #l1 #l2 #H1 #H2 * #g #H @mem_enum_fun >enum_fun_graphs -lapply H -H lapply g -g elim l1 - [* // #p #tlg normalize #H destruct (H) - |#a #tl #Hind #g cases g - [normalize in ⊢ (%→?); #H destruct (H) - |* #a1 #b #tl1 normalize in ⊢ (%→?); #H - cut (is_functional A B tl tl1) [destruct (H) //] #Hfun - >(cons_injective_l ????? H) - >(enum_fun_raw_cons … ) @(compose_spec1 … (λb. cons ? 〈a,b〉)) - [@H2 |@Hind @Hfun] - ] - ] -qed. - -definition FinFun ≝ -λA,B:FinSet.mk_FinSet (carr_fun A B) - (enum_fun A B (enum A) (enum B)) - (enum_fun_unique A B … (enum_unique A) (enum_unique B)) - (enum_fun_complete A B … (enum_complete A) (enum_complete B)). - -(* -unification hint 0 ≔ C1,C2; - T1 ≟ FinSetcarr C1, - T2 ≟ FinSetcarr C2, - X ≟ FinProd C1 C2 -(* ---------------------------------------- *) ⊢ - T1×T2 ≡ FinSetcarr X. 
*) \ No newline at end of file diff --git a/matita/lib/basics/jmeq.ma b/matita/lib/basics/jmeq.ma index 42e52c0a4..807ef2604 100644 --- a/matita/lib/basics/jmeq.ma +++ b/matita/lib/basics/jmeq.ma @@ -1,12 +1,12 @@ (* - ||M|| This file is part of HELM, an Hypertextual, Electronic - ||A|| Library of Mathematics, developed at the Computer Science - ||T|| Department of the University of Bologna, Italy. - ||I|| - ||T|| - ||A|| This file is distributed under the terms of the - \ / GNU General Public License Version 2 - \ / + ||M|| This file is part of HELM, an Hypertextual, Electronic + ||A|| Library of Mathematics, developed at the Computer Science + ||T|| Department of the University of Bologna, Italy. + ||I|| + ||T|| + ||A|| This file is distributed under the terms of the + \ / GNU General Public License Version 2 + \ / V_______________________________________________________________ *) include "basics/logic.ma". @@ -38,9 +38,9 @@ interpretation "john major's reflexivity" 'refl = refl_jmeq. definition eqProp ≝ λA:Prop.eq A. -lemma K : ∀A.∀x:A.∀h:x=x. eqProp ? h (refl A x). +axiom K : ∀A.∀x:A.∀h:x=x. eqProp ? h (refl A x). (* #A #x #h @(refl ? h: eqProp ? ? ?). -qed. +qed. *) definition cast: ∀A,B:Type[1].∀E:A=B. A → B. @@ -53,7 +53,7 @@ lemma tech1: [2: >E % | #Aa #Bb #E >E cases Bb #B #b normalize % ] qed. - + lemma gral: ∀A.∀x,y:A. mk_Sigma A x = mk_Sigma A y → x=y. #A #x #y #E lapply (tech1 ?? E) @@ -89,13 +89,13 @@ definition PP: ∀A:Prop.∀P:A → Type[0]. A → Type[0]. #A #P #a @(P a) qed. -lemma E : ∀A.∀x:A.∀P:∀y:A.jmeq A x A y→Type[0]. - PP ? (P x) (refl_jmeq A x) → ∀y.∀h:jmeq A x A y.PP ? (P y) h. +axiom E : ∀A.∀x:A.∀P:∀y:A.jmeq A x A y→Type[0]. + PP ? (P x) (refl_jmeq A x) → ∀y.∀h:jmeq A x A y.PP ? (P y) h. (* #A #a #P #H #b #E letin F ≝ (jm_to_eq_sigma ??? E) lapply (G ?? (curry ?? P) ?? F) [ normalize // | #H whd in H; @(H : PP ? (P b) ?) ] -qed. +qed. *) lemma jmeq_elim : ∀A.∀x:A.∀P:∀y:A.jmeq A x A y→Type[0]. P x (refl_jmeq A x) → ∀y.∀h:jmeq A x A y.P y h ≝ E. 
diff --git a/matita/lib/basics/lists/list.ma b/matita/lib/basics/lists/list.ma index ed988b760..f8c2ac772 100644 --- a/matita/lib/basics/lists/list.ma +++ b/matita/lib/basics/lists/list.ma @@ -11,7 +11,6 @@ include "basics/types.ma". include "arithmetics/nat.ma". -include "basics/core_notation/card_1.ma". inductive list (A:Type[0]) : Type[0] := | nil: list A diff --git a/matita/lib/basics/logic.ma b/matita/lib/basics/logic.ma index 4b90f7518..2905b3e24 100644 --- a/matita/lib/basics/logic.ma +++ b/matita/lib/basics/logic.ma @@ -1,12 +1,12 @@ (* - ||M|| This file is part of HELM, an Hypertextual, Electronic - ||A|| Library of Mathematics, developed at the Computer Science - ||T|| Department of the University of Bologna, Italy. - ||I|| - ||T|| - ||A|| - \ / This file is distributed under the terms of the - \ / GNU General Public License Version 2 + ||M|| This file is part of HELM, an Hypertextual, Electronic + ||A|| Library of Mathematics, developed at the Computer Science + ||T|| Department of the University of Bologna, Italy. + ||I|| + ||T|| + ||A|| + \ / This file is distributed under the terms of the + \ / GNU General Public License Version 2 V_______________________________________________________________ *) include "basics/pts.ma". @@ -15,8 +15,8 @@ include "hints_declaration.ma". (* propositional equality *) inductive eq (A:Type[2]) (x:A) : A → Prop ≝ - refl: eq A x x. - + refl: eq A x x. + interpretation "leibnitz's equality" 'eq t x y = (eq t x y). interpretation "leibniz reflexivity" 'refl = refl. @@ -69,13 +69,14 @@ theorem eq_f: ∀A,B.∀f:A→B.∀x,y:A. x=y → f x = f y. (* deleterio per auto? *) theorem eq_f2: ∀A,B,C.∀f:A→B→C. ∀x1,x2:A.∀y1,y2:B. x1=x2 → y1=y2 → f x1 y1 = f x2 y2. -#A #B #C #f #x1 #x2 #y1 #y2 #E1 #E2 >E1; >E2; //; qed. +#A #B #C #f #x1 #x2 #y1 #y2 #E1 #E2 >E1; >E2; //; qed. +(* // producing an unnecessarily complicated term, replaced by % *) lemma eq_f3: ∀A,B,C,D.∀f:A→B→C->D. ∀x1,x2:A.∀y1,y2:B. ∀z1,z2:C. 
x1=x2 → y1=y2 → z1=z2 → f x1 y1 z1 = f x2 y2 z2. -#A #B #C #D #f #x1 #x2 #y1 #y2 #z1 #z2 #E1 #E2 #E3 >E1; >E2; >E3 //; qed. +#A #B #C #D #f #x1 #x2 #y1 #y2 #z1 #z2 #E1 #E2 #E3 >E1; >E2; >E3 % qed. -(* hint to genereric equality +(* hint to genereric equality definition eq_equality: equality ≝ mk_equality eq refl rewrite_l rewrite_r. @@ -85,10 +86,10 @@ unification hint 0 ≔ T,a,b; (*------------------------------------*) ⊢ equal X T a b ≡ eq T a b. *) - + (********** connectives ********) -inductive True: Prop ≝ +inductive True: Prop ≝ I : True. inductive False: Prop ≝ . @@ -137,13 +138,13 @@ inductive Or (A,B:Prop) : Prop ≝ interpretation "logical or" 'or x y = (Or x y). -definition decidable : Prop → Prop ≝ +definition decidable : Prop → Prop ≝ λ A:Prop. A ∨ ¬ A. (* exists *) inductive ex (A:Type[0]) (P:A → Prop) : Prop ≝ ex_intro: ∀ x:A. P x → ex A P. - + interpretation "exists" 'exists x = (ex ? x). inductive ex2 (A:Type[0]) (P,Q:A →Prop) : Prop ≝ @@ -171,21 +172,21 @@ lemma iff_not: ∀A,B. A ↔ B → ¬A ↔ ¬B. #A #B * #H1 #H2 % /3/ qed. lemma iff_and_l: ∀A,B,C. A ↔ B → C ∧ A ↔ C ∧ B. -#A #B #C * #H1 #H2 % * /3/ qed. +#A #B #C * #H1 #H2 % * /3/ qed. lemma iff_and_r: ∀A,B,C. A ↔ B → A ∧ C ↔ B ∧ C. -#A #B #C * #H1 #H2 % * /3/ qed. +#A #B #C * #H1 #H2 % * /3/ qed. lemma iff_or_l: ∀A,B,C. A ↔ B → C ∨ A ↔ C ∨ B. -#A #B #C * #H1 #H2 % * /3/ qed. +#A #B #C * #H1 #H2 % * /3/ qed. lemma iff_or_r: ∀A,B,C. A ↔ B → A ∨ C ↔ B ∨ C. -#A #B #C * #H1 #H2 % * /3/ qed. +#A #B #C * #H1 #H2 % * /3/ qed. -(* cose per destruct: da rivedere *) +(* cose per destruct: da rivedere *) definition R0 ≝ λT:Type[0].λt:T.t. - + definition R1 ≝ eq_rect_Type0. (* used for lambda-delta *) @@ -201,10 +202,10 @@ definition R2 : ∀b1: T1 b0 e0. ∀e1:R1 ?? T1 a1 ? e0 = b1. T2 b0 e0 b1 e1. -#T0 #a0 #T1 #a1 #T2 #a2 #b0 #e0 #b1 #e1 -@(eq_rect_Type0 ????? e1) -@(R1 ?? ? ?? e0) -@a2 +#T0 #a0 #T1 #a1 #T2 #a2 #b0 #e0 #b1 #e1 +@(eq_rect_Type0 ????? e1) +@(R1 ?? ? ?? e0) +@a2 qed. 
definition R3 : @@ -224,10 +225,10 @@ definition R3 : ∀b2: T2 b0 e0 b1 e1. ∀e2:R2 ???? T2 a2 b0 e0 ? e1 = b2. T3 b0 e0 b1 e1 b2 e2. -#T0 #a0 #T1 #a1 #T2 #a2 #T3 #a3 #b0 #e0 #b1 #e1 #b2 #e2 -@(eq_rect_Type0 ????? e2) -@(R2 ?? ? ???? e0 ? e1) -@a3 +#T0 #a0 #T1 #a1 #T2 #a2 #T3 #a3 #b0 #e0 #b1 #e1 #b2 #e2 +@(eq_rect_Type0 ????? e2) +@(R2 ?? ? ???? e0 ? e1) +@a3 qed. definition R4 : @@ -239,15 +240,15 @@ definition R4 : ∀a2:T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1). ∀T3:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1. ∀x2:T2 x0 p0 x1 p1.eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2 → Type[0]. - ∀a3:T3 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) - a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2). + ∀a3:T3 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) + a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2). ∀T4:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1. ∀x2:T2 x0 p0 x1 p1.∀p2:eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2. - ∀x3:T3 x0 p0 x1 p1 x2 p2.∀p3:eq (T3 …) (R3 T0 a0 T1 a1 T2 a2 T3 a3 x0 p0 x1 p1 x2 p2) x3. + ∀x3:T3 x0 p0 x1 p1 x2 p2.∀p3:eq (T3 …) (R3 T0 a0 T1 a1 T2 a2 T3 a3 x0 p0 x1 p1 x2 p2) x3. Type[0]. - ∀a4:T4 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) - a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2) - a3 (refl (T3 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) + ∀a4:T4 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) + a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2) + a3 (refl (T3 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2)) a3). ∀b0:T0. @@ -259,19 +260,19 @@ definition R4 : ∀b3: T3 b0 e0 b1 e1 b2 e2. ∀e3:eq (T3 …) (R3 T0 a0 T1 a1 T2 a2 T3 a3 b0 e0 b1 e1 b2 e2) b3. T4 b0 e0 b1 e1 b2 e2 b3 e3. -#T0 #a0 #T1 #a1 #T2 #a2 #T3 #a3 #T4 #a4 #b0 #e0 #b1 #e1 #b2 #e2 #b3 #e3 -@(eq_rect_Type0 ????? e3) -@(R3 ????????? 
e0 ? e1 ? e2) -@a4 +#T0 #a0 #T1 #a1 #T2 #a2 #T3 #a3 #T4 #a4 #b0 #e0 #b1 #e1 #b2 #e2 #b3 #e3 +@(eq_rect_Type0 ????? e3) +@(R3 ????????? e0 ? e1 ? e2) +@a4 qed. definition eqProp ≝ λA:Prop.eq A. (* Example to avoid indexing and the consequential creation of ill typed terms during paramodulation *) -lemma lemmaK : ∀A.∀x:A.∀h:x=x. eqProp ? h (refl A x). -#A #x #h @(refl ? h: eqProp ? ? ?). -qed-. +axiom lemmaK : ∀A.∀x:A.∀h:x=x. eqProp ? h (refl A x). +(* #A #x #h @(refl ? h: eqProp ? ? ?). +qed-. *) theorem streicherK : ∀T:Type[2].∀t:T.∀P:t = t → Type[3].P (refl ? t) → ∀p.P p. #T #t #P #H #p >(lemmaK T t p) @H diff --git a/matita/lib/basics/relations.ma b/matita/lib/basics/relations.ma index 9da779ee4..fb0423cea 100644 --- a/matita/lib/basics/relations.ma +++ b/matita/lib/basics/relations.ma @@ -10,8 +10,6 @@ V_______________________________________________________________ *) include "basics/logic.ma". -include "basics/core_notation/compose_2.ma". -include "basics/core_notation/subseteq_2.ma". (********** predicates *********) @@ -56,12 +54,12 @@ definition antisymmetric: ∀A.∀R:relation A.Prop definition singlevalued: ∀A,B. predicate (relation2 A B) ≝ λA,B,R. ∀a,b1. R a b1 → ∀b2. R a b2 → b1 = b2. -definition pw_confluent: ∀A. relation A → predicate A ≝ λA,R,a0. - ∀a1. R a0 a1 → ∀a2. R a0 a2 → - ∃∃a. R a1 a & R a2 a. +definition confluent1: ∀A. relation A → predicate A ≝ λA,R,a0. + ∀a1. R a0 a1 → ∀a2. R a0 a2 → + ∃∃a. R a1 a & R a2 a. definition confluent: ∀A. predicate (relation A) ≝ λA,R. - ∀a0. pw_confluent … R a0. + ∀a0. confluent1 … R a0. (* triangular confluence of two relations *) definition Conf3: ∀A,B. relation2 A B → relation A → Prop ≝ λA,B,S,R. diff --git a/matita/lib/basics/sets.ma b/matita/lib/basics/sets.ma index 8e40b666b..cdc6ac9e8 100644 --- a/matita/lib/basics/sets.ma +++ b/matita/lib/basics/sets.ma @@ -10,8 +10,6 @@ V_______________________________________________________________ *) include "basics/logic.ma". 
-include "basics/core_notation/singl_1.ma". -include "basics/core_notation/subseteq_2.ma". (**** a subset of A is just an object of type A→Prop ****) @@ -118,4 +116,4 @@ qed. (* substraction *) lemma substract_def:∀U.∀A,B:U→Prop. A-B ≐ A ∩ ¬B. #U #A #B #w normalize /2/ -qed. +qed. \ No newline at end of file diff --git a/matita/lib/basics/types.ma b/matita/lib/basics/types.ma index efc213f95..a5e35c9d2 100644 --- a/matita/lib/basics/types.ma +++ b/matita/lib/basics/types.ma @@ -9,7 +9,6 @@ \ / GNU General Public License Version 2 V_______________________________________________________________ *) -include "basics/core_notation/pair_2.ma". include "basics/logic.ma". (* void *) diff --git a/matita/lib/basics/vectors.ma b/matita/lib/basics/vectors.ma index 2ab0a87b1..b831a112f 100644 --- a/matita/lib/basics/vectors.ma +++ b/matita/lib/basics/vectors.ma @@ -1,29 +1,29 @@ (* - ||M|| This file is part of HELM, an Hypertextual, Electronic - ||A|| Library of Mathematics, developed at the Computer Science - ||T|| Department of the University of Bologna, Italy. - ||I|| - ||T|| - ||A|| - \ / This file is distributed under the terms of the - \ / GNU General Public License Version 2 + ||M|| This file is part of HELM, an Hypertextual, Electronic + ||A|| Library of Mathematics, developed at the Computer Science + ||T|| Department of the University of Bologna, Italy. + ||I|| + ||T|| + ||A|| + \ / This file is distributed under the terms of the + \ / GNU General Public License Version 2 V_____________________________________________________________*) include "basics/lists/list.ma". -record Vector (A:Type[0]) (n:nat): Type[0] ≝ +record Vector (A:Type[0]) (n:nat): Type[0] ≝ { vec :> list A; len: length ? vec = n }. -lemma Vector_of_list ≝ λA,l. +lemma Vector_of_list ≝ λA,l. mk_Vector A (|l|) l (refl ??). -lemma Vector_eq : ∀A,n,v1,v2. - vec A n v1 = vec A n v2 → v1 = v2. -#A #n * #l1 #H1 * #l2 #H2 #eql1l2 generalize in match H1; +axiom Vector_eq : ∀A,n,v1,v2. 
+ vec A n v1 = vec A n v2 → v1 = v2. (* +#A #n * #l1 #H1 * #l2 #H2 #eql1l2 generalize in match H1; -H1 >eql1l2 // -qed. +qed. *) definition vec_tail ≝ λA.λn.λv:Vector A n. mk_Vector A (pred n) (tail A v) ?. @@ -36,47 +36,47 @@ normalize >(len A n v) // qed. definition vec_hd ≝ λA.λn.λv:Vector A (S n). -hd A v ?. elim v * [normalize #H destruct | #a #H #eq @a] +hd A v ?. elim v * [normalize #H destruct | #a #H #eq @a] qed. -lemma vec_expand: ∀A,n,v. - v = vec_cons A (vec_hd A n v) n (vec_tail A (S n) v). +axiom vec_expand: ∀A,n,v. + v = vec_cons A (vec_hd A n v) n (vec_tail A (S n) v). (* #A #n * #l cases l [normalize in ⊢ (%→?); #H destruct |//] -qed. +qed. *) -lemma vector_nil: ∀A.∀v:Vector A 0. - v = mk_Vector A 0 (nil A) (refl ??). +axiom vector_nil: ∀A.∀v:Vector A 0. + v = mk_Vector A 0 (nil A) (refl ??). (* #A * * // #a #tl normalize #H destruct -qed. +qed. *) definition vec_append ≝ λA.λn1,n2.λv1:Vector A n1.λv2: Vector A n2. mk_Vector A (n1+n2) (v1@v2). definition vec_map ≝ λA,B.λf:A→B.λn.λv:Vector A n. -mk_Vector B n (map ?? f v) +mk_Vector B n (map ?? f v) (trans_eq … (length_map …) (len A n v)). - + lemma nth_default : ∀A,i,n.∀v:Vector A n.∀d1,d2. i < n → nth i ? v d1 = nth i ? v d2. #A #i elim i -i [#n #v #d1 #d2 #ltOn lapply v @(lt_O_n_elim … ltOn) -v #m #v >(vec_expand … v) // |#i #Hind #n #v #d1 #d2 #ltn lapply ltn lapply v @(lt_O_n_elim … (ltn_to_ltO … ltn)) - -v -ltn #m #v #ltim >(vec_expand … v) @(Hind m (vec_tail A (S m) v) d1 d2 ?) + -v -ltn #m #v #ltim >(vec_expand … v) @(Hind m (vec_tail A (S m) v) d1 d2 ?) @le_S_S_to_le // ] qed. - -lemma eq_vec: ∀A,n.∀v1,v2:Vector A n.∀d. + +lemma eq_vec: ∀A,n.∀v1,v2:Vector A n.∀d. (∀i. i < n → nth i A v1 d = nth i A v2 d) → v1 = v2. -#A #n elim n -n +#A #n elim n -n [#v1 #v2 #H >(vector_nil A v1) >(vector_nil A v2) // |#n #Hind #v1 #v2 #d #H >(vec_expand … v1) >(vec_expand … v2) >(Hind (vec_tail … v1) (vec_tail … v2) d) [cut (vec_hd A n v1 = vec_hd A n v2) // cut (∀i,d1,d2. 
i < S n → nth i A v1 d1 = nth i A v2 d2) [#i #d1 #d2 #Hi >(nth_default ????? d) // >(nth_default ???? d2 d) // @H //] - -H #H @(H 0) // + -H #H @(H 0) // |#i #ltin @(H (S i)) @le_S_S // ] ] @@ -90,7 +90,7 @@ lemma nth_vec_map : [ #v #d >(vector_nil … v) % | #n0 #v #d >(vec_expand … v) % ] | #i0 #IH * - [ #v #d >(vector_nil … v) normalize cases i0 // + [ #v #d >(vector_nil … v) normalize cases i0 // | #n #v #d >(vec_expand … v) whd in ⊢ (??(?%)%); >(IH n (vec_tail A (S n) v) d) % ] ] qed. @@ -102,7 +102,7 @@ match n return λn0.∀v:Vector A n0.A→nat→Vector A n0 with [ O ⇒ λv,a,i.v | S m ⇒ λv,a,i. match i with - [ O ⇒ vec_cons A a m (vec_tail … v) + [ O ⇒ vec_cons A a m (vec_tail … v) | S j ⇒ vec_cons A (vec_hd A m v) m (change_vec A m (vec_tail … v) a j) ] ]. @@ -111,9 +111,9 @@ lemma nth_change_vec : ∀A,i,n,v,a,d. i < n → nth i ? (change_vec A n v a i) d = a. #A #i elim i [#n #v #a #d #ltOn lapply v -v @(lt_O_n_elim n ltOn ??) // - |#m #Hind #n #v #a #d #Hlt - lapply Hlt lapply v @(lt_O_n_elim … (ltn_to_ltO … Hlt)) - #p #v #ltmp @Hind @le_S_S_to_le // + |#m #Hind #n #v #a #d #Hlt + lapply Hlt lapply v @(lt_O_n_elim … (ltn_to_ltO … Hlt)) + #p #v #ltmp @Hind @le_S_S_to_le // ] qed. @@ -123,8 +123,8 @@ lemma nth_change_vec_neq : ∀A,j,i,n,v,a,d. i ≠ j → [#i * // #n #v #a #d cases i [#H @False_ind @(absurd ?? H) // |#i0 #_ >(vec_expand ?? v) in ⊢ (???%); // - ] - |#m #Hind #i * // cases i // #i0 #n #v #a #d #neqim + ] + |#m #Hind #i * // cases i // #i0 #n #v #a #d #neqim whd in ⊢(??%?); whd in match (tail ??); >Hind [>(vec_expand ??v) in ⊢ (???%); // |@(not_to_not … neqim) // ] ] @@ -139,13 +139,13 @@ lemma change_vec_same : ∀sig,n,v,i,d. ] qed. -lemma change_vec_cons_tail :∀A,n,vA,a,b,i. +axiom change_vec_cons_tail :∀A,n,vA,a,b,i. change_vec A (S n) (vec_cons ? a n vA) b (S i) = - vec_cons ? a n (change_vec A n vA b i). + vec_cons ? a n (change_vec A n vA b i). (* #A #n #vA cases vA // -qed. +qed. *) -lemma change_vec_commute : ∀A,n,v,a,b,i,j. 
i ≠ j → +lemma change_vec_commute : ∀A,n,v,a,b,i,j. i ≠ j → change_vec A n (change_vec A n v a i) b j = change_vec A n (change_vec A n v b j) a i. #A #n #v #a #b #i #j #Hij @(eq_vec … a) @@ -162,7 +162,7 @@ lemma change_vec_commute : ∀A,n,v,a,b,i,j. i ≠ j → ] qed. -lemma change_vec_change_vec : ∀A,n,v,a,b,i. +lemma change_vec_change_vec : ∀A,n,v,a,b,i. change_vec A n (change_vec A n v a i) b i = change_vec A n v b i. #A #n #v #a #b #i @(eq_vec … a) #i0 #Hi0 cases (decidable_eq_nat i i0) #Hii0 @@ -172,8 +172,8 @@ cases (decidable_eq_nat i i0) #Hii0 qed. lemma eq_vec_change_vec : ∀sig,n.∀v1,v2:Vector sig n.∀i,t,d. - nth i ? v2 d = t → - (∀j.i ≠ j → nth j ? v1 d = nth j ? v2 d) → + nth i ? v2 d = t → + (∀j.i ≠ j → nth j ? v1 d = nth j ? v2 d) → v2 = change_vec ?? v1 t i. #sig #n #v1 #v2 #i #t #d #H1 #H2 @(eq_vec … d) #i0 #Hlt cases (decidable_eq_nat i0 i) #Hii0 @@ -181,12 +181,12 @@ lemma eq_vec_change_vec : ∀sig,n.∀v1,v2:Vector sig n.∀i,t,d. | >nth_change_vec_neq [|@sym_not_eq //] @sym_eq @H2 @sym_not_eq // ] qed-. -(* map *) +(* map *) let rec pmap A B C (f:A→B→C) l1 l2 on l1 ≝ match l1 with [ nil ⇒ nil C - | cons a tlA ⇒ + | cons a tlA ⇒ match l2 with [nil ⇒ nil C |cons b tlB ⇒ (f a b)::pmap A B C f tlA tlB @@ -194,29 +194,29 @@ let rec pmap A B C (f:A→B→C) l1 l2 on l1 ≝ ]. lemma length_pmap: ∀A,B,C.∀f:A→B→C.∀l1,l2. -length C (pmap A B C f l1 l2) = +length C (pmap A B C f l1 l2) = min (length A l1) (length B l2). #A #B #C #f #l1 elim l1 // #a #tlA #Hind #l2 cases l2 // -#b #tlB lapply (Hind tlB) normalize +#b #tlB lapply (Hind tlB) normalize cases (true_or_false (leb (length A tlA) (length B tlB))) #H >H normalize // qed. definition pmap_vec ≝ λA,B,C.λf:A→B→C.λn.λvA:Vector A n.λvB:Vector B n. mk_Vector C n (pmap A B C f vA vB) ?. ->length_pmap >(len A n vA) >(len B n vB) normalize +>length_pmap >(len A n vA) >(len B n vB) normalize >(le_to_leb_true … (le_n n)) // qed. -lemma pmap_vec_cons :∀A,B,C,f,n,vA,vB,a,b. +axiom pmap_vec_cons :∀A,B,C,f,n,vA,vB,a,b. 
pmap_vec A B C f (S n) (vec_cons ? a n vA) (vec_cons ? b n vB) = - vec_cons ? (f a b) n (pmap_vec A B C f n vA vB). -// qed. + vec_cons ? (f a b) n (pmap_vec A B C f n vA vB). (* +// qed. *) -lemma pmap_change : ∀A,B,C,f,n,vA,vB,a,b,i. +axiom pmap_change : ∀A,B,C,f,n,vA,vB,a,b,i. pmap_vec A B C f n (change_vec ? n vA a i) (change_vec ? n vB b i) = - change_vec ? n (pmap_vec A B C f n vA vB) (f a b) i. + change_vec ? n (pmap_vec A B C f n vA vB) (f a b) i. (* #A #B #C #f #n elim n // #m #Hind #vA #vB #a #b >(vec_expand … vA) >(vec_expand … vB) * // -#i >pmap_vec_cons >pmap_vec_cons >change_vec_cons_tail @eq_f @Hind -qed. \ No newline at end of file +#i >pmap_vec_cons >pmap_vec_cons >change_vec_cons_tail @eq_f @Hind +qed. *) \ No newline at end of file diff --git a/matita/matitaEngine.ml b/matita/matitaEngine.ml index 6b4c6655b..27ce3cb3b 100644 --- a/matita/matitaEngine.ml +++ b/matita/matitaEngine.ml @@ -369,16 +369,22 @@ and compile ~compiling ~asserted ~include_paths fname = (Filename.dirname (Http_getter.filename ~local:true ~writable:true (baseuri ^ "foo.con"))); - let buf = - GrafiteParser.parsable_statement status - (Ulexing.from_utf8_channel (open_in fname)) - in - let print_cb = - if not (Helm_registry.get_bool "matita.verbose") then fun _ _ -> () - else pp_ast_statement ~fname - in - let asserted, status = - eval_from_stream ~compiling ~asserted ~include_paths status buf print_cb in + let asserted,status = + let rex = Str.regexp ".*\\.dk$" in + if Str.string_match rex fname 0 then + (* DEDUKTI CODE HERE! 
*) + let buf = Parsers.Parser.from (Parsers.Parser.input_from_file fname) in + DeduktiImport.eval_from_dedukti_stream ~asserted ~baseuri status buf + else + let buf = + GrafiteParser.parsable_statement status + (Ulexing.from_utf8_channel (open_in fname)) + in + let print_cb = + if not (Helm_registry.get_bool "matita.verbose") then fun _ _ -> () + else pp_ast_statement ~fname + in + eval_from_stream ~compiling ~asserted ~include_paths status buf print_cb in let status = OcamlExtraction.close_file status in let elapsed = Unix.time () -. time in (if Helm_registry.get_bool "matita.moo" then begin diff --git a/matita/matitaInit.ml b/matita/matitaInit.ml index 9f7ae3ac2..0328d808a 100644 --- a/matita/matitaInit.ml +++ b/matita/matitaInit.ml @@ -202,6 +202,9 @@ let parse_cmdline init_status = "-extract_ocaml", Arg.Unit (fun () -> Helm_registry.set_bool "extract_ocaml" true), "Extract ocaml code"; + "-extract_dedukti", + Arg.Unit (fun () -> Helm_registry.set_bool "extract_dedukti" true), + "Extract dedukti code"; "-force", Arg.Unit (fun () -> Helm_registry.set_bool "matita.force" true), ("Force actions that would not be executed per default"); diff --git a/matita/matitac.ml b/matita/matitac.ml index c2291d575..14501f24c 100644 --- a/matita/matitac.ml +++ b/matita/matitac.ml @@ -67,6 +67,7 @@ let main_compiler () = HLog.error (snd (MatitaExcPp.to_string exn)); false) && b ) true targets then + let () = DeduktiExtraction.output_modules () in (HLog.message "Compilation successful"; 0) else (HLog.message "Compilation failed"; 1)