-rw-r--r-- | HoTT_Base.thy |  38
-rw-r--r-- | Prod.thy      |  58
-rw-r--r-- | typing.ML     | 205
3 files changed, 237 insertions, 64 deletions
diff --git a/HoTT_Base.thy b/HoTT_Base.thy
index e74ef31..ed8c18b 100644
--- a/HoTT_Base.thy
+++ b/HoTT_Base.thy
@@ -1,11 +1,11 @@
 (********
 Isabelle/HoTT: Foundational definitions and notation
-Jan 2019
+Feb 2019

 This file is the starting point of the Isabelle/HoTT object logic, and contains the basic setup
 and definitions.
 Among other things, it:

-* Sets up object-level type inference functionality (via typing.ML).
+* Calls the setup of object-level type inference functionality.
 * Defines the universe hierarchy and its governing rules.
 * Defines named theorems for later use by proof methods.

@@ -13,19 +13,49 @@ Among other things, it:
 theory HoTT_Base
 imports Pure
+keywords "assume_type" "assume_types" :: thy_decl

 begin

 section \<open>Terms and typing\<close>

-declare[[eta_contract=false]]
-
 typedecl t \<comment> \<open>Type of object-logic terms (which includes the types)\<close>

 judgment has_type :: "[t, t] \<Rightarrow> prop" ("(3_:/ _)")

+section \<open>Setup non-core logic functionality\<close>
+
+declare[[eta_contract=false]] \<comment> \<open>Do not eta-contract\<close>
+
+text \<open>The following file sets up type assumption and inference functionality.\<close>
+
 ML_file "typing.ML"

+ML \<open>
+val _ =
+  let
+    fun store_typing ctxt = Typing.put_declared_typing o (Syntax.read_prop ctxt)
+  in
+    Outer_Syntax.local_theory
+      @{command_keyword "assume_type"}
+      "Declare typing assumptions"
+      (Parse.prop >>
+        (fn prop => fn lthy => Local_Theory.background_theory (store_typing lthy prop) lthy))
+  end
+
+val _ =
+  let
+    val parser = Parse.and_list (Parse.prop)
+    fun store_typings ctxt = fold (Typing.put_declared_typing o (Syntax.read_prop ctxt))
+  in
+    Outer_Syntax.local_theory
+      @{command_keyword "assume_types"}
+      "Declare typing assumptions"
+      (parser >>
+        (fn props => fn lthy => Local_Theory.background_theory (store_typings lthy props) lthy))
+  end
+\<close>
+
 section \<open>Universes\<close>

 typedecl ord \<comment> \<open>Type of meta-numerals\<close>
D x" shows "compose A (compose B h g) f \<equiv> compose A h (compose A g f)" by (derive lems: assms cong) lemma compose_comp: assumes "A: U i" and "\<And>x. x: A \<Longrightarrow> b x: B" and "\<And>x. x: B \<Longrightarrow> c x: C x" - shows "(,\\x: B. c x) o (,\\x: A. b x) \<equiv> ,\\x: A. c (b x)" + shows "(\<lambda>x: B. c x) o (\<lambda>x: A. b x) \<equiv> \<lambda>x: A. c (b x)" by (derive lems: assms cong) -abbreviation id :: "t \<Rightarrow> t" where "id A \<equiv> ,\\x: A. x" +abbreviation id :: "t \<Rightarrow> t" where "id A \<equiv> \<lambda>x: A. x" end @@ -6,15 +6,96 @@ Functionality for object-level type inference. ********) +(* Context attribute for tracing; use declare[[trace=true]] at any point in a theory to turn on *) +val trace = Attrib.setup_config_bool @{binding "trace"} (K false) + +(* ====================================== Helper functions ====================================== *) + +signature TYPING_LIB = +sig + (* Lexical functions *) + val unmark: string -> string + + (* General functions *) + val get_all_thms: Proof.context -> (string * thm) list + val find_thm: Proof.context -> string -> (string * thm) list + + (* Get specific theorems *) + val get_this: Proof.context -> thm list + val get_assms: Proof.context -> thm list +end + +structure Typing_Lib: TYPING_LIB = +struct + +fun unmark s = + Lexicon.unmark_class s + handle Fail "unprefix" => + Lexicon.unmark_type s + handle Fail "unprefix" => + Lexicon.unmark_const s + handle Fail "unprefix" => + Lexicon.unmark_fixed s + handle Fail "unprefix" => + s + +fun name_and_thm (fact, thm) = + ((case fact of + Facts.Named ((name, _), _) => name + | Facts.Fact name => name), + thm) + +fun get_all_thms ctxt = + let val (_, ref_thms) = Find_Theorems.find_theorems ctxt NONE NONE true [] + in ref_thms |> (map name_and_thm) + end + +(* Search all visible theorems (including local assumptions) containing a given name *) +fun find_thm ctxt name = + let val (_, ref_thms) = + Find_Theorems.find_theorems ctxt NONE NONE true [(true, Find_Theorems.Name name)] + in + ref_thms |> (map name_and_thm) + end + +(* Get the fact corresponding to the Isar term "this" in the current proof scope *) +fun get_this ctxt = + Proof_Context.get_fact ctxt (Facts.named "local.this") + handle ERROR "Undefined fact: \"local.this\"" => ( + if (Config.get ctxt trace) then warning "\"this\" undefined in the context" else (); + [] + ) + +(* Get all visible assumptions in the current proof scope. + This includes the facts introduced in the statement of the goal using "assumes", + as well as all facts introduced using "assume" that are visible in the current block. 
diff --git a/typing.ML b/typing.ML
--- a/typing.ML
+++ b/typing.ML
@@ -6,15 +6,96 @@ Functionality for object-level type inference.
 ********)

+(* Context attribute for tracing; use declare[[trace=true]] at any point in a theory to turn on *)
+val trace = Attrib.setup_config_bool @{binding "trace"} (K false)
+
+(* ====================================== Helper functions ====================================== *)
+
+signature TYPING_LIB =
+sig
+  (* Lexical functions *)
+  val unmark: string -> string
+
+  (* General functions *)
+  val get_all_thms: Proof.context -> (string * thm) list
+  val find_thm: Proof.context -> string -> (string * thm) list
+
+  (* Get specific theorems *)
+  val get_this: Proof.context -> thm list
+  val get_assms: Proof.context -> thm list
+end
+
+structure Typing_Lib: TYPING_LIB =
+struct
+
+fun unmark s =
+  Lexicon.unmark_class s
+  handle Fail "unprefix" =>
+  Lexicon.unmark_type s
+  handle Fail "unprefix" =>
+  Lexicon.unmark_const s
+  handle Fail "unprefix" =>
+  Lexicon.unmark_fixed s
+  handle Fail "unprefix" =>
+  s
+
+fun name_and_thm (fact, thm) =
+  ((case fact of
+      Facts.Named ((name, _), _) => name
+    | Facts.Fact name => name),
+   thm)
+
+fun get_all_thms ctxt =
+  let val (_, ref_thms) = Find_Theorems.find_theorems ctxt NONE NONE true []
+  in ref_thms |> (map name_and_thm)
+  end
+
+(* Search all visible theorems (including local assumptions) containing a given name *)
+fun find_thm ctxt name =
+  let val (_, ref_thms) =
+    Find_Theorems.find_theorems ctxt NONE NONE true [(true, Find_Theorems.Name name)]
+  in
+    ref_thms |> (map name_and_thm)
+  end
+
+(* Get the fact corresponding to the Isar term "this" in the current proof scope *)
+fun get_this ctxt =
+  Proof_Context.get_fact ctxt (Facts.named "local.this")
+  handle ERROR "Undefined fact: \"local.this\"" => (
+    if (Config.get ctxt trace) then warning "\"this\" undefined in the context" else ();
+    []
+  )
+
+(* Get all visible assumptions in the current proof scope.
+   This includes the facts introduced in the statement of the goal using "assumes",
+   as well as all facts introduced using "assume" that are visible in the current block.
+ *)
+val get_assms = Assumption.all_prems_of
+
+end
+
+(* ================================ Type inference functionality ================================ *)
+
 signature TYPING =
 sig
   type jmt = term
+  val is_typing_jmt: term -> bool
   val term_of_jmt: jmt -> term
   val type_of_jmt: jmt -> term
+
+  val typing_this: Proof.context -> jmt list
   val typing_assms: Proof.context -> jmt list
-  val get_typing: term -> jmt list -> term option
-  val get_global_typing: theory -> Proof.context -> term -> term option
-  val tm_of_ptm: Proof.context -> term -> term
+  val typing_all: Proof.context -> jmt list
+
+  val get_typing_in: term -> jmt list -> term option
+  val get_local_typing: Proof.context -> term -> term option
+
+  val get_declared_typing: theory -> term -> term option
+  val put_declared_typing: term -> theory -> theory
+
+  val get_typing: Proof.context -> term -> term option
+
+  val prep_term: Proof.context -> term -> term
 end

 structure Typing: TYPING =
@@ -22,6 +103,10 @@ struct

 type jmt = term

+(* Determine if a term is a typing judgment *)
+fun is_typing_jmt (@{const has_type} $ _ $ _) = true
+  | is_typing_jmt _ = false
+
 (* Functions to extract a and A from propositions "a: A" *)
 fun term_of_jmt (@{const has_type} $ t $ _) = t
   | term_of_jmt _ = Exn.error "Not a typing judgment"
@@ -29,62 +114,106 @@ fun term_of_jmt (@{const has_type} $ t $ _) = t
 fun type_of_jmt (@{const has_type} $ _ $ T) = T
   | type_of_jmt _ = Exn.error "Not a typing judgment"

+(* Get typing assumptions in "this" *)
+fun typing_this ctxt = Typing_Lib.get_this ctxt |> map Thm.prop_of |> filter is_typing_jmt
+
 (* Get the typing assumptions in the current context *)
-fun typing_assms ctxt =
-  let
-    fun is_typing_jmt (@{const has_type} $ _ $ _) = true
-      | is_typing_jmt _ = false
-  in
-    Assumption.all_prems_of ctxt |> map Thm.prop_of |> filter is_typing_jmt
-  end
+val typing_assms = Typing_Lib.get_assms #> map Thm.prop_of #> filter is_typing_jmt

-(* Search for a term typing in a list of judgments*)
-fun get_typing tm jmts =
+(* Get all visible typing judgments—Quick hack for now; should record them separately *)
+fun typing_all ctxt =
+  Typing_Lib.get_all_thms ctxt |> map (Thm.prop_of o snd) |> filter is_typing_jmt
+
+(* Search for a term typing in a list of judgments, and return the type if found.
+   --
+   The use of aconv_untyped as opposed to aconv is crucial here:
+   meta-level type inference is performed *after* syntax translation, which means that
+   the translation functions see an unannotated term "f" (in contrast to one annotated
+   e.g. "f::t") as having a blank type field.
+   Using aconv would result in no match ever being found for f, because any judgment
+   involving f records it together with its (non-blank) type field, e.g.
+     "Free ("f", "_")"
+   seen by the translation function, vs.
+     "Free ("f", "t")"
+   recorded in the typing judgment.
+*)
+fun get_typing_in tm jmts =
   let val find_type =
     fn jmt => if Term.aconv_untyped (tm, term_of_jmt jmt) then SOME (type_of_jmt jmt) else NONE
   in get_first find_type jmts end

-(* Private storage space for global typing data *)
-structure Typing_Data = Theory_Data
+(* Search for typing in the local proof context (no global data).
+   We search the facts bound to "this" before searching the assumptions.
+   --
+   A previous version of this function passed the argument
+     (typing_this ctxt @ typing_assms ctxt)
+   to get_typing_in.
+   --
+   This version only evaluates successive lists if the search on the previous list was unsuccessful.
+*)
+fun get_local_typing ctxt tm =
+  case get_typing_in tm (typing_this ctxt) of
+    NONE => get_typing_in tm (typing_assms ctxt)
+  | res => res
+
+(* Storage space for declared typings *)
+structure Declared_Typings = Theory_Data
 (
   type T = term Symtab.table
   val empty = Symtab.empty
   val extend = I
-
-  fun merge (tbl, tbl') =
-    let
-      val key_vals = map (Symtab.lookup_key tbl') (Symtab.keys tbl')
-      val updates = map (fn SOME kv => Symtab.update kv) key_vals
-      fun f u t = u t
-    in fold f updates tbl end
+  val merge = K Symtab.empty
 )
-
 (* Accessor for the above data *)
-fun get_global_typing thy ctxt tm = Symtab.lookup (Typing_Data.get thy) (Syntax.string_of_term ctxt tm)
+fun get_declared_typing thy tm =
+  case tm of
+    (Free (v, _)) => Symtab.lookup (Declared_Typings.get thy) v
+  | _ => NONE
+
+(* Store new typing information for free variables only
+   (constants should have their types declared at the moment of definition).
+   End users should use the "typing" keyword instead.
+*)
+fun put_declared_typing jmt =
+  case jmt of
+    Const("HoTT_Base.has_type", _) $ term $ typing =>
+      (case term of
+        Free (v, _) => Declared_Typings.map (Symtab.update (v, typing))
+      | _ => Exn.error "Cannot declare typing assumption (not a free variable)")
+  | _ => Exn.error "Not a typing judgment"
+
+(* Get the typing of a term *)
+fun get_typing ctxt tm =
+  case get_local_typing ctxt tm of
+    NONE => get_declared_typing (Proof_Context.theory_of ctxt) tm
+  | res => res

-(* Convert a Pure pre-term to an unchecked Pure term *)
-fun tm_of_ptm ctxt ptm =
+(* Prepare a Pure pre-term, converting it partway to a (unchecked) term *)
+fun prep_term ctxt ptm =
   let
     val ptm = Term_Position.strip_positions ptm
   in
    (case ptm of
-      Const ("_constrain", _) $ Free (t, _) $ (Const ("\<^type>fun", _) $ Const(n1, _) $ Const (n2, _)) =>
+      Const ("_constrain", _) $ Free (t, _) $
+        (Const ("\<^type>fun", _) $ Const(typ1, _) $ Const (typ2, _)) =>
        let
-         val typ1 = unprefix "\<^type>" n1
-         val typ2 = unprefix "\<^type>" n2
+         val typ1 = Lexicon.unmark_type typ1
+         val typ2 = Lexicon.unmark_type typ2
          val typ = typ1 ^ "\<Rightarrow>" ^ typ2 |> Syntax.read_typ ctxt
        in Free (t, typ) end
-    | Const ("_constrain", _) $ Free (t, _) $ Const (T, _) =>
-        Free (t, Syntax.read_typ ctxt (unprefix "\<^type>" T))
+    | Const ("_constrain", _) $ t $ Const (typ, _) =>
+        let
+          val T = Syntax.read_typ ctxt (Lexicon.unmark_type typ)
+        in
+          case t of
+            Free (s, _) => Free (s, T)
+          | Const (s, _) => Const (Lexicon.unmark_const s, T)
+          | _ => Exn.error "Unimplemented term constraint handler"
+        end
    | ptm1 $ ptm2 =>
-        (tm_of_ptm ctxt ptm1) $ (tm_of_ptm ctxt ptm2)
-    | Const (t, T) =>
-        let val t' =
-          unprefix "\<^const>" t handle Fail "unprefix" =>
-          unprefix "\<^type>" t handle Fail "unprefix" =>
-          t
-        in Const (t', T) end
+        (prep_term ctxt ptm1) $ (prep_term ctxt ptm2)
    | Abs (t, T, ptm') =>
-        Abs (t, T, tm_of_ptm ctxt ptm')
+        Abs (t, T, prep_term ctxt ptm')
+    | Const (t, T) => Const (Typing_Lib.unmark t, T)
    | t => t)
   end
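Note: taken together, Typing.get_typing now resolves a term's type by first searching the facts bound to "this", then the visible assumptions, and finally the theory-level typings declared with assume_type. A rough sketch of that lookup chain, as hypothetical lines in a theory where assume_type "f: A \<rightarrow> B" was declared earlier and no local assumptions are in scope; only functions exported by the TYPING signature above are used:

    (* Turn on the tracing attribute introduced at the top of typing.ML *)
    declare [[trace = true]]

    ML \<open>
      (* With no local typing facts, the lookup falls through to the declared typing of f *)
      val tm = Syntax.read_term @{context} "f";
      val ty = the (Typing.get_typing @{context} tm);
      writeln (Syntax.string_of_term @{context} ty)
    \<close>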