Diffstat
-rw-r--r--  HoTT_Base.thy     71
-rw-r--r--  HoTT_Methods.thy  75
-rw-r--r--  Prod.thy         108
-rw-r--r--  ROOT               3
-rw-r--r--  typing.ML         91
5 files changed, 226 insertions, 122 deletions
diff --git a/HoTT_Base.thy b/HoTT_Base.thy
index 9e8fee7..e74ef31 100644
--- a/HoTT_Base.thy
+++ b/HoTT_Base.thy
@@ -1,23 +1,34 @@
-(*
-Title: HoTT_Base.thy
-Author: Joshua Chen
-Date: 2018
+(********
+Isabelle/HoTT: Foundational definitions and notation
+Jan 2019
-Basic setup of a homotopy type theory object logic with a cumulative Russell-style universe hierarchy.
-*)
+This file is the starting point of the Isabelle/HoTT object logic, and contains the basic setup and definitions.
+Among other things, it:
+
+* Sets up object-level type inference functionality (via typing.ML).
+* Defines the universe hierarchy and its governing rules.
+* Defines named theorems for later use by proof methods.
+
+********)
theory HoTT_Base
imports Pure
begin
-
-section \<open>Basic setup\<close>
+section \<open>Terms and typing\<close>
declare[[eta_contract=false]]
-typedecl t \<comment> \<open>Type of object types and terms\<close>
-typedecl ord \<comment> \<open>Type of meta-level numerals\<close>
+typedecl t \<comment> \<open>Type of object-logic terms (which includes the types)\<close>
+
+judgment has_type :: "[t, t] \<Rightarrow> prop" ("(3_:/ _)")
+
+ML_file "typing.ML"
+
+section \<open>Universes\<close>
+
+typedecl ord \<comment> \<open>Type of meta-numerals\<close>
axiomatization
O :: ord and
@@ -34,56 +45,44 @@ declare
leq_min [intro!]
lt_trans [intro]
-
-section \<open>Judgment\<close>
-
-judgment hastype :: "[t, t] \<Rightarrow> prop" ("(3_:/ _)")
-
-
-section \<open>Universes\<close>
-
axiomatization
U :: "ord \<Rightarrow> t"
where
- U_hierarchy: "i < j \<Longrightarrow> U i: U j" and
+ U_hierarchy: "i < j \<Longrightarrow> U i: U j" and
U_cumulative: "\<lbrakk>A: U i; i < j\<rbrakk> \<Longrightarrow> A: U j" and
U_cumulative': "\<lbrakk>A: U i; i \<le> j\<rbrakk> \<Longrightarrow> A: U j"
text \<open>
-Using method @{method rule} with @{thm U_cumulative} is unsafe, if applied blindly it will typically lead to non-termination.
-One should instead use @{method elim}, or instantiate @{thm U_cumulative} suitably.
+Using method @{method rule} with @{thm U_cumulative} and @{thm U_cumulative'} is unsafe: if applied blindly it will very easily lead to non-termination.
+Instead use @{method elim}, or instantiate the rules suitably.
-@{thm U_cumulative'} is an alternative rule used by the method \<open>cumulativity\<close> in @{file HoTT_Methods.thy}.
+@{thm U_cumulative'} is an alternative rule used by the method @{theory_text cumulativity} in @{file HoTT_Methods.thy}.
\<close>
-
section \<open>Type families\<close>
-abbreviation (input) constrained :: "[t \<Rightarrow> t, t, t] \<Rightarrow> prop" ("(1_:/ _ \<longrightarrow> _)")
- where "f: A \<longrightarrow> B \<equiv> (\<And>x. x : A \<Longrightarrow> f x: B)"
-
-text \<open>
-The abbreviation @{abbrev constrained} is used to define type families, which are constrained expressions @{term "P: A \<longrightarrow> B"}, where @{term "A::t"} and @{term "B::t"} are small types.
-\<close>
-
-type_synonym tf = "t \<Rightarrow> t" \<comment> \<open>Type of type families\<close>
+abbreviation (input) constraint :: "[t \<Rightarrow> t, t, t] \<Rightarrow> prop" ("(1_:/ _ \<leadsto> _)")
+where "f: A \<leadsto> B \<equiv> (\<And>x. x: A \<Longrightarrow> f x: B)"
+text \<open>We use the notation @{prop "B: A \<leadsto> U i"} to abbreviate type families.\<close>
section \<open>Named theorems\<close>
-named_theorems comp
named_theorems form
+named_theorems comp
+named_theorems cong
text \<open>
-Declare named theorems to be used by proof methods defined in @{file HoTT_Methods.thy}.
-
-@{attribute comp} declares computation rules, which are used by the \<open>compute\<close> method, and may also be passed to invocations of the method \<open>subst\<close> to perform equational rewriting.
+The named theorems above will be used by proof methods defined in @{file HoTT_Methods.thy}.
@{attribute form} declares type formation rules.
These are mainly used by the \<open>cumulativity\<close> method, which lifts types into higher universes.
+
+@{attribute comp} declares computation rules, which are used by the \<open>compute\<close> method, and may also be passed to invocations of the method \<open>subst\<close> to perform equational rewriting.
+
+@{attribute cong} declares congruence rules, the definitional equality rules of the type theory.
\<close>
(* Todo: Set up the Simplifier! *)
-
end
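
The universe rules added above can already be exercised with Pure's rule method. A minimal sketch (not part of this commit), assuming the numeral rule lt_Suc: "n < Suc n" from the elided hunk:

    lemma "U i: U (Suc i)"
    by (rule U_hierarchy, rule lt_Suc)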
diff --git a/HoTT_Methods.thy b/HoTT_Methods.thy
index f0cee6c..99f3446 100644
--- a/HoTT_Methods.thy
+++ b/HoTT_Methods.thy
@@ -1,80 +1,73 @@
-(*
-Title: HoTT_Methods.thy
-Author: Joshua Chen
-Date: 2018
+(********
+Isabelle/HoTT: Proof methods
+Jan 2019
-Method setup for the HoTT logic.
-*)
+********)
theory HoTT_Methods
imports HoTT_Base "HOL-Eisbach.Eisbach" "HOL-Eisbach.Eisbach_Tools"
begin
+section \<open>Substitution and simplification\<close>
-section \<open>Handling universes\<close>
+ML_file "~~/src/Tools/misc_legacy.ML"
+ML_file "~~/src/Tools/IsaPlanner/isand.ML"
+ML_file "~~/src/Tools/IsaPlanner/rw_inst.ML"
+ML_file "~~/src/Tools/IsaPlanner/zipper.ML"
+ML_file "~~/src/Tools/eqsubst.ML"
+\<comment> \<open>Import the @{method subst} method, used for substituting definitional equalities.\<close>
-method provelt = (rule lt_Suc | (rule lt_trans, (rule lt_Suc)+)+)
+text \<open>
+Method @{theory_text compute} performs single-step simplifications, using any rules declared @{attribute comp} in the context.
+Premises of the rule(s) applied are added as new subgoals.
+\<close>
-method hierarchy = (rule U_hierarchy, provelt)
+method compute declares comp = (subst comp)
-method cumulativity declares form = (
- ((elim U_cumulative' | (rule U_cumulative', rule form)), rule leq_min) |
- ((elim U_cumulative | (rule U_cumulative, rule form)), provelt)
-)
+section \<open>Handling universes\<close>
text \<open>
-Methods @{method provelt}, @{method hierarchy}, and @{method cumulativity} prove statements of the form
+Methods @{theory_text provelt}, @{theory_text hierarchy}, and @{theory_text cumulativity} prove propositions of the form
+
\<^item> \<open>n < (Suc (... (Suc n) ...))\<close>,
\<^item> \<open>U i: U (Suc (... (Suc i) ...))\<close>, and
-\<^item> @{prop "A: U i \<Longrightarrow> A: U j"}, where @{prop "i \<le> j"}
-respectively.
-\<close>
-
-
-section \<open>Deriving typing judgments\<close>
+\<^item> @{prop "A: U i \<Longrightarrow> A: U j"}, where @{prop "i \<le> j"},
-method routine uses add = (assumption | rule add | rule)+
-
-text \<open>
-The method @{method routine} proves type judgments @{prop "a : A"} using the rules declared @{attribute intro} in the respective theory files, as well as additional provided lemmas passed using the modifier \<open>add\<close>.
+respectively.
\<close>
+method provelt = (rule lt_Suc | (rule lt_trans, (rule lt_Suc)+)+)
-section \<open>Substitution and simplification\<close>
+method hierarchy = (rule U_hierarchy, provelt)
-ML_file "~~/src/Tools/misc_legacy.ML"
-ML_file "~~/src/Tools/IsaPlanner/isand.ML"
-ML_file "~~/src/Tools/IsaPlanner/rw_inst.ML"
-ML_file "~~/src/Tools/IsaPlanner/zipper.ML"
-ML_file "~~/src/Tools/eqsubst.ML"
+method cumulativity declares form = (
+ ((elim U_cumulative' | (rule U_cumulative', rule form)), rule leq_min) |
+ ((elim U_cumulative | (rule U_cumulative, rule form)), provelt)
+)
-\<comment> \<open>Import the @{method subst} method, used for substituting definitional equalities.\<close>
+section \<open>Deriving typing judgments\<close>
-method compute declares comp = (subst comp)
+method routine uses add = (assumption | rule add | rule)+
text \<open>
-Method @{method compute} performs single-step simplifications, using any rules declared @{attribute comp} in the context.
-Premises of the rule(s) applied are added as new subgoals.
+The method @{method routine} proves typing judgments @{prop "a: A"} using the rules declared @{attribute intro} in the respective theory files, as well as any additional lemmas provided with the modifier @{theory_text add}.
\<close>
-
section \<open>Derivation search\<close>
text \<open>
-Combine @{method routine} and @{method compute} to search for derivations of judgments.
-Also handle universes using @{method hierarchy} and @{method cumulativity}.
+The method @{theory_text derive} combines @{method routine} and @{method compute} to search for derivations of judgments.
+It also handles universes using @{method hierarchy} and @{method cumulativity}.
\<close>
method derive uses lems = (routine add: lems | compute comp: lems | cumulativity form: lems | hierarchy)+
-
section \<open>Induction\<close>
text \<open>
-Placeholder section for the automation of induction, i.e. using the elimination rules.
-At the moment one must directly apply them with \<open>rule X_elim\<close>.
+Placeholder section for the automation of induction, i.e. using the type elimination rules.
+At the moment one must directly apply them with @{method rule} or @{method elim}.
\<close>
-
end
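
The restructured methods can be tried on exactly the kinds of goals the text above describes. A minimal sketch (not part of this commit), assuming lt_Suc: "n < Suc n" and leq_min: "O \<le> n" as suggested by the definitions of provelt and cumulativity:

    lemma "U O: U (Suc (Suc O))"
    by hierarchy

    lemma "A: U O \<Longrightarrow> A: U (Suc O)"
    by cumulativity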
diff --git a/Prod.thy b/Prod.thy
index 8d840bd..0de7d89 100644
--- a/Prod.thy
+++ b/Prod.thy
@@ -1,91 +1,111 @@
-(*
-Title: Prod.thy
-Author: Joshua Chen
-Date: 2018
+(********
+Isabelle/HoTT: Dependent product (dependent function)
+Jan 2019
-Dependent product type
-*)
+********)
theory Prod
imports HoTT_Base HoTT_Methods
begin
-
-section \<open>Basic definitions\<close>
+section \<open>Basic type definitions\<close>
axiomatization
- Prod :: "[t, tf] \<Rightarrow> t" and
- lambda :: "(t \<Rightarrow> t) \<Rightarrow> t" (binder "\<^bold>\<lambda>" 30) and
- appl :: "[t, t] \<Rightarrow> t" ("(1_`_)" [120, 121] 120) \<comment> \<open>Application binds tighter than abstraction.\<close>
+ Prod :: "[t, t \<Rightarrow> t] \<Rightarrow> t" and
+ lam :: "[t, t \<Rightarrow> t] \<Rightarrow> t" and
+ app :: "[t, t] \<Rightarrow> t" ("(1_ ` _)" [120, 121] 120)
+ \<comment> \<open>Application should bind tighter than abstraction.\<close>
syntax
- "_prod" :: "[idt, t, t] \<Rightarrow> t" ("(3\<Prod>_: _./ _)" 30)
- "_prod_ascii" :: "[idt, t, t] \<Rightarrow> t" ("(3II _: _./ _)" 30)
-
-text \<open>The translations below bind the variable @{term x} in the expressions @{term B} and @{term b}.\<close>
-
+ "_Prod" :: "[idt, t, t] \<Rightarrow> t" ("(3TT '(_: _')./ _)" 30)
+ "_Prod'" :: "[idt, t, t] \<Rightarrow> t" ("(3TT _: _./ _)" 30)
+ "_lam" :: "[idt, t, t] \<Rightarrow> t" ("(3,\\ '(_: _')./ _)" 30)
+ "_lam'" :: "[idt, t, t] \<Rightarrow> t" ("(3,\\ _: _./ _)" 30)
translations
- "\<Prod>x:A. B" \<rightleftharpoons> "CONST Prod A (\<lambda>x. B)"
- "II x:A. B" \<rightharpoonup> "CONST Prod A (\<lambda>x. B)"
+ "TT(x: A). B" \<rightleftharpoons> "(CONST Prod) A (\<lambda>x. B)"
+ "TT x: A. B" \<rightleftharpoons> "(CONST Prod) A (\<lambda>x. B)"
+ ",\\(x: A). b" \<rightleftharpoons> "(CONST lam) A (\<lambda>x. b)"
+ ",\\x: A. b" \<rightleftharpoons> "(CONST lam) A (\<lambda>x. b)"
-text \<open>Non-dependent functions are a special case.\<close>
+text \<open>
+The syntax translations above bind the variable @{term x} in the expressions @{term B} and @{term b}.
+\<close>
+
+text \<open>Non-dependent functions are a special case:\<close>
abbreviation Fun :: "[t, t] \<Rightarrow> t" (infixr "\<rightarrow>" 40)
- where "A \<rightarrow> B \<equiv> \<Prod>_: A. B"
+where "A \<rightarrow> B \<equiv> TT(_: A). B"
axiomatization where
\<comment> \<open>Type rules\<close>
- Prod_form: "\<lbrakk>A: U i; B: A \<longrightarrow> U i\<rbrakk> \<Longrightarrow> \<Prod>x:A. B x: U i" and
+ Prod_form: "\<lbrakk>A: U i; \<And>x. x: A \<Longrightarrow> B x: U i\<rbrakk> \<Longrightarrow> TT x: A. B x: U i" and
- Prod_intro: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x: B x; A: U i\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>x. b x: \<Prod>x:A. B x" and
+ Prod_intro: "\<lbrakk>A: U i; \<And>x. x: A \<Longrightarrow> b x: B x\<rbrakk> \<Longrightarrow> ,\\x: A. b x: TT x: A. B x" and
- Prod_elim: "\<lbrakk>f: \<Prod>x:A. B x; a: A\<rbrakk> \<Longrightarrow> f`a: B a" and
+ Prod_elim: "\<lbrakk>f: TT x: A. B x; a: A\<rbrakk> \<Longrightarrow> f`a: B a" and
- Prod_comp: "\<lbrakk>a: A; \<And>x. x: A \<Longrightarrow> b x: B x\<rbrakk> \<Longrightarrow> (\<^bold>\<lambda>x. b x)`a \<equiv> b a" and
+ Prod_cmp: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x: B x; a: A\<rbrakk> \<Longrightarrow> (,\\x: A. b x)`a \<equiv> b a" and
- Prod_uniq: "f: \<Prod>x:A. B x \<Longrightarrow> \<^bold>\<lambda>x. f`x \<equiv> f" and
+ Prod_uniq: "f: TT x: A. B x \<Longrightarrow> ,\\x: A. f`x \<equiv> f" and
\<comment> \<open>Congruence rules\<close>
- Prod_form_eq: "\<lbrakk>A: U i; B: A \<longrightarrow> U i; C: A \<longrightarrow> U i; \<And>x. x: A \<Longrightarrow> B x \<equiv> C x\<rbrakk> \<Longrightarrow> \<Prod>x:A. B x \<equiv> \<Prod>x:A. C x" and
+ Prod_form_eq: "\<lbrakk>A: U i; B: A \<leadsto> U i; C: A \<leadsto> U i; \<And>x. x: A \<Longrightarrow> B x \<equiv> C x\<rbrakk>
+ \<Longrightarrow> TT x: A. B x \<equiv> TT x: A. C x" and
- Prod_intro_eq: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x \<equiv> c x; A: U i\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>x. b x \<equiv> \<^bold>\<lambda>x. c x"
+ Prod_intro_eq: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x \<equiv> c x; A: U i\<rbrakk> \<Longrightarrow> ,\\x: A. b x \<equiv> ,\\x: A. c x"
text \<open>
The Pure rules for \<open>\<equiv>\<close> only let us judge strict syntactic equality of object lambda expressions.
-The actual definitional equality rule is @{thm Prod_intro_eq}.
+The actual definitional equality rule in the type theory is @{thm Prod_intro_eq}.
Note that this is a separate rule from function extensionality.
-
-Note that the bold lambda symbol \<open>\<^bold>\<lambda>\<close> used for dependent functions clashes with the proof term syntax (cf. \<section>2.5.2 of the Isabelle/Isar Implementation).
\<close>
lemmas Prod_form [form]
lemmas Prod_routine [intro] = Prod_form Prod_intro Prod_elim
-lemmas Prod_comps [comp] = Prod_comp Prod_uniq
+lemmas Prod_comp [comp] = Prod_cmp Prod_uniq
+lemmas Prod_cong [cong] = Prod_form_eq Prod_intro_eq
+section \<open>Function composition\<close>
-section \<open>Additional definitions\<close>
-
-definition compose :: "[t, t] \<Rightarrow> t" (infixr "o" 110) where "g o f \<equiv> \<^bold>\<lambda>x. g`(f`x)"
-
-syntax "_compose" :: "[t, t] \<Rightarrow> t" (infixr "\<circ>" 110)
-translations "g \<circ> f" \<rightleftharpoons> "g o f"
+definition compose :: "[t, t, t] \<Rightarrow> t"
+where "compose A g f \<equiv> ,\\x: A. g`(f`x)"
declare compose_def [comp]
+syntax "_compose" :: "[t, t] \<Rightarrow> t" (infixr "o" 110)
+
+parse_translation \<open>
+let fun compose_tr ctxt tms =
+ let
+ val g :: f :: _ = tms |> map (Typing.tm_of_ptm ctxt)
+ val dom =
+ case f of
+ Const ("Prod.lam", _) $ T $ _ => T
+ | _ => (case Typing.get_typing f (Typing.typing_assms ctxt) of
+ SOME (Const ("Prod.Prod", _) $ T $ _) => T
+ | SOME _ => Exn.error "Can't compose with a non-function"
+ | NONE => Exn.error "Cannot infer domain of composition: please state this explicitly")
+ in
+ @{const compose} $ dom $ g $ f
+ end
+in
+ [("_compose", compose_tr)]
+end
+\<close>
+
lemma compose_assoc:
- assumes "A: U i" and "f: A \<rightarrow> B" "g: B \<rightarrow> C" "h: \<Prod>x:C. D x"
- shows "(h \<circ> g) \<circ> f \<equiv> h \<circ> (g \<circ> f)"
-by (derive lems: assms Prod_intro_eq)
+ assumes "A: U i" "f: A \<rightarrow> B" "g: B \<rightarrow> C" "h: TT x: C. D x"
+ shows "compose A (compose B h g) f \<equiv> compose A h (compose A g f)"
+by (derive lems: assms cong)
lemma compose_comp:
assumes "A: U i" and "\<And>x. x: A \<Longrightarrow> b x: B" and "\<And>x. x: B \<Longrightarrow> c x: C x"
- shows "(\<^bold>\<lambda>x. c x) \<circ> (\<^bold>\<lambda>x. b x) \<equiv> \<^bold>\<lambda>x. c (b x)"
-by (derive lems: assms Prod_intro_eq)
-
-abbreviation id :: t where "id \<equiv> \<^bold>\<lambda>x. x" \<comment> \<open>Polymorphic identity function\<close>
+ shows "(,\\x: B. c x) o (,\\x: A. b x) \<equiv> ,\\x: A. c (b x)"
+by (derive lems: assms cong)
+abbreviation id :: "t \<Rightarrow> t" where "id A \<equiv> ,\\x: A. x"
end
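
With the new TT/,\\ notation and the rules declared [intro], the typing of the polymorphic identity function id A \<equiv> ,\\x: A. x becomes a one-liner for the derive method. A minimal sketch (not part of this commit):

    lemma
      assumes "A: U i"
      shows "id A: A \<rightarrow> A"
    by (derive lems: assms)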
diff --git a/ROOT b/ROOT
index cecdd32..b05d022 100644
--- a/ROOT
+++ b/ROOT
@@ -6,7 +6,8 @@ session HoTT = Pure +
hierarchy à la Russell.
Follows the development of the theory in
- The Univalent Foundations Program, Homotopy Type Theory: Univalent Foundations of Mathematics,
+ The Univalent Foundations Program,
+ Homotopy Type Theory: Univalent Foundations of Mathematics,
Institute for Advanced Study, (2013).
Available online at https://homotopytypetheory.org/book.
diff --git a/typing.ML b/typing.ML
new file mode 100644
index 0000000..bfe8409
--- /dev/null
+++ b/typing.ML
@@ -0,0 +1,91 @@
+(********
+Isabelle/HoTT: typing.ML
+Feb 2019
+
+Functionality for object-level type inference.
+
+********)
+
+signature TYPING =
+sig
+ type jmt = term
+ val term_of_jmt: jmt -> term
+ val type_of_jmt: jmt -> term
+ val typing_assms: Proof.context -> jmt list
+ val get_typing: term -> jmt list -> term option
+ val get_global_typing: theory -> Proof.context -> term -> term option
+ val tm_of_ptm: Proof.context -> term -> term
+end
+
+structure Typing: TYPING =
+struct
+
+type jmt = term
+
+(* Functions to extract a and A from propositions "a: A" *)
+fun term_of_jmt (@{const has_type} $ t $ _) = t
+ | term_of_jmt _ = Exn.error "Not a typing judgment"
+
+fun type_of_jmt (@{const has_type} $ _ $ T) = T
+ | type_of_jmt _ = Exn.error "Not a typing judgment"
+
+(* Get the typing assumptions in the current context *)
+fun typing_assms ctxt =
+ let
+ fun is_typing_jmt (@{const has_type} $ _ $ _) = true
+ | is_typing_jmt _ = false
+ in
+ Assumption.all_prems_of ctxt |> map Thm.prop_of |> filter is_typing_jmt
+ end
+
+(* Search for a term typing in a list of judgments *)
+fun get_typing tm jmts =
+ let val find_type =
+ fn jmt => if Term.aconv_untyped (tm, term_of_jmt jmt) then SOME (type_of_jmt jmt) else NONE
+ in get_first find_type jmts end
+
+(* Private storage space for global typing data *)
+structure Typing_Data = Theory_Data
+(
+ type T = term Symtab.table
+ val empty = Symtab.empty
+ val extend = I
+
+ fun merge (tbl, tbl') =
+ let
+ val key_vals = map (Symtab.lookup_key tbl') (Symtab.keys tbl')
+ val updates = map (fn SOME kv => Symtab.update kv) key_vals
+ fun f u t = u t
+ in fold f updates tbl end
+)
+
+(* Accessor for the above data *)
+fun get_global_typing thy ctxt tm = Symtab.lookup (Typing_Data.get thy) (Syntax.string_of_term ctxt tm)
+
+(* Convert a Pure pre-term to an unchecked Pure term *)
+fun tm_of_ptm ctxt ptm =
+ let val ptm = Term_Position.strip_positions ptm
+ in
+ (case ptm of
+ Const ("_constrain", _) $ Free (t, _) $ (Const ("\<^type>fun", _) $ Const(n1, _) $ Const (n2, _)) =>
+ let
+ val typ1 = unprefix "\<^type>" n1
+ val typ2 = unprefix "\<^type>" n2
+ val typ = typ1 ^ "\<Rightarrow>" ^ typ2 |> Syntax.read_typ ctxt
+ in Free (t, typ) end
+ | Const ("_constrain", _) $ Free (t, _) $ Const (T, _) =>
+ Free (t, Syntax.read_typ ctxt (unprefix "\<^type>" T))
+ | ptm1 $ ptm2 =>
+ (tm_of_ptm ctxt ptm1) $ (tm_of_ptm ctxt ptm2)
+ | Const (t, T) =>
+ let val t' =
+ unprefix "\<^const>" t handle Fail "unprefix" =>
+ unprefix "\<^type>" t handle Fail "unprefix" =>
+ t
+ in Const (t', T) end
+ | Abs (t, T, ptm') =>
+ Abs (t, T, tm_of_ptm ctxt ptm')
+ | t => t)
+ end
+
+end
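
The exported typing_assms and get_typing combine into a simple context lookup, exactly as used by compose_tr in Prod.thy. A minimal sketch (not part of this commit); lookup_type is a hypothetical helper name:

    ML \<open>
      (* Sketch: return the object-level type that the typing assumptions of ctxt
         ascribe to tm, if any; mirrors the lookup in Prod.thy's compose_tr. *)
      fun lookup_type ctxt tm =
        Typing.get_typing tm (Typing.typing_assms ctxt)
    \<close>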