35 files changed, 3291 insertions, 1835 deletions
@@ -1,12 +1,3 @@ -.directory -*.thy~ +*~ \#*.thy# - -ex/*.thy~ -ex/\#*.thy# - -ex/Hott book/*.thy~ -ex/Hott book/\#*.thy# - -tests/*.thy~ -tests/\#*.thy# +\#*.ML# @@ -1,390 +0,0 @@ -(******** -Isabelle/HoTT: Equality -Feb 2019 - -Contains: -* Type definitions for intensional equality -* Some setup for path induction -* Basic properties of equality (inv, pathcomp) -* The higher groupoid structure of types -* Functoriality of functions (ap) - -********) - -theory Eq -imports HoTT_Methods Prod - -begin - - -section \<open>Type definitions\<close> - -axiomatization - Eq :: "[t, t, t] \<Rightarrow> t" and - refl :: "t \<Rightarrow> t" and - indEq :: "[[t, t, t] \<Rightarrow> t, t \<Rightarrow> t, t, t, t] \<Rightarrow> t" - -syntax - "_Eq" :: "[t, t, t] \<Rightarrow> t" ("(2_ =[_]/ _)" [101, 0, 101] 100) -translations - "a =[A] b" \<rightleftharpoons> "(CONST Eq) A a b" - -axiomatization where - Eq_form: "\<lbrakk>A: U i; a: A; b: A\<rbrakk> \<Longrightarrow> a =[A] b: U i" and - - Eq_intro: "a: A \<Longrightarrow> (refl a): a =[A] a" and - - Eq_elim: "\<lbrakk> - p: a =[A] b; - a: A; - b: A; - \<And>x. x: A \<Longrightarrow> f x: C x x (refl x); - \<And>x y. \<lbrakk>x: A; y: A\<rbrakk> \<Longrightarrow> C x y: x =[A] y \<leadsto> U i \<rbrakk> \<Longrightarrow> indEq C f a b p: C a b p" and - - Eq_comp: "\<lbrakk> - a: A; - \<And>x. x: A \<Longrightarrow> f x: C x x (refl x); - \<And>x y. \<lbrakk>x: A; y: A\<rbrakk> \<Longrightarrow> C x y: x =[A] y \<leadsto> U i \<rbrakk> \<Longrightarrow> indEq C f a a (refl a) \<equiv> f a" - -lemmas Eq_form [form] -lemmas Eq_routine [intro] = Eq_form Eq_intro Eq_elim -lemmas Eq_comp [comp] - - -section \<open>Path induction\<close> - -text \<open>We set up rudimentary automation of path induction:\<close> - -method path_ind for C :: "[t, t, t] \<Rightarrow> t" = - (rule Eq_elim[where ?C=C]; (assumption | fact)?) - -method path_ind' for a b p :: t = - (rule Eq_elim[where ?a=a and ?b=b and ?p=p]; (assumption | fact)?) - -syntax "_induct_over" :: "[idt, idt, idt, t] \<Rightarrow> [t, t, t] \<Rightarrow> t" ("(2{_, _, _}/ _)" 0) -translations "{x, y, p} C" \<rightharpoonup> "\<lambda>x y p. C" - -text \<open> -Use "@{method path_ind} @{term "{x, y, p} C x y p"}" to perform path induction for the given type family over the variables @{term x}, @{term y}, and @{term p}, -and "@{method path_ind'} @{term a} @{term b} @{term p}" to let Isabelle try and infer the shape of the type family itself (this doesn't always work!). - -Note that @{term "{x, y, p} C x y p"} is just syntactic sugar for @{term "\<lambda>x y p. C x y p"}. -\<close> - - -section \<open>Properties of equality\<close> - -subsection \<open>Symmetry (path inverse)\<close> - -definition inv :: "[t, t, t] \<Rightarrow> t" -where "inv A x y \<equiv> \<lambda>p: x =[A] y. indEq (\<lambda>x y. &(y =[A] x)) (\<lambda>x. 
refl x) x y p" - -syntax "_inv" :: "[t, t, t] \<Rightarrow> t" ("(2inv[_, _, _])" [0, 0, 0] 999) -translations "inv[A, x, y]" \<rightleftharpoons> "(CONST inv) A x y" - -syntax "_inv'" :: "t \<Rightarrow> t" ("inv") - -text \<open>Pretty-printing switch for path inverse:\<close> - -ML \<open>val pretty_inv = Attrib.setup_config_bool @{binding "pretty_inv"} (K true)\<close> - -print_translation \<open> -let fun inv_tr' ctxt [A, x, y] = - if Config.get ctxt pretty_inv - then Syntax.const @{syntax_const "_inv'"} - else Syntax.const @{syntax_const "_inv"} $ A $ x $ y -in - [(@{const_syntax inv}, inv_tr')] -end -\<close> - -lemma inv_type: "\<lbrakk>A: U i; x: A; y: A\<rbrakk> \<Longrightarrow> inv[A, x, y]: x =[A] y \<rightarrow> y =[A] x" -unfolding inv_def by derive - -lemma inv_comp: "\<lbrakk>A: U i; a: A\<rbrakk> \<Longrightarrow> inv[A, a, a]`(refl a) \<equiv> refl a" -unfolding inv_def by derive - -declare - inv_type [intro] - inv_comp [comp] - - -subsection \<open>Transitivity (path composition)\<close> - -schematic_goal transitivity: - assumes "A: U i" "x: A" "y: A" "p: x =[A] y" - shows "?p: \<Prod>z: A. y =[A] z \<rightarrow> x =[A] z" -by - (path_ind' x y p, quantify_all, - path_ind "{x, z, _} x =[A] z", - rule Eq_intro, routine add: assms) - -definition pathcomp :: "[t, t, t, t] \<Rightarrow> t" -where - "pathcomp A x y z \<equiv> \<lambda>p: x =[A] y. \<lambda>q: y =[A] z. (indEq - (\<lambda>x y. & \<Prod>z: A. y =[A] z \<rightarrow> x =[A] z) - (\<lambda>x. \<lambda>z: A. \<lambda>q: x =[A] z. indEq (\<lambda>x z. & x =[A] z) (\<lambda>x. refl x) x z q) - x y p)`z`q" - -syntax "_pathcomp" :: "[t, t, t, t, t, t] \<Rightarrow> t" - ("(2pathcomp[_, _, _, _])" [0, 0, 0, 0] 999) -translations "pathcomp[A, x, y, z]" \<rightleftharpoons> "(CONST pathcomp) A x y z" - -syntax "_pathcomp'" :: "[t, t] \<Rightarrow> t" ("pathcomp") - -ML \<open>val pretty_pathcomp = Attrib.setup_config_bool @{binding "pretty_pathcomp"} (K true)\<close> -\<comment> \<open>Pretty-printing switch for path composition\<close> - -print_translation \<open> -let fun pathcomp_tr' ctxt [A, x, y, z] = - if Config.get ctxt pretty_pathcomp - then Syntax.const @{syntax_const "_pathcomp'"} - else Syntax.const @{syntax_const "_pathcomp"} $ A $ x $ y $ z -in - [(@{const_syntax pathcomp}, pathcomp_tr')] -end -\<close> - -corollary pathcomp_type: - assumes [intro]: "A: U i" "x: A" "y: A" "z: A" - shows "pathcomp[A, x, y, z]: x =[A] y \<rightarrow> y =[A] z \<rightarrow> x =[A] z" -unfolding pathcomp_def by (derive lems: transitivity) - -corollary pathcomp_comp: - assumes [intro]: "A: U i" "a: A" - shows "pathcomp[A, a, a, a]`(refl a)`(refl a) \<equiv> refl a" -unfolding pathcomp_def by (derive lems: transitivity) - -declare - pathcomp_type [intro] - pathcomp_comp [comp] - - -section \<open>Higher groupoid structure of types\<close> - -schematic_goal pathcomp_idr: - assumes [intro]: "A: U i" "x: A" "y: A" "p: x =[A] y" - shows "?prf: pathcomp[A, x, y, y]`p`(refl y) =[x =[A] y] p" -proof (path_ind' x y p) - show "\<And>x. x: A \<Longrightarrow> refl(refl x): pathcomp[A, x, x, x]`(refl x)`(refl x) =[x =[A] x] (refl x)" - by derive -qed routine - -schematic_goal pathcomp_idl: - assumes [intro]: "A: U i" "x: A" "y: A" "p: x =[A] y" - shows "?prf: pathcomp[A, x, x, y]`(refl x)`p =[x =[A] y] p" -proof (path_ind' x y p) - show "\<And>x. 
x: A \<Longrightarrow> refl(refl x): pathcomp[A, x, x, x]`(refl x)`(refl x) =[x =[A] x] (refl x)" - by derive -qed routine - -schematic_goal pathcomp_invr: - assumes [intro]: "A: U i" "x: A" "y: A" "p: x =[A] y" - shows "?prf: pathcomp[A, x, y, x]`p`(inv[A, x, y]`p) =[x =[A] x] (refl x)" -proof (path_ind' x y p) - show - "\<And>x. x: A \<Longrightarrow> refl(refl x): - pathcomp[A, x, x, x]`(refl x)`(inv[A, x, x]`(refl x)) =[x =[A] x] (refl x)" - by derive -qed routine - -schematic_goal pathcomp_invl: - assumes [intro]: "A: U i" "x: A" "y: A" "p: x =[A] y" - shows "?prf: pathcomp[A, y, x, y]`(inv[A, x, y]`p)`p =[y =[A] y] refl(y)" -proof (path_ind' x y p) - show - "\<And>x. x: A \<Longrightarrow> refl(refl x): - pathcomp[A, x, x, x]`(inv[A, x, x]`(refl x))`(refl x) =[x =[A] x] (refl x)" - by derive -qed routine - -schematic_goal inv_involutive: - assumes [intro]: "A: U i" "x: A" "y: A" "p: x =[A] y" - shows "?prf: inv[A, y, x]`(inv[A, x, y]`p) =[x =[A] y] p" -proof (path_ind' x y p) - show "\<And>x. x: A \<Longrightarrow> refl(refl x): inv[A, x, x]`(inv[A, x, x]`(refl x)) =[x =[A] x] (refl x)" - by derive -qed routine - -text \<open> -We use the proof of associativity of path composition to demonstrate the process of deriving proof terms. -The proof involves a triply-nested path induction, which is cumbersome to write in a structured style, especially if one does not know the correct form of the proof term in the first place. -However, using proof scripts the derivation becomes quite easy: we simply give the correct form of the statements to induct over, and prove the simple subgoals returned by the prover. - -The proof is sensitive to the order of the quantifiers in the product. -In particular, changing the order causes unification to fail in the path inductions. -It seems to be good practice to order the variables in the order over which we will path-induct. -\<close> - -schematic_goal pathcomp_assoc: - assumes [intro]: "A: U i" - shows - "?prf: \<Prod>x: A. \<Prod>y: A. \<Prod>p: x =[A] y. \<Prod>z: A. \<Prod>q: y =[A] z. \<Prod>w: A. \<Prod>r: z =[A] w. - pathcomp[A, x, y, w]`p`(pathcomp[A, y, z, w]`q`r) =[x =[A] w] - pathcomp[A, x, z, w]`(pathcomp[A, x, y, z]`p`q)`r" - -apply (quantify 3) -apply (path_ind "{x, y, p} - \<Prod>(z: A). \<Prod>(q: y =[A] z). \<Prod>(w: A). \<Prod>(r: z =[A] w). - pathcomp[A, x, y, w]`p`(pathcomp[A, y, z, w]`q`r) =[x =[A] w] - pathcomp[A, x, z, w]`(pathcomp[A, x, y, z]`p`q)`r") -apply (quantify 2) -apply (path_ind "{x, z, q} - \<Prod>(w: A). \<Prod>(r: z =[A] w). - pathcomp[A, x, x, w]`(refl x)`(pathcomp[A, x, z, w]`q`r) =[x =[A] w] - pathcomp[A, x, z, w]`(pathcomp[A, x, x, z]`(refl x)`q)`r") -apply (quantify 2) -apply (path_ind "{x, w, r} - pathcomp[A, x, x, w]`(refl x)`(pathcomp[A, x, x, w] `(refl x)`r) =[x =[A] w] - pathcomp[A, x, x, w]`(pathcomp[A, x, x, x]`(refl x)`(refl x))`r") - -text \<open>The rest is now routine.\<close> - -proof - - show - "\<And>x. x: A \<Longrightarrow> refl(refl x): - pathcomp[A, x, x, x]`(refl x)`(pathcomp[A, x, x, x]`(refl x)`(refl x)) =[x =[A] x] - pathcomp[A, x, x, x]`(pathcomp[A, x, x, x]`(refl x)`(refl x))`(refl x)" - by derive -qed routine - - -section \<open>Functoriality of functions on types under equality\<close> - -schematic_goal transfer: - assumes [intro]: - "A: U i" "B: U i" "f: A \<rightarrow> B" - "x: A" "y: A" "p: x =[A] y" - shows "?prf: f`x =[B] f`y" -by (path_ind' x y p, rule Eq_routine, routine) - -definition ap :: "[t, t, t, t, t] \<Rightarrow> t" -where "ap f A B x y \<equiv> \<lambda>p: x =[A] y. 
indEq ({x, y, _} f`x =[B] f`y) (\<lambda>x. refl (f`x)) x y p" - -syntax "_ap" :: "[t, t, t, t, t] \<Rightarrow> t" ("(2ap[_, _, _, _, _])") -translations "ap[f, A, B, x, y]" \<rightleftharpoons> "(CONST ap) f A B x y" - -syntax "_ap'" :: "t \<Rightarrow> t" ("ap[_]") - -ML \<open>val pretty_ap = Attrib.setup_config_bool @{binding "pretty_ap"} (K true)\<close> - -print_translation \<open> -let fun ap_tr' ctxt [f, A, B, x, y] = - if Config.get ctxt pretty_ap - then Syntax.const @{syntax_const "_ap'"} $ f - else Syntax.const @{syntax_const "_ap"} $ f $ A $ B $ x $ y -in - [(@{const_syntax ap}, ap_tr')] -end -\<close> - -corollary ap_typ: - assumes [intro]: - "A: U i" "B: U i" "f: A \<rightarrow> B" - "x: A" "y: A" - shows "ap[f, A, B, x, y]: x =[A] y \<rightarrow> f`x =[B] f`y" -unfolding ap_def by routine - -corollary ap_app_typ: - assumes [intro]: - "A: U i" "B: U i" "f: A \<rightarrow> B" - "x: A" "y: A" "p: x =[A] y" - shows "ap[f, A, B, x, y]`p: f`x =[B] f`y" -by (routine add: ap_typ) - -lemma ap_comp: - assumes [intro]: "A: U i" "B: U i" "f: A \<rightarrow> B" "x: A" - shows "ap[f, A, B, x, x]`(refl x) \<equiv> refl (f`x)" -unfolding ap_def by derive - -lemmas ap_type [intro] = ap_typ ap_app_typ -lemmas ap_comp [comp] - - -schematic_goal ap_func_pathcomp: - assumes [intro]: "A: U i" "B: U i" "f: A \<rightarrow> B" - shows - "?prf: \<Prod>x: A. \<Prod>y: A. \<Prod>p: x =[A] y. \<Prod>z: A. \<Prod>q: y =[A] z. - ap[f, A, B, x, z]`(pathcomp[A, x, y, z]`p`q) =[f`x =[B] f`z] - pathcomp[B, f`x, f`y, f`z]`(ap[f, A, B, x, y]`p)`(ap[f, A, B, y, z]`q)" -apply (quantify 3) -apply (path_ind "{x, y, p} - \<Prod>z: A. \<Prod>q: y =[A] z. - ap[f, A, B, x, z]`(pathcomp[A, x, y, z]`p`q) =[f`x =[B] f`z] - pathcomp[B, f`x, f`y, f`z]`(ap[f, A, B, x, y]`p)`(ap[f, A, B, y, z]`q)") -apply (quantify 2) -apply (path_ind "{x, z, q} - ap[f, A, B, x, z]`(pathcomp[A, x, x, z]`(refl x)`q) =[f`x =[B] f`z] - pathcomp[B, f`x, f`x, f`z]`(ap[f, A, B, x, x]`(refl x))`(ap[f, A, B, x, z]`q)") -proof - - show - "\<And>x. x: A \<Longrightarrow> refl(refl(f`x)) : - ap[f, A, B, x, x]`(pathcomp[A, x, x, x]`(refl x)`(refl x)) =[f`x =[B] f`x] - pathcomp[B, f`x, f`x, f`x]`(ap[f, A, B, x, x]`(refl x))`(ap[f, A, B, x, x]`(refl x))" - by derive -qed routine - - -schematic_goal ap_func_compose: - assumes [intro]: - "A: U i" "B: U i" "C: U i" - "f: A \<rightarrow> B" "g: B \<rightarrow> C" - shows - "?prf: \<Prod>x: A. \<Prod>y: A. \<Prod>p: x =[A] y. - ap[g, B, C, f`x, f`y]`(ap[f, A, B, x, y]`p) =[g`(f`x) =[C] g`(f`y)] - ap[g o[A] f, A, C, x, y]`p" -apply (quantify 3) -apply (path_ind "{x, y, p} - ap[g, B, C, f`x, f`y]`(ap[f, A, B, x, y]`p) =[g`(f`x) =[C] g`(f`y)] - ap[g o[A] f, A, C, x, y]`p") -proof - - show "\<And>x. x: A \<Longrightarrow> refl(refl (g`(f`x))) : - ap[g, B, C, f`x, f`x]`(ap[f, A, B, x, x]`(refl x)) =[g`(f`x) =[C] g`(f`x)] - ap[g o[A] f, A, C, x, x]`(refl x)" - unfolding compose_def by derive - - fix x y p assume [intro]: "x: A" "y: A" "p: x =[A] y" - show - "ap[g, B, C, f`x, f`y]`(ap[f, A, B, x, y]`p) =[g`(f`x) =[C] g`(f`y)] - ap[g o[A] f, A, C, x, y]`p: U i" - proof - have - "ap[g o[A] f, A, C, x, y]`p: (\<lambda>x: A. g`(f`x))`x =[C] (\<lambda>x: A. g`(f`x))`y" - unfolding compose_def by derive - moreover have - "(\<lambda>x: A. g`(f`x))`x =[C] (\<lambda>x: A. 
g`(f`x))`y \<equiv> g`(f`x) =[C] g`(f`y)" by derive - ultimately show - "ap[g o[A] f, A, C, x, y]`p: g`(f`x) =[C] g`(f`y)" by simp - qed derive -qed routine - -schematic_goal ap_func_inv: - assumes [intro]: - "A: U i" "B: U i" "f: A \<rightarrow> B" - "x: A" "y: A" "p: x =[A] y" - shows "?prf: - ap[f, A, B, y, x]`(inv[A, x, y]`p) =[f`y =[B] f`x] inv[B, f`x, f`y]`(ap[f, A, B, x, y]`p)" -proof (path_ind' x y p) - show "\<And>x. x: A \<Longrightarrow> refl(refl(f`x)): - ap[f, A, B, x, x]`(inv[A, x, x]`(refl x)) =[f`x =[B] f`x] - inv[B, f`x, f`x]`(ap[f, A, B, x, x]`(refl x))" - by derive -qed routine - -schematic_goal ap_func_id: - assumes [intro]: "A: U i" "x: A" "y: A" "p: x =[A] y" - shows "?prf: ap[id A, A, A, x, y]`p =[x =[A] y] p" -proof (path_ind' x y p) - fix x assume [intro]: "x: A" - show "refl(refl x): ap[id A, A, A, x, x]`(refl x) =[x =[A] x] refl x" by derive - - fix y p assume [intro]: "y: A" "p: x =[A] y" - have "ap[id A, A, A, x, y]`p: (id A)`x =[A] (id A)`y" by derive - moreover have "(id A)`x =[A] (id A)`y \<equiv> x =[A] y" by derive - ultimately have [intro]: "ap[id A, A, A, x, y]`p: x =[A] y" by simp - - show "ap[id A, A, A, x, y]`p =[x =[A] y] p: U i" by derive -qed - - -end diff --git a/Equivalence.thy b/Equivalence.thy deleted file mode 100644 index 07ef0df..0000000 --- a/Equivalence.thy +++ /dev/null @@ -1,228 +0,0 @@ -(******** -Isabelle/HoTT: Quasi-inverse and equivalence -Mar 2019 - -********) - -theory Equivalence -imports Type_Families - -begin - -section \<open>Homotopy\<close> - -definition homotopy :: "[t, t \<Rightarrow> t, t, t] \<Rightarrow> t" ("(2homotopy[_, _] _ _)" [0, 0, 1000, 1000]) -where "homotopy[A, B] f g \<equiv> \<Prod>x: A. f`x =[B x] g`x" - -declare homotopy_def [comp] - -syntax "_homotopy" :: "[t, idt, t, t, t] \<Rightarrow> t" ("(1_ ~[_: _. _]/ _)" [101, 0, 0, 0, 101] 100) -translations "f ~[x: A. B] g" \<rightleftharpoons> "(CONST homotopy) A (\<lambda>x. B) f g" - -lemma homotopy_type: - assumes [intro]: "A: U i" "B: A \<leadsto> U i" "f: \<Prod>x: A. B x" "g: \<Prod>x: A. B x" - shows "f ~[x: A. B x] g: U i" -by derive - -declare homotopy_type [intro] - -text \<open>Homotopy inverse and composition (symmetry and transitivity):\<close> - -definition hominv :: "[t, t \<Rightarrow> t, t, t] \<Rightarrow> t" ("(2hominv[_, _, _, _])") -where "hominv[A, B, f, g] \<equiv> \<lambda>H: f ~[x: A. B x] g. \<lambda>x: A. inv[B x, f`x, g`x]`(H`x)" - -lemma hominv_type: - assumes [intro]: "A: U i" "B: A \<leadsto> U i" "f: \<Prod>x: A. B x" "g: \<Prod>x: A. B x" - shows "hominv[A, B, f, g]: f ~[x: A. B x] g \<rightarrow> g ~[x: A. B x] f" -unfolding hominv_def by (derive, fold homotopy_def)+ derive - -definition homcomp :: "[t, t \<Rightarrow> t, t, t, t] \<Rightarrow> t" ("(2homcomp[_, _, _, _, _])") where - "homcomp[A, B, f, g, h] \<equiv> \<lambda>H: f ~[x: A. B x] g. \<lambda>H': g ~[x: A. B x] h. - \<lambda>x: A. pathcomp[B x, f`x, g`x, h`x]`(H`x)`(H'`x)" - -lemma homcomp_type: - assumes [intro]: - "A: U i" "B: A \<leadsto> U i" - "f: \<Prod>x: A. B x" "g: \<Prod>x: A. B x" "h: \<Prod>x: A. B x" - shows "homcomp[A, B, f, g, h]: f ~[x: A. B x] g \<rightarrow> g ~[x: A. B x] h \<rightarrow> f ~[x: A. B x] h" -unfolding homcomp_def by (derive, fold homotopy_def)+ derive - -schematic_goal fun_eq_imp_homotopy: - assumes [intro]: - "p: f =[\<Prod>x: A. B x] g" - "f: \<Prod>x: A. B x" "g: \<Prod>x: A. B x" - "A: U i" "B: A \<leadsto> U i" - shows "?prf: f ~[x: A. B x] g" -proof (path_ind' f g p) - show "\<And>f. f : \<Prod>(x: A). 
B x \<Longrightarrow> \<lambda>x: A. refl(f`x): f ~[x: A. B x] f" by derive -qed routine - -definition happly :: "[t, t \<Rightarrow> t, t, t, t] \<Rightarrow> t" -where "happly A B f g p \<equiv> indEq (\<lambda>f g. & f ~[x: A. B x] g) (\<lambda>f. \<lambda>(x: A). refl(f`x)) f g p" - -syntax "_happly" :: "[idt, t, t, t, t, t] \<Rightarrow> t" - ("(2happly[_: _. _] _ _ _)" [0, 0, 0, 1000, 1000, 1000]) -translations "happly[x: A. B] f g p" \<rightleftharpoons> "(CONST happly) A (\<lambda>x. B) f g p" - -corollary happly_type: - assumes [intro]: - "p: f =[\<Prod>x: A. B x] g" - "f: \<Prod>x: A. B x" "g: \<Prod>x: A. B x" - "A: U i" "B: A \<leadsto> U i" - shows "happly[x: A. B x] f g p: f ~[x: A. B x] g" -unfolding happly_def by (derive lems: fun_eq_imp_homotopy) - -text \<open>Homotopy and function composition:\<close> - -schematic_goal composition_homl: - assumes [intro]: - "H: f ~[x: A. B] g" - "f: A \<rightarrow> B" "g: A \<rightarrow> B" "h: B \<rightarrow> C" - "A: U i" "B: U i" "C: U i" - shows "?prf: h o[A] f ~[x: A. C] h o[A] g" -unfolding homotopy_def compose_def proof (rule Prod_routine, subst (0 1) comp) - fix x assume [intro]: "x: A" - show "ap[h, B, C, f`x, g`x]`(H`x): h`(f`x) =[C] h`(g`x)" by (routine, fold homotopy_def, fact+) -qed routine - -schematic_goal composition_homr: - assumes [intro]: - "H: f ~[x: B. C] g" - "h: A \<rightarrow> B" "f: B \<rightarrow> C" "g: B \<rightarrow> C" - "A: U i" "B: U i" "C: U i" - shows "?prf: f o[A] h ~[x: A. C] g o[A] h" -unfolding homotopy_def compose_def proof (rule Prod_routine, subst (0 1) comp) - fix x assume [intro]: "x: A" - show "H`(h`x): f`(h`x) =[C] g`(h`x)" by (routine, fold homotopy_def, routine) -qed routine - - -section \<open>Bi-invertibility\<close> - -definition biinv :: "[t, t, t] \<Rightarrow> t" ("(2biinv[_, _]/ _)") -where "biinv[A, B] f \<equiv> - (\<Sum>g: B \<rightarrow> A. g o[A] f ~[x:A. A] id A) \<times> (\<Sum>g: B \<rightarrow> A. f o[B] g ~[x: B. B] id B)" - -text \<open> -The meanings of the syntax defined above are: -\<^item> @{term "f ~[x: A. B x] g"} expresses that @{term f} and @{term g} are homotopy functions of type @{term "\<Prod>x:A. B x"}. -\<^item> @{term "biinv[A, B] f"} expresses that the function @{term f} of type @{term "A \<rightarrow> B"} is bi-invertible. -\<close> - -lemma biinv_type: - assumes [intro]: "A: U i" "B: U i" "f: A \<rightarrow> B" - shows "biinv[A, B] f: U i" -unfolding biinv_def by derive - -declare biinv_type [intro] - -schematic_goal id_is_biinv: - assumes [intro]: "A: U i" - shows "?prf: biinv[A, A] (id A)" -unfolding biinv_def proof (rule Sum_routine) - show "<id A, \<lambda>x: A. refl x>: \<Sum>(g: A \<rightarrow> A). (g o[A] id A) ~[x: A. A] (id A)" by derive - show "<id A, \<lambda>x: A. refl x>: \<Sum>(g: A \<rightarrow> A). (id A o[A] g) ~[x: A. A] (id A)" by derive -qed derive - -definition equivalence :: "[t, t] \<Rightarrow> t" (infix "\<cong>" 100) -where "A \<cong> B \<equiv> \<Sum>f: A \<rightarrow> B. biinv[A, B] f" - -schematic_goal equivalence_symmetric: - assumes [intro]: "A: U i" - shows "?prf: A \<cong> A" -unfolding equivalence_def proof (rule Sum_routine) - show "\<And>f. f : A \<rightarrow> A \<Longrightarrow> biinv[A, A] f : U i" unfolding biinv_def by derive - show "id A: A \<rightarrow> A" by routine -qed (routine add: id_is_biinv) - - -section \<open>Quasi-inverse\<close> - -definition qinv :: "[t, t, t] \<Rightarrow> t" ("(2qinv[_, _]/ _)") -where "qinv[A, B] f \<equiv> \<Sum>g: B \<rightarrow> A. (g o[A] f ~[x: A. 
A] id A) \<times> (f o[B] g ~[x: B. B] id B)" - -schematic_goal biinv_imp_qinv: - assumes [intro]: "A: U i" "B: U i" "f: A \<rightarrow> B" - shows "?prf: (biinv[A, B] f) \<rightarrow> (qinv[A,B] f)" -proof (rule Prod_routine) - -assume [intro]: "b: biinv[A, B] f" - -text \<open>Components of the witness of biinvertibility of @{term f}:\<close> - -let ?fst_of_b = - "fst[\<Sum>g: B \<rightarrow> A. g o[A] f ~[x: A. A] id A, &\<Sum>g: B \<rightarrow> A. f o[B] g ~[x: B. B] id B]" -and ?snd_of_b = - "snd[\<Sum>g: B \<rightarrow> A. g o[A] f ~[x: A. A] id A, &\<Sum>g: B \<rightarrow> A. f o[B] g ~[x: B. B] id B]" - -define g H g' H' where - "g \<equiv> fst[B \<rightarrow> A, \<lambda>g. g o[A] f ~[x: A. A] id A] ` (?fst_of_b ` b)" and - "H \<equiv> snd[B \<rightarrow> A, \<lambda>g. g o[A] f ~[x: A. A] id A] ` (?fst_of_b ` b)" and - "g' \<equiv> fst[B \<rightarrow> A, \<lambda>g. f o[B] g ~[x: B. B] id B] ` (?snd_of_b ` b)" and - "H' \<equiv> snd[B \<rightarrow> A, \<lambda>g. f o[B] g ~[x: B. B] id B] ` (?snd_of_b ` b)" - -have "H: g o[A] f ~[x: A. A] id A" -unfolding H_def g_def proof standard+ - have - "fst[\<Sum>(g: B \<rightarrow> A). g o[A] f ~[x: A. A] id A, &\<Sum>(g: B \<rightarrow> A). f o[B] g ~[x: B. B] id B] : - (biinv[A, B] f) \<rightarrow> (\<Sum>(g: B \<rightarrow> A). g o[A] f ~[g: A. A] id A)" unfolding biinv_def by derive - thus - "fst[\<Sum>(g: B \<rightarrow> A). g o[A] f ~[x: A. A] id A, &\<Sum>(g: B \<rightarrow> A). f o[B] g ~[x: B. B] id B]`b : - \<Sum>(g: B \<rightarrow> A). g o[A] f ~[g: A. A] id A" by derive rule -qed derive - -moreover have "(id A) o[B] g' \<equiv> g'" proof derive - show "g': B \<rightarrow> A" unfolding g'_def proof - have - "snd[\<Sum>(g: B \<rightarrow> A). g o[A] f ~[x: A. A] id A, &\<Sum>(g: B \<rightarrow> A). f o[B] g ~[x: B. B] id B] : - (biinv[A, B] f) \<rightarrow> (\<Sum>(g: B \<rightarrow> A). f o[B] g ~[x: B. B] id B)" unfolding biinv_def by derive - thus - "snd[\<Sum>(g: B \<rightarrow> A). g o[A] f ~[x: A. A] id A, &\<Sum>(g: B \<rightarrow> A). f o[B] g ~[x: B. B] id B]`b : - \<Sum>(g: B \<rightarrow> A). f o[B] g ~[x: B. B] id B" by derive rule - qed derive -qed - - - -section \<open>Transport, homotopy, and bi-invertibility\<close> - -schematic_goal transport_invl_hom: - assumes [intro]: - "P: A \<leadsto> U j" "A: U i" - "x: A" "y: A" "p: x =[A] y" - shows "?prf: - (transport[A, P, y, x]`(inv[A, x, y]`p)) o[P x] (transport[A, P, x, y]`p) ~[w: P x. P x] id P x" -by (rule happly_type[OF transport_invl], derive) - -schematic_goal transport_invr_hom: - assumes [intro]: - "A: U i" "P: A \<leadsto> U j" - "y: A" "x: A" "p: x =[A] y" - shows "?prf: - (transport[A, P, x, y]`p) o[P y] (transport[A, P, y, x]`(inv[A, x, y]`p)) ~[w: P y. P y] id P y" -by (rule happly_type[OF transport_invr], derive) - -declare - transport_invl_hom [intro] - transport_invr_hom [intro] - -text \<open> -The following result states that the transport of an equality @{term p} is bi-invertible, with inverse given by the transport of the inverse @{text "~p"}. 
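Concretely, unfolding @{const biinv} (a sketch using only the definitions above, not a new result): for @{term "p: A =[U i] B"} the statement below asserts inhabitation of @{text "(\<Sum>g: B \<rightarrow> A. g o[A] transport[U i, Id, A, B]`p ~[x: A. A] id A) \<times> (\<Sum>g: B \<rightarrow> A. transport[U i, Id, A, B]`p o[B] g ~[x: B. B] id B)"} and both components can be taken to be @{text "transport[U i, Id, B, A]`(inv[U i, A, B]`p)"}, paired with the homotopies @{thm transport_invl_hom} and @{thm transport_invr_hom} respectively.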
-\<close> - -schematic_goal transport_biinv: - assumes [intro]: "p: A =[U i] B" "A: U i" "B: U i" - shows "?prf: biinv[A, B] (transport[U i, Id, A, B]`p)" -unfolding biinv_def -apply (rule Sum_routine) -prefer 2 - apply (rule Sum_routine) - prefer 3 apply (rule transport_invl_hom) -prefer 9 - apply (rule Sum_routine) - prefer 3 apply (rule transport_invr_hom) -\<comment> \<open>The remaining subgoals are now handled easily\<close> -by derive - - -end diff --git a/HoTT.thy b/HoTT.thy deleted file mode 100644 index e5db673..0000000 --- a/HoTT.thy +++ /dev/null @@ -1,40 +0,0 @@ -(******** -Isabelle/HoTT -Feb 2019 - -An experimental implementation of homotopy type theory. - -********) - -theory HoTT -imports - -(* Basic theories *) -HoTT_Base -HoTT_Methods - -(* Types *) -Eq -Nat -Prod -Sum -More_Types - -(* Derived definitions and properties *) -Projections -Univalence - -begin - -text \<open>Rule bundles:\<close> - -lemmas intros = - Nat_intro_0 Nat_intro_succ Prod_intro Sum_intro Eq_intro Cprd_intro_inl Cprd_intro_inr Unit_intro - -lemmas elims = - Nat_elim Prod_elim Sum_elim Eq_elim Cprd_elim Unit_elim Null_elim - -lemmas routines = - Nat_routine Prod_routine Sum_routine Eq_routine Cprd_routine Unit_routine Null_routine - -end diff --git a/HoTT_Base.thy b/HoTT_Base.thy deleted file mode 100644 index 9987b78..0000000 --- a/HoTT_Base.thy +++ /dev/null @@ -1,103 +0,0 @@ -(******** -Isabelle/HoTT: Basic logical definitions and notation -Feb 2019 - -This file completes the basic logical and functional setup of the HoTT logic. It defines: - -* The universe hierarchy and its governing rules. -* Some notational abbreviations. -* Named theorems for later use by proof methods. - -********) - -theory HoTT_Base -imports Pure - -begin - - -section \<open>Basic setup\<close> - -declare[[names_short]] -declare[[eta_contract=false]] \<comment> \<open>Do not eta-contract\<close> - -typedecl t \<comment> \<open>Type of object-logic terms (which includes the types)\<close> - -judgment has_type :: "[t, t] \<Rightarrow> prop" ("(2_ :/ _)") - - -section \<open>Universes\<close> - -typedecl ord \<comment> \<open>Type of meta-numerals\<close> - -axiomatization - O :: ord and - Suc :: "ord \<Rightarrow> ord" and - lt :: "[ord, ord] \<Rightarrow> prop" (infix "<" 999) and - leq :: "[ord, ord] \<Rightarrow> prop" (infix "\<le>" 999) -where - lt_Suc: "n < (Suc n)" and - lt_trans: "\<lbrakk>m1 < m2; m2 < m3\<rbrakk> \<Longrightarrow> m1 < m3" and - leq_min: "O \<le> n" - -declare - lt_Suc [intro!] - leq_min [intro!] - lt_trans [intro] - -axiomatization - U :: "ord \<Rightarrow> t" -where - U_hierarchy: "i < j \<Longrightarrow> U i: U j" and - U_cumulative: "\<lbrakk>A: U i; i < j\<rbrakk> \<Longrightarrow> A: U j" and - U_cumulative': "\<lbrakk>A: U i; i \<le> j\<rbrakk> \<Longrightarrow> A: U j" - -lemma U_hierarchy': "U i: U (Suc i)" by (fact U_hierarchy[OF lt_Suc]) - -declare U_hierarchy' [intro!] - -text \<open> -Using method @{method rule} with @{thm U_cumulative} and @{thm U_cumulative'} is unsafe: if applied blindly it will very easily lead to non-termination. -Instead use @{method elim}, or instantiate the rules suitably. - -@{thm U_cumulative'} is an alternative rule used by the method @{theory_text cumulativity} in @{file HoTT_Methods.thy}. - -@{thm U_hierarchy'} is declared with safe @{attribute intro} to be used by the method @{theory_text derive} to handle the universe hierarchy. -Note that @{thm U_hierarchy} is unsafe. 
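As a small worked sketch using only the axioms above, a type may be lifted two universe levels by eliminating with @{thm U_cumulative} and then discharging the ordinal constraint explicitly:

@{theory_text \<open>
lemma "A: U i \<Longrightarrow> A: U (Suc (Suc i))"
by (elim U_cumulative, rule lt_trans, rule lt_Suc, rule lt_Suc)
\<close>}

The explicit @{thm lt_trans} and @{thm lt_Suc} steps pin down the intermediate universe level; it is precisely this choice that blind applications of @{thm U_cumulative} cannot make, which is why the rule is unsafe.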
-\<close> - - -section \<open>Notation\<close> - -abbreviation (input) constraint :: "[t \<Rightarrow> t, t, t] \<Rightarrow> prop" ("(1_:/ _ \<leadsto> _)") -where "f: A \<leadsto> B \<equiv> (\<And>x. x: A \<Longrightarrow> f x: B)" - -text \<open>We use the notation @{prop "B: A \<leadsto> U i"} to abbreviate type families.\<close> - -abbreviation (input) K_combinator :: "'a \<Rightarrow> 'b \<Rightarrow> 'a" ("&_" [0] 3) -where "&A \<equiv> \<lambda>_. A" - -abbreviation (input) Id :: "t \<Rightarrow> t" where "Id \<equiv> \<lambda>x. x" -\<comment> \<open>NOTE: removing the input attribute causes term evaluations and even theorem attribute declarations to loop! Possible bug?\<close> - - -section \<open>Named theorems\<close> - -named_theorems form -named_theorems comp -named_theorems cong - -text \<open> -The named theorems above will be used by proof methods defined in @{file HoTT_Methods.thy}. - -@{attribute form} declares type formation rules. -These are mainly used by the \<open>cumulativity\<close> method, which lifts types into higher universes. - -@{attribute comp} declares computation rules, which are used by the \<open>compute\<close> method, and may also be passed to invocations of the method \<open>subst\<close> to perform equational rewriting. - -@{attribute cong} declares congruence rules, the definitional equality rules of the type theory. -\<close> - -(* Todo: Set up the Simplifier! *) - -end diff --git a/HoTT_Methods.thy b/HoTT_Methods.thy deleted file mode 100644 index 0199a49..0000000 --- a/HoTT_Methods.thy +++ /dev/null @@ -1,91 +0,0 @@ -(******** -Isabelle/HoTT: Proof methods -Jan 2019 - -Set up various proof methods and auxiliary functionality. - -********) - -theory HoTT_Methods -imports HoTT_Base "HOL-Eisbach.Eisbach" "HOL-Eisbach.Eisbach_Tools" - -begin - - -section \<open>Substitution and simplification\<close> - -ML_file "~~/src/Tools/misc_legacy.ML" -ML_file "~~/src/Tools/IsaPlanner/isand.ML" -ML_file "~~/src/Tools/IsaPlanner/rw_inst.ML" -ML_file "~~/src/Tools/IsaPlanner/zipper.ML" -ML_file "~~/src/Tools/eqsubst.ML" -\<comment> \<open>Import the @{method subst} method, used for substituting definitional equalities.\<close> - -text \<open> -Method @{theory_text compute} performs single-step simplifications, using any rules declared @{attribute comp} in the context. -Subgoals arising from a substitution are solved by assumption and standard rules, as well as rules passed using @{theory_text add}. - -Note that the solving is sometimes a little eager; for more manual control one should use @{method subst} directly. -\<close> - -method compute uses add declares comp = (subst comp; (rule add | assumption | rule)?) - - -section \<open>Handling universes\<close> - -text \<open> -Methods @{theory_text provelt}, @{theory_text hierarchy}, and @{theory_text cumulativity} prove propositions of the form -\<^item> \<open>n < (Suc (... (Suc n) ...))\<close>, -\<^item> \<open>U i: U (Suc (... (Suc i) ...))\<close>, and -\<^item> @{prop "A: U i \<Longrightarrow> A: U j"}, where @{prop "i \<le> j"}, -respectively. 
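For instance, the following sketch goals (assuming only the axioms of @{file HoTT_Base.thy}) illustrate the intended use of each method:

@{theory_text \<open>
lemma "n < Suc (Suc n)" by provelt
lemma "U i: U (Suc (Suc i))" by hierarchy
lemma "A: U i \<Longrightarrow> A: U (Suc i)" by cumulativity
\<close>}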
-\<close> - -method provelt = (rule lt_Suc | (rule lt_trans, (rule lt_Suc)+)+) - -method hierarchy = (rule U_hierarchy, provelt) - -method cumulativity declares form = ( - ((elim U_cumulative' | (rule U_cumulative', rule form)), rule leq_min) | - ((elim U_cumulative | (rule U_cumulative, rule form)), provelt) -) - - -section \<open>Deriving typing judgments\<close> - -method routine uses add = (rule add | assumption | rule)+ - -text \<open> -The method @{method routine} proves typing judgments @{prop "a: A"} using the rules declared @{attribute intro} in the respective theory files, as well as any additional lemmas provided with the modifier @{theory_text add}. -\<close> - - -section \<open>Derivation search\<close> - -text \<open> -The method @{theory_text derive} combines @{method routine} and @{method compute} to search for derivations of judgments. -It also handles universes using @{method cumulativity}. -The method @{method hierarchy} has been observed to cause looping in previous versions, and is hence no longer part of @{theory_text derive}. -\<close> - -method derive uses lems unfold declares comp = - (unfold unfold | routine add: lems | compute add: lems comp: comp | cumulativity form: lems)+ - -section \<open>Additional method combinators\<close> - -text \<open> -The ML expression @{ML_text "repeat tac"} below yields a @{ML_type "(Proof.context -> Proof.method) context_parser"}, which may be passed to @{command method_setup} to set up a method that scans for an integer n and executes the tactic returned by @{ML_text tac} n times. -See the setup of method @{text quantify} in @{file Prod.thy} for an example. -\<close> - -ML \<open> -fun repeat (tac: Proof.context -> tactic) = - let - fun cparser_of parser (ctxt, toks) = parser toks ||> (fn toks => (ctxt, toks)) - in - cparser_of Args.text >> (fn n => fn ctxt => fn facts => - SIMPLE_METHOD (REPEAT_DETERM_N (the (Int.fromString n))(tac ctxt)) facts) - end -\<close> - -end @@ -1,3 +1,17 @@ +Isabelle/HoTT +Copyright (c) 2018–2020 Joshua Chen + +All files are licensed under the terms of the +GNU Lesser General Public License v3.0 reproduced below, +WITH THE EXCEPTION OF the following files: + spartan/lib/focus.ML + spartan/lib/goals.ML + +These have been modified from source code which is part of the official +Isabelle distribution (https://isabelle.in.tum.de/) and licensed under the +Isabelle license, also reproduced below. + +================================================================================ GNU LESSER GENERAL PUBLIC LICENSE Version 3, 29 June 2007 @@ -163,3 +177,42 @@ whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library. + +================================================================================ +ISABELLE COPYRIGHT NOTICE, LICENCE AND DISCLAIMER. + +Copyright (c) 1986-2019, + University of Cambridge, + Technische Universitaet Muenchen, + and contributors. + + All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +* Neither the name of the University of Cambridge or the Technische +Universitaet Muenchen nor the names of their contributors may be used +to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/More_Types.thy b/More_Types.thy deleted file mode 100644 index ebbe10b..0000000 --- a/More_Types.thy +++ /dev/null @@ -1,83 +0,0 @@ -(******** -Isabelle/HoTT: Coproduct, unit, and empty types -Feb 2019 - -********) - -theory More_Types -imports HoTT_Base - -begin - - -section \<open>Coproduct type\<close> - -axiomatization - Cprd :: "[t, t] \<Rightarrow> t" (infixr "+" 50) and - inl :: "[t, t, t] \<Rightarrow> t" and - inr :: "[t, t, t] \<Rightarrow> t" and - indCprd :: "[t \<Rightarrow> t, t \<Rightarrow> t, t \<Rightarrow> t, t] \<Rightarrow> t" -where - Cprd_form: "\<lbrakk>A: U i; B: U i\<rbrakk> \<Longrightarrow> A + B: U i" and - - Cprd_intro_inl: "\<lbrakk>a: A; B: U i\<rbrakk> \<Longrightarrow> inl A B a: A + B" and - - Cprd_intro_inr: "\<lbrakk>b: B; A: U i\<rbrakk> \<Longrightarrow> inr A B b: A + B" and - - Cprd_elim: "\<lbrakk> - u: A + B; - C: A + B \<leadsto> U i; - \<And>x. x: A \<Longrightarrow> c x: C (inl A B x); - \<And>y. y: B \<Longrightarrow> d y: C (inr A B y) \<rbrakk> \<Longrightarrow> indCprd C c d u: C u" and - - Cprd_cmp_inl: "\<lbrakk> - a: A; - C: A + B \<leadsto> U i; - \<And>x. x: A \<Longrightarrow> c x: C (inl A B x); - \<And>y. y: B \<Longrightarrow> d y: C (inr A B y) \<rbrakk> \<Longrightarrow> indCprd C c d (inl A B a) \<equiv> c a" and - - Cprd_cmp_inr: "\<lbrakk> - b: B; - C: A + B \<leadsto> U i; - \<And>x. x: A \<Longrightarrow> c x: C (inl A B x); - \<And>y. 
y: B \<Longrightarrow> d y: C (inr A B y) \<rbrakk> \<Longrightarrow> indCprd C c d (inr A B b) \<equiv> d b" - -lemmas Cprd_form [form] -lemmas Cprd_routine [intro] = Cprd_form Cprd_intro_inl Cprd_intro_inr Cprd_elim -lemmas Cprd_comp [comp] = Cprd_cmp_inl Cprd_cmp_inr - - -section \<open>Unit type\<close> - -axiomatization - Unit :: t and - pt :: t and - indUnit :: "[t \<Rightarrow> t, t, t] \<Rightarrow> t" -where - Unit_form: "Unit: U O" and - - Unit_intro: "pt: Unit" and - - Unit_elim: "\<lbrakk>x: Unit; c: C pt; C: Unit \<leadsto> U i\<rbrakk> \<Longrightarrow> indUnit C c x: C x" and - - Unit_cmp: "\<lbrakk>c: C pt; C: Unit \<leadsto> U i\<rbrakk> \<Longrightarrow> indUnit C c pt \<equiv> c" - -lemmas Unit_form [form] -lemmas Unit_routine [intro] = Unit_form Unit_intro Unit_elim -lemmas Unit_comp [comp] = Unit_cmp - - -section \<open>Empty type\<close> - -axiomatization - Null :: t and - indNull :: "[t \<Rightarrow> t, t] \<Rightarrow> t" -where - Null_form: "Null: U O" and - - Null_elim: "\<lbrakk>z: Null; C: Null \<leadsto> U i\<rbrakk> \<Longrightarrow> indNull C z: C z" - -lemmas Null_form [form] -lemmas Null_routine [intro] = Null_form Null_elim - -end diff --git a/Nat.thy b/Nat.thy deleted file mode 100644 index cbfbe2d..0000000 --- a/Nat.thy +++ /dev/null @@ -1,46 +0,0 @@ -(******** -Isabelle/HoTT: Natural numbers -Feb 2019 - -********) - -theory Nat -imports HoTT_Base - -begin - -axiomatization - Nat :: t and - zero :: t ("0") and - succ :: "t \<Rightarrow> t" and - indNat :: "[t \<Rightarrow> t, [t, t] \<Rightarrow> t, t, t] \<Rightarrow> t" -where - Nat_form: "Nat: U O" and - - Nat_intro_0: "0: Nat" and - - Nat_intro_succ: "n: Nat \<Longrightarrow> succ n: Nat" and - - Nat_elim: "\<lbrakk> - a: C 0; - n: Nat; - C: Nat \<leadsto> U i; - \<And>n c. \<lbrakk>n: Nat; c: C n\<rbrakk> \<Longrightarrow> f n c: C (succ n) \<rbrakk> \<Longrightarrow> indNat C f a n: C n" and - - Nat_cmp_0: "\<lbrakk> - a: C 0; - C: Nat \<leadsto> U i; - \<And>n c. \<lbrakk>n: Nat; c: C n\<rbrakk> \<Longrightarrow> f n c: C (succ n) \<rbrakk> \<Longrightarrow> indNat C f a 0 \<equiv> a" and - - Nat_cmp_succ: "\<lbrakk> - a: C 0; - n: Nat; - C: Nat \<leadsto> U i; - \<And>n c. \<lbrakk>n: Nat; c: C n\<rbrakk> \<Longrightarrow> f n c: C (succ n) - \<rbrakk> \<Longrightarrow> indNat C f a (succ n) \<equiv> f n (indNat C f a n)" - -lemmas Nat_form [form] -lemmas Nat_routine [intro] = Nat_form Nat_intro_0 Nat_intro_succ Nat_elim -lemmas Nat_comp [comp] = Nat_cmp_0 Nat_cmp_succ - -end diff --git a/Prod.thy b/Prod.thy deleted file mode 100644 index a35138c..0000000 --- a/Prod.thy +++ /dev/null @@ -1,169 +0,0 @@ -(******** -Isabelle/HoTT: Dependent product (dependent function) -Feb 2019 - -********) - -theory Prod -imports HoTT_Base HoTT_Methods - -begin - - -section \<open>Basic type definitions\<close> - -axiomatization - Prod :: "[t, t \<Rightarrow> t] \<Rightarrow> t" and - lam :: "[t, t \<Rightarrow> t] \<Rightarrow> t" and - app :: "[t, t] \<Rightarrow> t" ("(2_`/_)" [120, 121] 120) - \<comment> \<open>Application should bind tighter than abstraction.\<close> - -syntax - "_Prod" :: "[idt, t, t] \<Rightarrow> t" ("(2\<Prod>'(_: _')./ _)" 30) - "_Prod'" :: "[idt, t, t] \<Rightarrow> t" ("(2\<Prod>_: _./ _)" 30) - "_lam" :: "[idt, t, t] \<Rightarrow> t" ("(2\<lambda>'(_: _')./ _)" 30) - "_lam'" :: "[idt, t, t] \<Rightarrow> t" ("(2\<lambda>_: _./ _)" 30) -translations - "\<Prod>x: A. B" \<rightleftharpoons> "(CONST Prod) A (\<lambda>x. B)" - "\<Prod>(x: A).
B" \<rightleftharpoons> "(CONST Prod) A (\<lambda>x. B)" - "\<lambda>(x: A). b" \<rightleftharpoons> "(CONST lam) A (\<lambda>x. b)" - "\<lambda>x: A. b" \<rightleftharpoons> "(CONST lam) A (\<lambda>x. b)" - -text \<open> -The syntax translations above bind the variable @{term x} in the expressions @{term B} and @{term b}. -\<close> - -text \<open>Non-dependent functions are a special case:\<close> - -abbreviation Fun :: "[t, t] \<Rightarrow> t" (infixr "\<rightarrow>" 40) -where "A \<rightarrow> B \<equiv> \<Prod>_: A. B" - -axiomatization where -\<comment> \<open>Type rules\<close> - - Prod_form: "\<lbrakk>A: U i; B: A \<leadsto> U i\<rbrakk> \<Longrightarrow> \<Prod>x: A. B x: U i" and - - Prod_intro: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x: B x; A: U i\<rbrakk> \<Longrightarrow> \<lambda>x: A. b x: \<Prod>x: A. B x" and - - Prod_elim: "\<lbrakk>f: \<Prod>x: A. B x; a: A\<rbrakk> \<Longrightarrow> f`a: B a" and - - Prod_cmp: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x: B x; a: A\<rbrakk> \<Longrightarrow> (\<lambda>x: A. b x)`a \<equiv> b a" and - - Prod_uniq: "f: \<Prod>x: A. B x \<Longrightarrow> \<lambda>x: A. f`x \<equiv> f" and - -\<comment> \<open>Congruence rules\<close> - - Prod_form_eq: "\<lbrakk>A: U i; B: A \<leadsto> U i; C: A \<leadsto> U i; \<And>x. x: A \<Longrightarrow> B x \<equiv> C x\<rbrakk> - \<Longrightarrow> \<Prod>x: A. B x \<equiv> \<Prod>x: A. C x" and - - Prod_intro_eq: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x \<equiv> c x; A: U i\<rbrakk> \<Longrightarrow> \<lambda>x: A. b x \<equiv> \<lambda>x: A. c x" - -text \<open> -The Pure rules for \<open>\<equiv>\<close> only let us judge strict syntactic equality of object lambda expressions. -The actual definitional equality rule in the type theory is @{thm Prod_intro_eq}. -Note that this is a separate rule from function extensionality. -\<close> - -lemmas Prod_form [form] -lemmas Prod_routine [intro] = Prod_form Prod_intro Prod_elim -lemmas Prod_comp [comp] = Prod_cmp Prod_uniq -lemmas Prod_cong [cong] = Prod_form_eq Prod_intro_eq - - -section \<open>Function composition\<close> - -definition compose :: "[t, t, t] \<Rightarrow> t" -where "compose A g f \<equiv> \<lambda>x: A. g`(f`x)" - -syntax "_compose" :: "[t, t, t] \<Rightarrow> t" ("(2_ o[_]/ _)" [111, 0, 110] 110) -translations "g o[A] f" \<rightleftharpoons> "(CONST compose) A g f" - -text \<open>The composition @{term "g o[A] f"} is annotated with the domain @{term A} of @{term f}.\<close> - -syntax "_compose'" :: "[t, t] \<Rightarrow> t" (infixr "o" 110) - -text \<open>Pretty-printing switch for composition; hides domain type information.\<close> - -ML \<open>val pretty_compose = Attrib.setup_config_bool @{binding "pretty_compose"} (K true)\<close> - -print_translation \<open> -let fun compose_tr' ctxt [A, g, f] = - if Config.get ctxt pretty_compose - then Syntax.const @{syntax_const "_compose'"} $ g $ f - else @{const compose} $ A $ g $ f -in - [(@{const_syntax compose}, compose_tr')] -end -\<close> - -lemma compose_type: - assumes - "A: U i" and "B: U i" and "C: B \<leadsto> U i" and - "f: A \<rightarrow> B" and "g: \<Prod>x: B. C x" - shows "g o[A] f: \<Prod>x: A. C (f`x)" -unfolding compose_def by (derive lems: assms) - -lemma compose_comp: - assumes "A: U i" and "\<And>x. x: A \<Longrightarrow> b x: B" and "\<And>x. x: B \<Longrightarrow> c x: C x" - shows "(\<lambda>x: B. c x) o[A] (\<lambda>x: A. b x) \<equiv> \<lambda>x: A. 
c (b x)" -unfolding compose_def by (derive lems: assms cong) - -declare - compose_type [intro] - compose_comp [comp] - -lemma compose_assoc: - assumes "A: U i" and "f: A \<rightarrow> B" and "g: B \<rightarrow> C" and "h: \<Prod>x: C. D x" - shows "(h o[B] g) o[A] f \<equiv> h o[A] g o[A] f" -unfolding compose_def by (derive lems: assms cong) - -abbreviation id :: "t \<Rightarrow> t" ("(id _)" [115] 114) where "id A \<equiv> \<lambda>x: A. x" - -lemma id_type: "\<And>A. A: U i \<Longrightarrow> id A: A \<rightarrow> A" by derive - -lemma id_compl: - assumes [intro]: "A: U i" "B: U i" "f: A \<rightarrow> B" - shows "id B o[A] f \<equiv> f" -unfolding compose_def proof - - { - fix x assume [intro]: "x: A" - have "(id B)`(f`x) \<equiv> f`x" by derive - } - hence "\<lambda>x: A. (id B)`(f`x) \<equiv> \<lambda>x: A. f`x" by (derive lems: cong) derive - also have "\<lambda>x: A. f`x \<equiv> f" by derive - finally show "\<lambda>(x: A). (id B)`(f`x) \<equiv> f" by simp -qed - -lemma id_compr: - assumes [intro]: "A: U i" "B: U i" "f: A \<rightarrow> B" - shows "f o[A] id A \<equiv> f" -unfolding compose_def proof - - { - fix x assume [intro]: "x: A" - have "f`((id A)`x) \<equiv> f`x" by derive - } - hence "\<lambda>x: A. f`((id A)`x) \<equiv> \<lambda>x: A. f`x" by (derive lems: cong) derive - also have "\<lambda>x: A. f`x \<equiv> f" by derive - finally show "\<lambda>x: A. f`((id A)`x) \<equiv> f" by simp -qed - -declare id_type [intro] -lemmas id_comp [comp] = id_compl id_compr - -section \<open>Universal quantification\<close> - -text \<open> -It will often be useful to convert a proof goal asserting the inhabitation of a dependent product to one that instead uses Pure universal quantification. - -Method @{theory_text quantify_all} converts the goal -@{text "t: \<Prod>x1: A1. ... \<Prod>xn: An. B x1 ... xn"}, -where @{term B} is not a product, to -@{text "\<And>x1 ... xn . \<lbrakk>x1: A1; ...; xn: An\<rbrakk> \<Longrightarrow> ?b x1 ... xn: B x1 ... xn"}. - -Method @{theory_text "quantify k"} does the same, but only for the first k unknowns. -\<close> - -method quantify_all = (rule Prod_intro)+ -method_setup quantify = \<open>repeat (fn ctxt => Method.rule_tac ctxt [@{thm Prod_intro}] [] 1)\<close> - -end diff --git a/Projections.thy b/Projections.thy deleted file mode 100644 index c89b569..0000000 --- a/Projections.thy +++ /dev/null @@ -1,43 +0,0 @@ -(******** -Isabelle/HoTT: Projections -Feb 2019 - -Projection functions for the dependent sum type. - -********) - -theory Projections -imports Prod Sum - -begin - -definition fst ("(2fst[_,/ _])") -where "fst[A, B] \<equiv> \<lambda>(p: \<Sum>x: A. B x). indSum (\<lambda>_. A) (\<lambda>x y. x) p" - -definition snd ("(2snd[_,/ _])") -where "snd[A, B] \<equiv> \<lambda>(p: \<Sum>x: A. B x). indSum (\<lambda>p. B (fst[A, B]`p)) (\<lambda>x y. y) p" - -lemma fst_type: - assumes [intro]: "A: U i" "B: A \<leadsto> U i" - shows "fst[A, B]: (\<Sum>x: A. B x) \<rightarrow> A" -unfolding fst_def by derive - -lemma fst_cmp: - assumes [intro]: "A: U i" "B: A \<leadsto> U i" "a: A" "b: B a" - shows "fst[A, B]`<a, b> \<equiv> a" -unfolding fst_def by derive - -lemma snd_type: - assumes [intro]: "A: U i" "B: A \<leadsto> U i" - shows "snd[A, B]: \<Prod>(p: \<Sum>x: A. B x). 
B (fst[A,B]`p)" -unfolding snd_def by (derive lems: fst_type comp: fst_cmp) - -lemma snd_cmp: - assumes [intro]: "A: U i" "B: A \<leadsto> U i" "a: A" "b: B a" - shows "snd[A, B]`<a, b> \<equiv> b" -unfolding snd_def proof derive qed (derive lems: assms fst_type comp: fst_cmp) - -lemmas proj_type [intro] = fst_type snd_type -lemmas proj_comp [comp] = fst_cmp snd_cmp - -end @@ -2,27 +2,21 @@ An experimental implementation of [homotopy type theory](https://en.wikipedia.org/wiki/Homotopy_type_theory) in [Isabelle](https://isabelle.in.tum.de/). -## Caveat lector -The convenience and readability of formalizations in this library is taking a hit as we approach the limit of pre-existing functionality available in Isabelle. -Development has thus, for the moment, moved to [Isabelle/Spartan](https://github.com/jaycech3n/Isabelle-Spartan) to build more automation and improve the framework. - ### Usage -The default entry point for the logic is `HoTT`, import this to load everything else. +Isabelle/HoTT is compatible with Isabelle2019. +To use, add the Isabelle/HoTT folder path to `.isabelle/Isabelle2019/ROOTS` (on Mac/Linux/cygwin installations): -You can also load theories selectively, in this case, importing `HoTT_Base` is required and `HoTT_Methods` is helpful. +``` +echo <path/to/Isabelle/HoTT> >> ~/.isabelle/Isabelle2019/ROOTS +``` ### What (and why) is this? Isabelle/HoTT is a first attempt at bringing [homotopy type theory](https://en.wikipedia.org/wiki/Homotopy_type_theory) to the [Isabelle](https://isabelle.in.tum.de/) interactive theorem prover. -In its current state it is best viewed as a formalization of HoTT within Isabelle/Pure, largely following the development of the theory in the [Homotopy Type Theory book](https://homotopytypetheory.org/book/). +It largely follows the development of the theory in the [Homotopy Type Theory book](https://homotopytypetheory.org/book/). Work is slowly ongoing to develop the logic into a fully-featured framework in which one can formulate and prove mathematical statements, in the style of the univalent foundations school. While Isabelle has provided support for object logics based on Martin-Löf type theory since the beginning, these have largely been ignored in favor of Isabelle/HOL. Thus this project is also an experiment in creating a viable framework, based on dependent type theory, inside the simple type theoretic foundations of Isabelle/Pure. -A writeup of some theoretical considerations in this implementation is in my [Masters thesis](https://arxiv.org/abs/1911.00399). - -### License - -GNU LGPLv3 @@ -1,25 +1,24 @@ -chapter HoTT - -session HoTT = Pure + - description {* - Homotopy type theory based on intensional Martin-Löf type theory, with a cumulative universe - hierarchy à la Russell. - - Follows the development of the theory in - The Univalent Foundations Program, - Homotopy Type Theory: Univalent Foundations of Mathematics, - Institute for Advanced Study, (2013). - Available online at https://homotopytypetheory.org/book. - - Author: Joshua Chen, University of Bonn (2018) - *} +session Spartan in "spartan/theories" = Pure + + description " + Spartan type theory: a minimal dependent type theory based on + intensional Martin-Löf type theory with cumulative Russell-style universes, + Pi, Sigma and identity types. 
+ " sessions "HOL-Eisbach" theories - HoTT (global) - HoTT_Base - HoTT_Methods - "tests/Test" - "ex/Methods" - "ex/Synthesis" - "ex/Book/Ch1" + Spartan (global) + Identity + Equivalence + +session HoTT in hott = Spartan + + description " + Homotopy type theory, following the development in + The Univalent Foundations Program, + Homotopy Type Theory: Univalent Foundations of Mathematics, + Institute for Advanced Study, (2013). + Available online at https://homotopytypetheory.org/book. + " + theories + Basic_Types + Nat diff --git a/Sum.thy b/Sum.thy deleted file mode 100644 index a1c0d34..0000000 --- a/Sum.thy +++ /dev/null @@ -1,56 +0,0 @@ -(******** -Isabelle/HoTT: Dependent sum (dependent pair) -Feb 2019 - -********) - -theory Sum -imports HoTT_Base - -begin - -axiomatization - Sum :: "[t, t \<Rightarrow> t] \<Rightarrow> t" and - pair :: "[t, t] \<Rightarrow> t" ("(2<_,/ _>)") and - indSum :: "[t \<Rightarrow> t, [t, t] \<Rightarrow> t, t] \<Rightarrow> t" - -syntax - "_Sum" :: "[idt, t, t] \<Rightarrow> t" ("(2\<Sum>'(_: _')./ _)" 20) - "_Sum'" :: "[idt, t, t] \<Rightarrow> t" ("(2\<Sum>_: _./ _)" 20) -translations - "\<Sum>(x: A). B" \<rightleftharpoons> "(CONST Sum) A (\<lambda>x. B)" - "\<Sum>x: A. B" \<rightleftharpoons> "(CONST Sum) A (\<lambda>x. B)" - -abbreviation Pair :: "[t, t] \<Rightarrow> t" (infixr "\<times>" 50) - where "A \<times> B \<equiv> \<Sum>_: A. B" - -axiomatization where -\<comment> \<open>Type rules\<close> - - Sum_form: "\<lbrakk>A: U i; \<And>x. x: A \<Longrightarrow> B x: U i\<rbrakk> \<Longrightarrow> \<Sum>x: A. B x: U i" and - - Sum_intro: "\<lbrakk>\<And>x. x: A \<Longrightarrow> B x: U i; a: A; b: B a\<rbrakk> \<Longrightarrow> <a, b>: \<Sum>x: A. B x" and - - Sum_elim: "\<lbrakk> - p: \<Sum>x: A. B x; - C: \<Sum>x: A. B x \<leadsto> U i; - \<And>x y. \<lbrakk>x: A; y: B x\<rbrakk> \<Longrightarrow> f x y: C <x,y> \<rbrakk> \<Longrightarrow> indSum C f p: C p" and - - Sum_cmp: "\<lbrakk> - a: A; - b: B a; - B: A \<leadsto> U i; - C: \<Sum>x: A. B x \<leadsto> U i; - \<And>x y. \<lbrakk>x: A; y: B x\<rbrakk> \<Longrightarrow> f x y: C <x,y> \<rbrakk> \<Longrightarrow> indSum C f <a, b> \<equiv> f a b" and - -\<comment> \<open>Congruence rules\<close> - - Sum_form_eq: "\<lbrakk>A: U i; B: A \<leadsto> U i; C: A \<leadsto> U i; \<And>x. x: A \<Longrightarrow> B x \<equiv> C x\<rbrakk> - \<Longrightarrow> \<Sum>x: A. B x \<equiv> \<Sum>x: A. C x" - -lemmas Sum_form [form] -lemmas Sum_routine [intro] = Sum_form Sum_intro Sum_elim -lemmas Sum_comp [comp] = Sum_cmp -lemmas Sum_cong [cong] = Sum_form_eq - -end diff --git a/Type_Families.thy b/Type_Families.thy deleted file mode 100644 index b9e1049..0000000 --- a/Type_Families.thy +++ /dev/null @@ -1,208 +0,0 @@ -(******** -Isabelle/HoTT: Type families as fibrations -Feb 2019 - -Various results viewing type families as fibrations: transport, path lifting, dependent map etc. - -********) - -theory Type_Families -imports Eq Projections - -begin - - -section \<open>Transport\<close> - -schematic_goal transport: - assumes [intro]: - "A: U i" "P: A \<leadsto> U j" - "x: A" "y: A" "p: x =[A] y" - shows "?prf: P x \<rightarrow> P y" -proof (path_ind' x y p) - show "\<And>x. x: A \<Longrightarrow> id P x: P x \<rightarrow> P x" by derive -qed routine - -definition transport :: "[t, t \<Rightarrow> t, t, t] \<Rightarrow> t" ("(2transport[_, _, _, _])") -where "transport[A, P, x, y] \<equiv> \<lambda>p: x =[A] y. indEq (\<lambda>a b. & (P a \<rightarrow> P b)) (\<lambda>x. 
id P x) x y p" - -syntax "_transport'" :: "t \<Rightarrow> t" ("transport[_]") - -ML \<open>val pretty_transport = Attrib.setup_config_bool @{binding "pretty_transport"} (K true)\<close> - -print_translation \<open> -let fun transport_tr' ctxt [A, P, x, y] = - if Config.get ctxt pretty_transport - then Syntax.const @{syntax_const "_transport'"} $ P - else @{const transport} $ A $ P $ x $ y -in - [(@{const_syntax transport}, transport_tr')] -end -\<close> - -corollary transport_type: - assumes [intro]: "A: U i" "P: A \<leadsto> U j" "x: A" "y: A" - shows "transport[A, P, x, y]: x =[A] y \<rightarrow> P x \<rightarrow> P y" -unfolding transport_def by derive - -lemma transport_comp: - assumes [intro]: "A: U i" "P: A \<leadsto> U j" "a: A" - shows "transport[A, P, a, a]`(refl a) \<equiv> id P a" -unfolding transport_def by derive - -declare - transport_type [intro] - transport_comp [comp] - -schematic_goal transport_invl: - assumes [intro]: - "A: U i" "P: A \<leadsto> U j" - "x: A" "y: A" "p: x =[A] y" - shows "?prf: - (transport[A, P, y, x]`(inv[A, x, y]`p)) o[P x] (transport[A, P, x, y]`p) =[P x \<rightarrow> P x] id P x" -proof (path_ind' x y p) - fix x assume [intro]: "x: A" - show - "refl (id P x) : - transport[A, P, x, x]`(inv[A, x, x]`(refl x)) o[P x] (transport[A, P, x, x]`(refl x)) - =[P x \<rightarrow> P x] id P x" - by derive - - fix y p assume [intro]: "y: A" "p: x =[A] y" - show - "transport[A, P, y, x]`(inv[A, x, y]`p) o[P x] transport[A, P, x, y]`p - =[P x \<rightarrow> P x] id P x : U j" - by derive -qed - -schematic_goal transport_invr: - assumes [intro]: - "A: U i" "P: A \<leadsto> U j" - "x: A" "y: A" "p: x =[A] y" - shows "?prf: - (transport[A, P, x, y]`p) o[P y] (transport[A, P, y, x]`(inv[A, x, y]`p)) =[P y \<rightarrow> P y] id P y" -proof (path_ind' x y p) - fix x assume [intro]: "x: A" - show - "refl (id P x) : - (transport[A, P, x, x]`(refl x)) o[P x] transport[A, P, x, x]`(inv[A, x, x]`(refl x)) - =[P x \<rightarrow> P x] id P x" - by derive - - fix y p assume [intro]: "y: A" "p: x =[A] y" - show - "transport[A, P, x, y]`p o[P y] transport[A, P, y, x]`(inv[A, x, y]`p) - =[P y \<rightarrow> P y] id P y : U j" - by derive -qed - -declare - transport_invl [intro] - transport_invr [intro] - - -section \<open>Path lifting\<close> - -schematic_goal path_lifting: - assumes [intro]: - "P: A \<leadsto> U i" "A: U i" - "x: A" "y: A" "p: x =[A] y" - shows "?prf: \<Prod>u: P x. <x, u> =[\<Sum>x: A. P x] <y, transport[A, P, x, y]`p`u>" -proof (path_ind' x y p, rule Prod_routine) - fix x u assume [intro]: "x: A" "u: P x" - have "(transport[A, P, x, x]`(refl x))`u \<equiv> u" by derive - thus "(refl <x, u>): <x, u> =[\<Sum>(x: A). P x] <x, transport[A, P, x, x]`(refl x)`u>" - proof simp qed routine -qed routine - -definition lift :: "[t, t \<Rightarrow> t, t, t] \<Rightarrow> t" ("(2lift[_, _, _, _])") -where - "lift[A, P, x, y] \<equiv> \<lambda>u: P x. \<lambda>p: x =[A] y. (indEq - (\<lambda>x y p. \<Prod>u: P x. <x, u> =[\<Sum>(x: A). P x] <y, transport[A, P, x, y]`p`u>) - (\<lambda>x. \<lambda>(u: P x). refl <x, u>) x y p)`u" - -corollary lift_type: - assumes [intro]: - "P: A \<leadsto> U i" "A: U i" - "x: A" "y: A" - shows - "lift[A, P, x, y]: - \<Prod>u: P x. \<Prod>p: x =[A] y. <x, u> =[\<Sum>x: A. 
P x] <y, transport[A, P, x, y]`p`u>" -unfolding lift_def by (derive lems: path_lifting) - -corollary lift_comp: - assumes [intro]: - "P: A \<leadsto> U i" "A: U i" - "x: A" "u: P x" - shows "lift[A, P, x, x]`u`(refl x) \<equiv> refl <x, u>" -unfolding lift_def apply compute prefer 3 apply compute by derive - -declare - lift_type [intro] - lift_comp [comp] - -text \<open> -Proof of the computation property of @{const lift}, stating that the first projection of the lift of @{term p} is equal to @{term p}: -\<close> - -schematic_goal lift_fst_comp: - assumes [intro]: - "P: A \<leadsto> U i" "A: U i" - "x: A" "y: A" "p: x =[A] y" - defines - "Fst \<equiv> \<lambda>x y p u. ap[fst[A, P], \<Sum>x: A. P x, A, <x, u>, <y, transport[A, P, x, y]`p`u>]" - shows - "?prf: \<Prod>u: P x. (Fst x y p u)`(lift[A, P, x, y]`u`p) =[x =[A] y] p" -proof - (path_ind' x y p, quantify_all) - fix x assume [intro]: "x: A" - show "\<And>u. u: P x \<Longrightarrow> - refl(refl x): (Fst x x (refl x) u)`(lift[A, P, x, x]`u`(refl x)) =[x =[A] x] refl x" - unfolding Fst_def by derive - - fix y p assume [intro]: "y: A" "p: x =[A] y" - show "\<Prod>u: P x. (Fst x y p u)`(lift[A, \<lambda>a. P a, x, y]`u`p) =[x =[A] y] p: U i" - proof derive - fix u assume [intro]: "u: P x" - have - "fst[A, P]`<x, u> \<equiv> x" "fst[A, P]`<y, transport[A, P, x, y]`p`u> \<equiv> y" by derive - moreover have - "Fst x y p u: - <x, u> =[\<Sum>(x: A). P x] <y, transport[A, P, x, y]`p`u> \<rightarrow> - fst[A, P]`<x, u> =[A] fst[A, P]`<y, transport[A, P, x, y]`p`u>" - unfolding Fst_def by derive - ultimately show - "Fst x y p u: <x, u> =[\<Sum>(x: A). P x] <y, transport[A, P, x, y]`p`u> \<rightarrow> x =[A] y" - by simp - qed routine -qed fact - - -section \<open>Dependent map\<close> - -schematic_goal dependent_map: - assumes [intro]: - "A: U i" "B: A \<leadsto> U i" - "f: \<Prod>x: A. B x" - "x: A" "y: A" "p: x =[A] y" - shows "?prf: transport[A, B, x, y]`p`(f`x) =[B y] f`y" -proof (path_ind' x y p) - show "\<And>x. x: A \<Longrightarrow> refl (f`x): transport[A, B, x, x]`(refl x)`(f`x) =[B x] f`x" by derive -qed derive - -definition apd :: "[t, t, t \<Rightarrow> t, t, t] \<Rightarrow> t" ("(apd[_, _, _, _, _])") where -"apd[f, A, B, x, y] \<equiv> - \<lambda>p: x =[A] y. indEq (\<lambda>x y p. transport[A, B, x, y]`p`(f`x) =[B y] f`y) (\<lambda>x. refl (f`x)) x y p" - -corollary apd_type: - assumes [intro]: - "A: U i" "B: A \<leadsto> U i" - "f: \<Prod>x: A. B x" - "x: A" "y: A" - shows "apd[f, A, B, x, y]: \<Prod>p: x =[A] y. transport[A, B, x, y]`p`(f`x) =[B y] f`y" -unfolding apd_def by derive - -declare apd_type [intro] - - -end diff --git a/Univalence.thy b/Univalence.thy deleted file mode 100644 index dd8a13c..0000000 --- a/Univalence.thy +++ /dev/null @@ -1,66 +0,0 @@ -(******** -Isabelle/HoTT: Univalence -Feb 2019 - -********) - -theory Univalence -imports Equivalence - -begin - - -schematic_goal type_eq_imp_equiv: - assumes [intro]: "A: U i" "B: U i" - shows "?prf: A =[U i] B \<rightarrow> A \<cong> B" -unfolding equivalence_def -apply (rule Prod_routine, rule Sum_routine) -prefer 3 apply (derive lems: transport_biinv) -done - -(* Copy-paste the derived proof term as the definition of idtoeqv for now; - should really have some automatic export of proof terms, though. -*) -definition idtoeqv :: "[ord, t, t] \<Rightarrow> t" ("(2idtoeqv[_, _, _])") where " -idtoeqv[i, A, B] \<equiv> - \<lambda>p: A =[U i] B. - < transport[U i, Id, A, B]`p, < - < transport[U i, Id, B, A]`(inv[U i, A, B]`p), - happly[x: A. 
A] - ((transport[U i, Id, B, A]`(inv[U i, A, B]`p)) o[A] transport[U i, Id, A, B]`p) - (id A) - (indEq - (\<lambda>A B p. - (transport[U i, Id, B, A]`(inv[U i, A, B]`p)) o[A] transport[U i, Id, A, B]`p - =[A \<rightarrow> A] id A) - (\<lambda>x. refl (id x)) A B p) - >, - < transport[U i, Id, B, A]`(inv[U i, A, B]`p), - happly[x: B. B] - ((transport[U i, Id, A, B]`p) o[B] (transport[U i, Id, B, A]`(inv[U i, A, B]`p))) - (id B) - (indEq - (\<lambda>A B p. - transport[U i, Id, A, B]`p o[B] (transport[U i, Id, B, A]`(inv[U i, A, B]`p)) - =[B \<rightarrow> B] id B) - (\<lambda>x. refl (id x)) A B p) - > - > - > -" - -corollary idtoeqv_type: - assumes [intro]: "A: U i" "B: U i" "p: A =[U i] B" - shows "idtoeqv[i, A, B]: A =[U i] B \<rightarrow> A \<cong> B" -unfolding idtoeqv_def by (routine add: type_eq_imp_equiv) - -declare idtoeqv_type [intro] - - -text \<open>For now, we use bi-invertibility as our definition of equivalence.\<close> - -axiomatization univalance :: "[ord, t, t] \<Rightarrow> t" -where univalence: "univalence i A B: biinv[A, B] idtoeqv[i, A, B]" - - -end diff --git a/ex/Book/Ch1.thy b/ex/Book/Ch1.thy deleted file mode 100644 index dfb1879..0000000 --- a/ex/Book/Ch1.thy +++ /dev/null @@ -1,50 +0,0 @@ -(* -Title: ex/Book/Ch1.thy -Author: Josh Chen -Date: 2018 - -A formalization of some content of Chapter 1 of the Homotopy Type Theory book. -*) - -theory Ch1 -imports "../../HoTT" - -begin - -chapter \<open>HoTT Book, Chapter 1\<close> - -section \<open>1.6 Dependent pair types (\<Sum>-types)\<close> - -paragraph \<open>Propositional uniqueness principle.\<close> - -schematic_goal - assumes "A: U i" and "B: A \<longrightarrow> U i" and "p: \<Sum>x:A. B x" - shows "?a: p =[\<Sum>x:A. B x] <fst p, snd p>" - -text \<open>Proof by induction on @{term "p: \<Sum>x:A. B x"}:\<close> - -proof (rule Sum_elim[where ?p=p]) - text \<open>We prove the base case.\<close> - fix x y assume asm: "x: A" "y: B x" show "refl <x,y>: <x,y> =[\<Sum>x:A. B x] <fst <x,y>, snd <x,y>>" - proof compute - show "x: A" and "y: B x" by (fact asm)+ \<comment> \<open>Hint the correct types.\<close> - - text \<open>And now @{method derive} takes care of the rest. -\<close> - qed (derive lems: assms asm) -qed (derive lems: assms) - - -section \<open>Exercises\<close> - -paragraph \<open>Exercise 1.13\<close> - -abbreviation "not" ("\<not>_") where "\<not>A \<equiv> A \<rightarrow> \<zero>" - -text "This proof requires the use of universe cumulativity." - -proposition assumes "A: U i" shows "\<^bold>\<lambda>f. f`(inr(\<^bold>\<lambda>a. f`(inl a))): \<not>(\<not>(A + \<not>A))" -by (derive lems: assms) - - -end diff --git a/ex/Methods.thy b/ex/Methods.thy deleted file mode 100644 index 09975b0..0000000 --- a/ex/Methods.thy +++ /dev/null @@ -1,49 +0,0 @@ -(* -Title: ex/Methods.thy -Author: Joshua Chen -Date: 2018 - -Basic HoTT method usage examples. -*) - -theory Methods -imports "../HoTT" - -begin - - -lemma - assumes "A : U(i)" "B: A \<longrightarrow> U(i)" "\<And>x. x : A \<Longrightarrow> C x: B x \<longrightarrow> U(i)" - shows "\<Sum>x:A. \<Prod>y:B x. \<Sum>z:C x y. \<Prod>w:A. 
x =\<^sub>A w: U(i)" -by (routine add: assms) - -\<comment> \<open>Correctly determines the type of the pair.\<close> -schematic_goal "\<lbrakk>A: U(i); B: U(i); a : A; b : B\<rbrakk> \<Longrightarrow> <a, b> : ?A" -by routine - -\<comment> \<open>Finds pair (too easy).\<close> -schematic_goal "\<lbrakk>A: U(i); B: U(i); a : A; b : B\<rbrakk> \<Longrightarrow> ?x : A \<times> B" -apply (rule intros) -apply assumption+ -done - -\<comment> \<open>Function application. We still often have to explicitly specify types.\<close> -lemma "\<lbrakk>A: U i; a: A\<rbrakk> \<Longrightarrow> (\<^bold>\<lambda>x. <x,0>)`a \<equiv> <a,0>" -proof compute - show "\<And>x. x: A \<Longrightarrow> <x,0>: A \<times> \<nat>" by routine -qed - -text \<open> -The proof below takes a little more work than one might expect; it would be nice to have a one-line method or proof. -\<close> - -lemma "\<lbrakk>A: U i; B: A \<longrightarrow> U i; a: A; b: B a\<rbrakk> \<Longrightarrow> (\<^bold>\<lambda>x y. <x,y>)`a`b \<equiv> <a,b>" -proof (compute, routine) - show "\<lbrakk>A: U i; B: A \<longrightarrow> U i; a: A; b: B a\<rbrakk> \<Longrightarrow> (\<^bold>\<lambda>y. <a,y>)`b \<equiv> <a,b>" - proof compute - show "\<And>b. \<lbrakk>A: U i; B: A \<longrightarrow> U i; a: A; b: B a\<rbrakk> \<Longrightarrow> <a,b>: \<Sum>x:A. B x" by routine - qed -qed - - -end diff --git a/ex/Synthesis.thy b/ex/Synthesis.thy deleted file mode 100644 index 3ee973c..0000000 --- a/ex/Synthesis.thy +++ /dev/null @@ -1,58 +0,0 @@ -(* -Title: ex/Synthesis.thy -Author: Joshua Chen -Date: 2018 - -Examples of synthesis -*) - - -theory Synthesis -imports "../HoTT" - -begin - - -section \<open>Synthesis of the predecessor function\<close> - -text \<open> -In this example we construct a predecessor function for the natural numbers. -This is also done in @{file "~~/src/CTT/ex/Synthesis.thy"}, there the work is much easier as the equality type is extensional. -\<close> - -text \<open>First we show that the property we want is well-defined.\<close> - -lemma pred_welltyped: "\<Sum>pred: \<nat>\<rightarrow>\<nat>. (pred`0 =\<^sub>\<nat> 0) \<times> (\<Prod>n:\<nat>. pred`(succ n) =\<^sub>\<nat> n): U O" -by routine - -text \<open> -Now we look for an inhabitant of this type. -Observe that we're looking for a lambda term @{term pred} satisfying @{term "pred`0 =\<^sub>\<nat> 0"} and @{term "\<Prod>n:\<nat>. pred`(succ n) =\<^sub>\<nat> n"}. -What if we require *definitional* instead of just propositional equality? -\<close> - -schematic_goal "?p`0 \<equiv> 0" and "\<And>n. n: \<nat> \<Longrightarrow> (?p`(succ n)) \<equiv> n" -apply compute -prefer 4 apply compute -apply (rule Nat_routine | compute)+ -oops -\<comment> \<open>Something in the original proof broke when I revamped the theory. The completion of this derivation is left as an exercise to the reader.\<close> - -text \<open> -The above proof finds a candidate, namely @{term "\<lambda>n. ind\<^sub>\<nat> (\<lambda>a b. a) 0 n"}. -We prove this has the required type and properties. -\<close> - -definition pred :: t where "pred \<equiv> \<^bold>\<lambda>n. ind\<^sub>\<nat> (\<lambda>a b. a) 0 n" - -lemma pred_type: "pred: \<nat> \<rightarrow> \<nat>" -unfolding pred_def by routine - -lemma pred_props: "<refl 0, \<^bold>\<lambda>n. refl n>: (pred`0 =\<^sub>\<nat> 0) \<times> (\<Prod>n:\<nat>. pred`(succ n) =\<^sub>\<nat> n)" -unfolding pred_def by derive - -theorem "<pred, <refl(0), \<^bold>\<lambda>n. refl(n)>>: \<Sum>pred:\<nat>\<rightarrow>\<nat> . 
((pred`0) =\<^sub>\<nat> 0) \<times> (\<Prod>n:\<nat>. (pred`(succ n)) =\<^sub>\<nat> n)" -by (derive lems: pred_type pred_props) - - -end diff --git a/hott/Basic_Types.thy b/hott/Basic_Types.thy new file mode 100644 index 0000000..85f22a8 --- /dev/null +++ b/hott/Basic_Types.thy @@ -0,0 +1,90 @@ +theory Basic_Types +imports Spartan + +begin + +section \<open>Sum type\<close> + +axiomatization + Sum :: \<open>o \<Rightarrow> o \<Rightarrow> o\<close> and + inl :: \<open>o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> and + inr :: \<open>o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> and + SumInd :: \<open>o \<Rightarrow> o \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> o \<Rightarrow> o\<close> + +notation Sum (infixl "\<or>" 50) + +axiomatization where + SumF: "\<lbrakk>A: U i; B: U i\<rbrakk> \<Longrightarrow> A \<or> B: U i" and + + Sum_inl: "\<lbrakk>a: A; B: U i\<rbrakk> \<Longrightarrow> inl A B a: A \<or> B" and + + Sum_inr: "\<lbrakk>b: B; A: U i\<rbrakk> \<Longrightarrow> inr A B b: A \<or> B" and + + SumE: "\<lbrakk> + s: A \<or> B; + \<And>s. s: A \<or> B \<Longrightarrow> C s: U i; + \<And>a. a: A \<Longrightarrow> c a: C (inl A B a); + \<And>b. b: B \<Longrightarrow> d b: C (inr A B b) + \<rbrakk> \<Longrightarrow> SumInd A B (\<lambda>s. C s) (\<lambda>a. c a) (\<lambda>b. d b) s: C s" and + + Sum_comp_inl: "\<lbrakk> + a: A; + \<And>s. s: A \<or> B \<Longrightarrow> C s: U i; + \<And>a. a: A \<Longrightarrow> c a: C (inl A B a); + \<And>b. b: B \<Longrightarrow> d b: C (inr A B b) + \<rbrakk> \<Longrightarrow> SumInd A B (\<lambda>s. C s) (\<lambda>a. c a) (\<lambda>b. d b) (inl A B a) \<equiv> c a" and + + Sum_comp_inr: "\<lbrakk> + b: B; + \<And>s. s: A \<or> B \<Longrightarrow> C s: U i; + \<And>a. a: A \<Longrightarrow> c a: C (inl A B a); + \<And>b. b: B \<Longrightarrow> d b: C (inr A B b) + \<rbrakk> \<Longrightarrow> SumInd A B (\<lambda>s. C s) (\<lambda>a. c a) (\<lambda>b. d b) (inr A B b) \<equiv> d b" + +lemmas + [intros] = SumF Sum_inl Sum_inr and + [elims] = SumE and + [comps] = Sum_comp_inl Sum_comp_inr + + +section \<open>Empty and unit types\<close> + +axiomatization + Top :: \<open>o\<close> and + tt :: \<open>o\<close> and + TopInd :: \<open>(o \<Rightarrow> o) \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> +and + Bot :: \<open>o\<close> and + BotInd :: \<open>(o \<Rightarrow> o) \<Rightarrow> o \<Rightarrow> o\<close> + +notation Top ("\<top>") and Bot ("\<bottom>") + +axiomatization where + TopF: "\<top>: U i" and + + TopI: "tt: \<top>" and + + TopE: "\<lbrakk>a: \<top>; \<And>x. x: \<top> \<Longrightarrow> C x: U i; c: C tt\<rbrakk> \<Longrightarrow> TopInd (\<lambda>x. C x) c a: C a" and + + Top_comp: "\<lbrakk>\<And>x. x: \<top> \<Longrightarrow> C x: U i; c: C tt\<rbrakk> \<Longrightarrow> TopInd (\<lambda>x. C x) c tt \<equiv> c" +and + BotF: "\<bottom>: U i" and + + BotE: "\<lbrakk>x: \<bottom>; \<And>x. x: \<bottom> \<Longrightarrow> C x: U i\<rbrakk> \<Longrightarrow> BotInd (\<lambda>x. 
C x) x: C x" + +lemmas + [intros] = TopF TopI BotF and + [elims] = TopE BotE and + [comps] = Top_comp + + +section \<open>Booleans\<close> + +definition "Bool \<equiv> \<top> \<or> \<top>" +definition "true \<equiv> inl \<top> \<top> tt" +definition "false \<equiv> inr \<top> \<top> tt" + +\<comment> \<open>Can define if-then-else etc.\<close> + + +end diff --git a/hott/Nat.thy b/hott/Nat.thy new file mode 100644 index 0000000..f846dbf --- /dev/null +++ b/hott/Nat.thy @@ -0,0 +1,51 @@ +theory Nat +imports Spartan + +begin + +axiomatization + Nat :: \<open>o\<close> and + zero :: \<open>o\<close> ("0") and + suc :: \<open>o \<Rightarrow> o\<close> and + NatInd :: \<open>(o \<Rightarrow> o) \<Rightarrow> o \<Rightarrow> (o \<Rightarrow> o \<Rightarrow> o) \<Rightarrow> o \<Rightarrow> o\<close> +where + NatF: "Nat: U i" and + + Nat_zero: "0: Nat" and + + Nat_suc: "n: Nat \<Longrightarrow> suc n: Nat" and + + NatE: "\<lbrakk> + n: Nat; + n\<^sub>0: Nat; + \<And>n. n: Nat \<Longrightarrow> C n: U i; + \<And>n c. \<lbrakk>n: Nat; c: C n\<rbrakk> \<Longrightarrow> f n c: C (suc n) + \<rbrakk> \<Longrightarrow> NatInd (\<lambda>n. C n) n\<^sub>0 (\<lambda>n c. f n c) n: C n" and + + Nat_comp_zero: "\<lbrakk> + n\<^sub>0: Nat; + \<And>n. n: Nat \<Longrightarrow> C n: U i; + \<And>n c. \<lbrakk>n: Nat; c: C n\<rbrakk> \<Longrightarrow> f n c: C (suc n) + \<rbrakk> \<Longrightarrow> NatInd (\<lambda>n. C n) n\<^sub>0 (\<lambda>n c. f n c) 0 \<equiv> n\<^sub>0" and + + Nat_comp_suc: "\<lbrakk> + n: Nat; + n\<^sub>0: Nat; + \<And>n. n: Nat \<Longrightarrow> C n: U i; + \<And>n c. \<lbrakk>n: Nat; c: C n\<rbrakk> \<Longrightarrow> f n c: C (suc n) + \<rbrakk> \<Longrightarrow> + NatInd (\<lambda>n. C n) n\<^sub>0 (\<lambda>n c. f n c) (suc n) \<equiv> + f n (NatInd (\<lambda>n. C n) n\<^sub>0 (\<lambda>n c. f n c) n)" + +lemmas + [intros] = NatF Nat_zero Nat_suc and + [elims] = NatE and + [comps] = Nat_comp_zero Nat_comp_suc + + +section \<open>Basic arithmetic\<close> + +\<comment> \<open>TODO\<close> + + +end diff --git a/spartan/lib/congruence.ML b/spartan/lib/congruence.ML new file mode 100644 index 0000000..bb7348c --- /dev/null +++ b/spartan/lib/congruence.ML @@ -0,0 +1,16 @@ +signature Sym_Attr_Data = +sig + val name: string + val symmetry_rule: thm +end + +functor Sym_Attr (D: Sym_Attr_Data) = +struct + local + val distinct_prems = the_single o Seq.list_of o Tactic.distinct_subgoals_tac + in + val attr = Thm.rule_attribute [] + (K (fn th => distinct_prems (th RS D.symmetry_rule) handle THM _ => th)) + val setup = Attrib.setup (Binding.name D.name) (Scan.succeed attr) "" + end +end diff --git a/spartan/lib/elimination.ML b/spartan/lib/elimination.ML new file mode 100644 index 0000000..85ce669 --- /dev/null +++ b/spartan/lib/elimination.ML @@ -0,0 +1,33 @@ +(* Title: elimination.ML + Author: Joshua Chen + +Type elimination automation. 
+*)
+
+structure Elim = struct
+
+(* Elimination rule data *)
+structure Rules = Generic_Data (
+  type T = thm Termtab.table
+  val empty = Termtab.empty
+  val extend = I
+  val merge = Termtab.merge Thm.eq_thm_prop
+)
+
+fun register_rule rl =
+  let val hd = Term.head_of (Lib.type_of_typing (Thm.major_prem_of rl))
+  in Rules.map (Termtab.update (hd, rl)) end
+
+fun get_rule ctxt = Termtab.lookup (Rules.get (Context.Proof ctxt))
+
+fun rules ctxt = map (op #2) (Termtab.dest (Rules.get (Context.Proof ctxt)))
+
+(* Set up [elims] attribute *)
+val _ = Theory.setup (
+  Attrib.setup \<^binding>\<open>elims\<close>
+    (Scan.lift (Scan.succeed (Thm.declaration_attribute register_rule))) ""
+  #> Global_Theory.add_thms_dynamic (\<^binding>\<open>elims\<close>,
+      fn context => (rules (Context.proof_of context)))
+)
+
+end
diff --git a/spartan/lib/eqsubst.ML b/spartan/lib/eqsubst.ML
new file mode 100644
index 0000000..ea6f098
--- /dev/null
+++ b/spartan/lib/eqsubst.ML
@@ -0,0 +1,434 @@
+(* Title: eqsubst.ML
+   Author: Lucas Dixon, University of Edinburgh
+   Modified: Joshua Chen, University of Innsbruck
+
+Perform a substitution using an equation.
+
+This code is slightly modified from the original at Tools/eqsubst.ML,
+to incorporate auto-typechecking for type theory.
+*)
+
+signature EQSUBST =
+sig
+  type match =
+    ((indexname * (sort * typ)) list (* type instantiations *)
+     * (indexname * (typ * term)) list) (* term instantiations *)
+    * (string * typ) list (* fake named type abs env *)
+    * (string * typ) list (* type abs env *)
+    * term (* outer term *)
+
+  type searchinfo =
+    Proof.context
+    * int (* maxidx *)
+    * Zipper.T (* focusterm to search under *)
+
+  datatype 'a skipseq = SkipMore of int | SkipSeq of 'a Seq.seq Seq.seq
+
+  val skip_first_asm_occs_search: ('a -> 'b -> 'c Seq.seq Seq.seq) -> 'a -> int -> 'b -> 'c skipseq
+  val skip_first_occs_search: int -> ('a -> 'b -> 'c Seq.seq Seq.seq) -> 'a -> 'b -> 'c Seq.seq
+  val skipto_skipseq: int -> 'a Seq.seq Seq.seq -> 'a skipseq
+
+  (* tactics *)
+  val eqsubst_asm_tac: Proof.context -> int list -> thm list -> int -> tactic
+  val eqsubst_asm_tac': Proof.context ->
+    (searchinfo -> int -> term -> match skipseq) -> int -> thm -> int -> tactic
+  val eqsubst_tac: Proof.context ->
+    int list -> (* list of occurrences to rewrite, use [0] for any *)
+    thm list -> int -> tactic
+  val eqsubst_tac': Proof.context ->
+    (searchinfo -> term -> match Seq.seq) (* search function *)
+    -> thm (* equation theorem to rewrite with *)
+    -> int (* subgoal number in goal theorem *)
+    -> thm (* goal theorem *)
+    -> thm Seq.seq (* rewritten goal theorem *)
+
+  (* search for substitutions *)
+  val valid_match_start: Zipper.T -> bool
+  val search_lr_all: Zipper.T -> Zipper.T Seq.seq
+  val search_lr_valid: (Zipper.T -> bool) -> Zipper.T -> Zipper.T Seq.seq
+  val searchf_lr_unify_all: searchinfo -> term -> match Seq.seq Seq.seq
+  val searchf_lr_unify_valid: searchinfo -> term -> match Seq.seq Seq.seq
+  val searchf_bt_unify_valid: searchinfo -> term -> match Seq.seq Seq.seq
+end;
+
+structure EqSubst: EQSUBST =
+struct
+
+(* changes object "=" to meta "==" which prepares a given rewrite rule *)
+fun prep_meta_eq ctxt =
+  Simplifier.mksimps ctxt #> map Drule.zero_var_indexes;
+
+(* make free vars into schematic vars with index zero *)
+fun unfix_frees frees =
+  fold (K (Thm.forall_elim_var 0)) frees o Drule.forall_intr_list frees;
+
+
+type match =
+  ((indexname * (sort * typ)) list (* type instantiations *)
+   * (indexname * (typ * term)) list) (* term instantiations *)
+  * (string * typ) list (* fake named type abs env *)
+  * (string * typ) list (* type abs env *)
+  * term; (* outer term *)
+
+type searchinfo =
+  Proof.context
+  * int (* maxidx *)
+  * Zipper.T; (* focusterm to search under *)
+
+
+(* skipping non-empty sub-sequences but when we reach the end
+   of the seq, remembering how much we have left to skip. *)
+datatype 'a skipseq =
+  SkipMore of int |
+  SkipSeq of 'a Seq.seq Seq.seq;
+
+(* given a seqseq, skip the first m non-empty seq's, note deficit *)
+fun skipto_skipseq m s =
+  let
+    fun skip_occs n sq =
+      (case Seq.pull sq of
+        NONE => SkipMore n
+      | SOME (h, t) =>
+        (case Seq.pull h of
+          NONE => skip_occs n t
+        | SOME _ => if n <= 1 then SkipSeq (Seq.cons h t) else skip_occs (n - 1) t))
+  in skip_occs m s end;
+
+(* note: outerterm is the target with the match replaced by a bound
+   variable : i.e. "P lhs" becomes "%x. P x"
+   insts is the types of instantiations of vars in lhs
+   and typinsts is the type instantiations of types in the lhs
+   Note: Final rule is the rule lifted into the context of the
+   target thm. *)
+fun mk_foo_match mkuptermfunc Ts t =
+  let
+    val ty = Term.type_of t
+    val bigtype = rev (map snd Ts) ---> ty
+    fun mk_foo 0 t = t
+      | mk_foo i t = mk_foo (i - 1) (t $ (Bound (i - 1)))
+    val num_of_bnds = length Ts
+    (* foo_term = "fooabs y0 ... yn" where y's are local bounds *)
+    val foo_term = mk_foo num_of_bnds (Bound num_of_bnds)
+  in Abs ("fooabs", bigtype, mkuptermfunc foo_term) end;
+
+(* T is outer bound vars, n is number of locally bound vars *)
+(* THINK: is order of Ts correct...? or reversed? *)
+fun mk_fake_bound_name n = ":b_" ^ n;
+fun fakefree_badbounds Ts t =
+  let val (FakeTs, Ts, newnames) =
+    fold_rev (fn (n, ty) => fn (FakeTs, Ts, usednames) =>
+      let
+        val newname = singleton (Name.variant_list usednames) n
+      in
+        ((mk_fake_bound_name newname, ty) :: FakeTs,
+         (newname, ty) :: Ts,
+         newname :: usednames)
+      end) Ts ([], [], [])
+  in (FakeTs, Ts, Term.subst_bounds (map Free FakeTs, t)) end;
+
+(* before matching we need to fake the bound vars that are missing an
+   abstraction. In this function we additionally construct the
+   abstraction environment, and an outer context term (with the focus
+   abstracted out) for use in rewriting with RW_Inst.rw *)
+fun prep_zipper_match z =
+  let
+    val t = Zipper.trm z
+    val c = Zipper.ctxt z
+    val Ts = Zipper.C.nty_ctxt c
+    val (FakeTs', Ts', t') = fakefree_badbounds Ts t
+    val absterm = mk_foo_match (Zipper.C.apply c) Ts' t'
+  in
+    (t', (FakeTs', Ts', absterm))
+  end;
+
+(* Unification with exception handled *)
+(* given context, max var index, pat, tgt; returns Seq of instantiations *)
+fun clean_unify ctxt ix (a as (pat, tgt)) =
+  let
+    (* type info will be re-derived, maybe this can be cached
+       for efficiency? *)
+    val pat_ty = Term.type_of pat;
+    val tgt_ty = Term.type_of tgt;
+    (* FIXME is it OK to ignore the type instantiation info?
+       or should I be using it? *)
+    val typs_unify =
+      SOME (Sign.typ_unify (Proof_Context.theory_of ctxt) (pat_ty, tgt_ty) (Vartab.empty, ix))
+        handle Type.TUNIFY => NONE;
+  in
+    (case typs_unify of
+      SOME (typinsttab, ix2) =>
+        let
+          (* FIXME is it right to throw away the flexes?
+             or should I be using them somehow? *)
+          fun mk_insts env =
+            (Vartab.dest (Envir.type_env env),
+             Vartab.dest (Envir.term_env env));
+          val initenv =
+            Envir.Envir {maxidx = ix2, tenv = Vartab.empty, tyenv = typinsttab};
+          val useq = Unify.smash_unifiers (Context.Proof ctxt) [a] initenv
+            handle ListPair.UnequalLengths => Seq.empty
+                 | Term.TERM _ => Seq.empty;
+          fun clean_unify' useq () =
+            (case (Seq.pull useq) of
+              NONE => NONE
+            | SOME (h, t) => SOME (mk_insts h, Seq.make (clean_unify' t)))
+              handle ListPair.UnequalLengths => NONE
+                   | Term.TERM _ => NONE;
+        in
+          (Seq.make (clean_unify' useq))
+        end
+    | NONE => Seq.empty)
+  end;
+
+(* Unification for zippers *)
+(* Note: Ts is a modified version of the original names of the outer
+   bound variables. New names have been introduced to make sure they are
+   unique w.r.t. all names in the term and each other. usednames' is
+   oldnames + new names. *)
+fun clean_unify_z ctxt maxidx pat z =
+  let val (t, (FakeTs, Ts, absterm)) = prep_zipper_match z in
+    Seq.map (fn insts => (insts, FakeTs, Ts, absterm))
+      (clean_unify ctxt maxidx (t, pat))
+  end;
+
+
+fun bot_left_leaf_of (l $ _) = bot_left_leaf_of l
+  | bot_left_leaf_of (Abs (_, _, t)) = bot_left_leaf_of t
+  | bot_left_leaf_of x = x;
+
+(* Avoid considering replacing terms which have a var at the head as
+   they always succeed trivially, and uninterestingly. *)
+fun valid_match_start z =
+  (case bot_left_leaf_of (Zipper.trm z) of
+    Var _ => false
+  | _ => true);
+
+(* search from top, left to right, then down *)
+val search_lr_all = ZipperSearch.all_bl_ur;
+
+(* search from top, left to right, then down *)
+fun search_lr_valid validf =
+  let
+    fun sf_valid_td_lr z =
+      let val here = if validf z then [Zipper.Here z] else [] in
+        (case Zipper.trm z of
+          _ $ _ =>
+            [Zipper.LookIn (Zipper.move_down_left z)] @ here @
+            [Zipper.LookIn (Zipper.move_down_right z)]
+        | Abs _ => here @ [Zipper.LookIn (Zipper.move_down_abs z)]
+        | _ => here)
+      end;
+  in Zipper.lzy_search sf_valid_td_lr end;
+
+(* search from bottom to top, left to right *)
+fun search_bt_valid validf =
+  let
+    fun sf_valid_td_lr z =
+      let val here = if validf z then [Zipper.Here z] else [] in
+        (case Zipper.trm z of
+          _ $ _ =>
+            [Zipper.LookIn (Zipper.move_down_left z),
+             Zipper.LookIn (Zipper.move_down_right z)] @ here
+        | Abs _ => [Zipper.LookIn (Zipper.move_down_abs z)] @ here
+        | _ => here)
+      end;
+  in Zipper.lzy_search sf_valid_td_lr end;
+
+fun searchf_unify_gen f (ctxt, maxidx, z) lhs =
+  Seq.map (clean_unify_z ctxt maxidx lhs) (Zipper.limit_apply f z);
+
+(* search all unifications *)
+val searchf_lr_unify_all = searchf_unify_gen search_lr_all;
+
+(* search only for 'valid' unifiers (non abs subterms and non vars) *)
+val searchf_lr_unify_valid = searchf_unify_gen (search_lr_valid valid_match_start);
+
+val searchf_bt_unify_valid = searchf_unify_gen (search_bt_valid valid_match_start);
+
+(* apply a substitution in the conclusion of the theorem *)
+(* cfvs are certified free var placeholders for goal params *)
+(* conclthm is a theorem for just the conclusion *)
+(* m is instantiation/match information *)
+(* rule is the equation for substitution *)
+fun apply_subst_in_concl ctxt i st (cfvs, conclthm) rule m =
+  RW_Inst.rw ctxt m rule conclthm
+  |> unfix_frees cfvs
+  |> Conv.fconv_rule Drule.beta_eta_conversion
+  |> (fn r => resolve_tac ctxt [r] i st);
+
+(* substitute within the conclusion of goal i of gth, using a meta
+equation rule. Note that we assume rule has var indices zero'd *)
+fun prep_concl_subst ctxt i gth =
+  let
+    val th = Thm.incr_indexes 1 gth;
+    val tgt_term = Thm.prop_of th;
+
+    val (fixedbody, fvs) = IsaND.fix_alls_term ctxt i tgt_term;
+    val cfvs = rev (map (Thm.cterm_of ctxt) fvs);
+
+    val conclterm = Logic.strip_imp_concl fixedbody;
+    val conclthm = Thm.trivial (Thm.cterm_of ctxt conclterm);
+    val maxidx = Thm.maxidx_of th;
+    val ft =
+      (Zipper.move_down_right (* ==> *)
+       o Zipper.move_down_left (* Trueprop *)
+       o Zipper.mktop
+       o Thm.prop_of) conclthm
+  in
+    ((cfvs, conclthm), (ctxt, maxidx, ft))
+  end;
+
+(* substitute using an object or meta level equality *)
+fun eqsubst_tac' ctxt searchf instepthm i st =
+  let
+    val (cvfsconclthm, searchinfo) = prep_concl_subst ctxt i st;
+    val stepthms = Seq.of_list (prep_meta_eq ctxt instepthm);
+    fun rewrite_with_thm r =
+      let val (lhs,_) = Logic.dest_equals (Thm.concl_of r) in
+        searchf searchinfo lhs
+        |> Seq.maps (apply_subst_in_concl ctxt i st cvfsconclthm r)
+      end;
+  in stepthms |> Seq.maps rewrite_with_thm end;
+
+
+(* General substitution of multiple occurrences using one of
+   the given theorems *)
+
+fun skip_first_occs_search occ srchf sinfo lhs =
+  (case skipto_skipseq occ (srchf sinfo lhs) of
+    SkipMore _ => Seq.empty
+  | SkipSeq ss => Seq.flat ss);
+
+(* The "occs" argument is a list of integers indicating which occurrences,
+w.r.t. the search order, to rewrite. Backtracking will also find later
+occurrences, but all earlier ones are skipped. Thus you can use [0] to
+just find all rewrites. *)
+
+fun eqsubst_tac ctxt occs thms i st =
+  let val nprems = Thm.nprems_of st in
+    if nprems < i then Seq.empty else
+    let
+      val thmseq = Seq.of_list thms;
+      fun apply_occ occ st =
+        thmseq |> Seq.maps (fn r =>
+          eqsubst_tac' ctxt
+            (skip_first_occs_search occ searchf_lr_unify_valid) r
+            (i + (Thm.nprems_of st - nprems)) st);
+      val sorted_occs = Library.sort (rev_order o int_ord) occs;
+    in
+      Seq.maps distinct_subgoals_tac (Seq.EVERY (map apply_occ sorted_occs) st)
+    end
+  end;
+
+
+(* apply a substitution inside assumption j, keeps asm in the same place *)
+fun apply_subst_in_asm ctxt i st rule ((cfvs, j, _, pth),m) =
+  let
+    val st2 = Thm.rotate_rule (j - 1) i st; (* put premise first *)
+    val preelimrule =
+      RW_Inst.rw ctxt m rule pth
+      |> (Seq.hd o prune_params_tac ctxt)
+      |> Thm.permute_prems 0 ~1 (* put old asm first *)
+      |> unfix_frees cfvs (* unfix any global params *)
+      |> Conv.fconv_rule Drule.beta_eta_conversion; (* normal form *)
+  in
+    (* ~j because new asm starts at back, thus we subtract 1 *)
+    Seq.map (Thm.rotate_rule (~ j) (Thm.nprems_of rule + i))
+      (dresolve_tac ctxt [preelimrule] i st2)
+  end;
+
+
+(* prepare to substitute within the j'th premise of subgoal i of gth,
+using a meta-level equation. Note that we assume rule has var indices
+zero'd. Note that we also assume that asmt is the j'th premise of
+subgoal i of gth. Note the repetition of work done for each
+assumption, i.e. this can be made more efficient for search over
+multiple assumptions.
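+
+The result pairs ((cfvs, j, asm_nprems, pth), searchinfo), where pth is a
+trivial theorem over the j'th assumption (asmt below) and searchinfo is a
+zipper into it for the unification search, analogous to prep_concl_subst.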
*) +fun prep_subst_in_asm ctxt i gth j = + let + val th = Thm.incr_indexes 1 gth; + val tgt_term = Thm.prop_of th; + + val (fixedbody, fvs) = IsaND.fix_alls_term ctxt i tgt_term; + val cfvs = rev (map (Thm.cterm_of ctxt) fvs); + + val asmt = nth (Logic.strip_imp_prems fixedbody) (j - 1); + val asm_nprems = length (Logic.strip_imp_prems asmt); + + val pth = Thm.trivial ((Thm.cterm_of ctxt) asmt); + val maxidx = Thm.maxidx_of th; + + val ft = + (Zipper.move_down_right (* trueprop *) + o Zipper.mktop + o Thm.prop_of) pth + in ((cfvs, j, asm_nprems, pth), (ctxt, maxidx, ft)) end; + +(* prepare subst in every possible assumption *) +fun prep_subst_in_asms ctxt i gth = + map (prep_subst_in_asm ctxt i gth) + ((fn l => Library.upto (1, length l)) + (Logic.prems_of_goal (Thm.prop_of gth) i)); + + +(* substitute in an assumption using an object or meta level equality *) +fun eqsubst_asm_tac' ctxt searchf skipocc instepthm i st = + let + val asmpreps = prep_subst_in_asms ctxt i st; + val stepthms = Seq.of_list (prep_meta_eq ctxt instepthm); + fun rewrite_with_thm r = + let + val (lhs,_) = Logic.dest_equals (Thm.concl_of r); + fun occ_search occ [] = Seq.empty + | occ_search occ ((asminfo, searchinfo)::moreasms) = + (case searchf searchinfo occ lhs of + SkipMore i => occ_search i moreasms + | SkipSeq ss => + Seq.append (Seq.map (Library.pair asminfo) (Seq.flat ss)) + (occ_search 1 moreasms)) (* find later substs also *) + in + occ_search skipocc asmpreps |> Seq.maps (apply_subst_in_asm ctxt i st r) + end; + in stepthms |> Seq.maps rewrite_with_thm end; + + +fun skip_first_asm_occs_search searchf sinfo occ lhs = + skipto_skipseq occ (searchf sinfo lhs); + +fun eqsubst_asm_tac ctxt occs thms i st = + let val nprems = Thm.nprems_of st in + if nprems < i then Seq.empty + else + let + val thmseq = Seq.of_list thms; + fun apply_occ occ st = + thmseq |> Seq.maps (fn r => + eqsubst_asm_tac' ctxt + (skip_first_asm_occs_search searchf_lr_unify_valid) occ r + (i + (Thm.nprems_of st - nprems)) st); + val sorted_occs = Library.sort (rev_order o int_ord) occs; + in + Seq.maps distinct_subgoals_tac (Seq.EVERY (map apply_occ sorted_occs) st) + end + end; + +(* combination method that takes a flag (true indicates that subst + should be done to an assumption, false = apply to the conclusion of + the goal) as well as the theorems to use *) +val _ = + Theory.setup + (Method.setup \<^binding>\<open>sub\<close> + (Scan.lift (Args.mode "asm" -- Scan.optional (Args.parens (Scan.repeat Parse.nat)) [0]) -- + Attrib.thms >> (fn ((asm, occs), inthms) => fn ctxt => + SIMPLE_METHOD' ((if asm then eqsubst_asm_tac else eqsubst_tac) ctxt occs inthms))) + "single-step substitution" + #> + (Method.setup \<^binding>\<open>subst\<close> + (Scan.lift (Args.mode "asm" -- Scan.optional (Args.parens (Scan.repeat Parse.nat)) [0]) -- + Attrib.thms >> (fn ((asm, occs), inthms) => fn ctxt => + SIMPLE_METHOD' (SIDE_CONDS + ((if asm then eqsubst_asm_tac else eqsubst_tac) ctxt occs inthms) + ctxt))) + "single-step substitution with auto-typechecking")) + +end; diff --git a/spartan/lib/equality.ML b/spartan/lib/equality.ML new file mode 100644 index 0000000..79b4086 --- /dev/null +++ b/spartan/lib/equality.ML @@ -0,0 +1,90 @@ +(* Title: equality.ML + Author: Joshua Chen + +Equality reasoning with identity types. 
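+
+The entry point is equality_context_tac below: given a fact p: Id A x y, it
+pushes context hypotheses that mention p, x or y back into the goal
+(push_hyp_tac), performs path induction via rule IdE (induction_tac), then
+reintroduces the pushed hypotheses with PiI, discharging side conditions by
+typechecking. A hypothetical surface-level use, assuming the tactic is
+wrapped as an Isar method named "equality" (that setup is not part of this
+file):
+
+  lemma assumes "p: Id A x y" shows "..."
+    by (equality assms(1))
+
+Only the ML interface below is definitive.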
+*) + +structure Equality: +sig + +val dest_Id: term -> term * term * term + +val push_hyp_tac: term * term -> Proof.context -> int -> tactic +val induction_tac: term -> term -> term -> term -> Proof.context -> tactic +val equality_context_tac: Facts.ref -> Proof.context -> context_tactic + +end = struct + +fun dest_Id tm = case tm of + Const (\<^const_name>\<open>Id\<close>, _) $ A $ x $ y => (A, x, y) + | _ => error "dest_Id" + +(*Context assumptions that have already been pushed into the type family*) +structure Inserts = Proof_Data ( + type T = term Item_Net.T + val init = K (Item_Net.init Term.aconv_untyped single) +) + +fun push_hyp_tac (t, _) = + Subgoal.FOCUS_PARAMS (fn {context = ctxt, concl, ...} => + let + val (_, C) = Lib.dest_typing (Thm.term_of concl) + val B = Thm.cterm_of ctxt (Lib.lambda_var t C) + val a = Thm.cterm_of ctxt t + (*The resolvent is PiE[where ?B=B and ?a=a]*) + val resolvent = + Drule.infer_instantiate' ctxt [NONE, NONE, SOME B, SOME a] @{thm PiE} + in + HEADGOAL (resolve_tac ctxt [resolvent]) + THEN SOMEGOAL (known_tac ctxt) + end) + +fun induction_tac p A x y ctxt = + let + val [p, A, x, y] = map (Thm.cterm_of ctxt) [p, A, x, y] + in + HEADGOAL (resolve_tac ctxt + [Drule.infer_instantiate' ctxt [SOME p, SOME A, SOME x, SOME y] @{thm IdE}]) + end + +val side_conds_tac = TRY oo typechk_tac + +fun equality_context_tac fact ctxt = + let + val eq_th = Proof_Context.get_fact_single ctxt fact + val (p, (A, x, y)) = (Lib.dest_typing ##> dest_Id) (Thm.prop_of eq_th) + + val hyps = + Facts.props (Proof_Context.facts_of ctxt) + |> filter (fn (th, _) => Lib.is_typing (Thm.prop_of th)) + |> map (Lib.dest_typing o Thm.prop_of o fst) + |> filter_out (fn (t, _) => + Term.aconv (t, p) orelse Item_Net.member (Inserts.get ctxt) t) + |> map (fn (t, T) => ((t, T), Lib.subterm_count_distinct [p, x, y] T)) + |> filter (fn (_, i) => i > 0) + (*`t1: T1` comes before `t2: T2` if T1 contains t2 as subterm. + If they are incomparable, then order by decreasing + `subterm_count [p, x, y] T`*) + |> sort (fn (((t1, _), i), ((_, T2), j)) => + Lib.cond_order (Lib.subterm_order T2 t1) (int_ord (j, i))) + |> map #1 + + val record_inserts = + Inserts.map (fold (fn (t, _) => fn net => Item_Net.update t net) hyps) + + val tac = + fold (fn hyp => fn tac => tac THEN HEADGOAL (push_hyp_tac hyp ctxt)) + hyps all_tac + THEN ( + induction_tac p A x y ctxt + THEN RANGE (replicate 3 (typechk_tac ctxt) @ [side_conds_tac ctxt]) 1 + ) + THEN ( + REPEAT_DETERM_N (length hyps) (SOMEGOAL (resolve_tac ctxt @{thms PiI})) + THEN ALLGOALS (side_conds_tac ctxt) + ) + in + fn (ctxt, st) => Method.CONTEXT (record_inserts ctxt) (tac st) + end + +end diff --git a/spartan/lib/focus.ML b/spartan/lib/focus.ML new file mode 100644 index 0000000..dd8d3d9 --- /dev/null +++ b/spartan/lib/focus.ML @@ -0,0 +1,125 @@ +(* Title: focus.ML + Author: Makarius Wenzel, Joshua Chen + +A modified version of the Isar `subgoal` command +that keeps schematic variables in the goal state. + +Modified from code originally written by Makarius Wenzel. +*) + +local + +fun param_bindings ctxt (param_suffix, raw_param_specs) st = + let + val _ = if Thm.no_prems st then error "No subgoals!" 
else () + val subgoal = #1 (Logic.dest_implies (Thm.prop_of st)) + val subgoal_params = + map (apfst (Name.internal o Name.clean)) (Term.strip_all_vars subgoal) + |> Term.variant_frees subgoal |> map #1 + + val n = length subgoal_params + val m = length raw_param_specs + val _ = + m <= n orelse + error ("Excessive subgoal parameter specification" ^ + Position.here_list (map snd (drop n raw_param_specs))) + + val param_specs = + raw_param_specs |> map + (fn (NONE, _) => NONE + | (SOME x, pos) => + let + val b = #1 (#1 (Proof_Context.cert_var (Binding.make (x, pos), NONE, NoSyn) ctxt)) + val _ = Variable.check_name b + in SOME b end) + |> param_suffix ? append (replicate (n - m) NONE) + + fun bindings (SOME x :: xs) (_ :: ys) = x :: bindings xs ys + | bindings (NONE :: xs) (y :: ys) = Binding.name y :: bindings xs ys + | bindings _ ys = map Binding.name ys + in bindings param_specs subgoal_params end + +fun gen_schematic_subgoal prep_atts raw_result_binding raw_prems_binding param_specs state = + let + val _ = Proof.assert_backward state + + val state1 = state + |> Proof.map_context (Proof_Context.set_mode Proof_Context.mode_schematic) + |> Proof.refine_insert [] + + val {context = ctxt, facts = facts, goal = st} = Proof.raw_goal state1 + + val result_binding = apsnd (map (prep_atts ctxt)) raw_result_binding + val (prems_binding, do_prems) = + (case raw_prems_binding of + SOME (b, raw_atts) => ((b, map (prep_atts ctxt) raw_atts), true) + | NONE => (Binding.empty_atts, false)) + + val (subgoal_focus, _) = + (if do_prems then Subgoal.focus_prems else Subgoal.focus_params) ctxt + 1 (SOME (param_bindings ctxt param_specs st)) st + + fun after_qed (ctxt'', [[result]]) = + Proof.end_block #> (fn state' => + let + val ctxt' = Proof.context_of state' + val results' = + Proof_Context.export ctxt'' ctxt' (Conjunction.elim_conjunctions result) + in + state' + |> Proof.refine_primitive (fn _ => fn _ => + Subgoal.retrofit ctxt'' ctxt' (#params subgoal_focus) (#asms subgoal_focus) 1 + (Goal.protect 0 result) st + |> Seq.hd) + |> Proof.map_context + (#2 o Proof_Context.note_thmss "" [(result_binding, [(results', [])])]) + end) + #> Proof.reset_facts + #> Proof.enter_backward + in + state1 + |> Proof.enter_forward + |> Proof.using_facts [] + |> Proof.begin_block + |> Proof.map_context (fn _ => + #context subgoal_focus + |> Proof_Context.note_thmss "" [(prems_binding, [(#prems subgoal_focus, [])])] |> #2) + |> Proof.internal_goal (K (K ())) (Proof_Context.get_mode ctxt) true "subgoal" + NONE after_qed [] [] [(Binding.empty_atts, [(Thm.term_of (#concl subgoal_focus), [])])] |> #2 + |> Proof.using_facts facts + |> pair subgoal_focus + end + +val opt_fact_binding = + Scan.optional (Parse.binding -- Parse.opt_attribs || Parse.attribs >> pair Binding.empty) + Binding.empty_atts + +val for_params = + Scan.optional + (\<^keyword>\<open>vars\<close> |-- + Parse.!!! ((Scan.option Parse.dots >> is_some) -- + (Scan.repeat1 (Parse.maybe_position Parse.name_position)))) + (false, []) + +val schematic_subgoal_cmd = gen_schematic_subgoal Attrib.attribute_cmd + +val parser = + opt_fact_binding + -- (Scan.option (\<^keyword>\<open>premises\<close> |-- Parse.!!! 
opt_fact_binding)) + -- for_params >> (fn ((a, b), c) => + Toplevel.proofs (Seq.make_results o Seq.single o #2 o schematic_subgoal_cmd a b c)) + +in + +(** Outer syntax commands **) + +val _ = Outer_Syntax.command \<^command_keyword>\<open>focus\<close> + "focus on first subgoal within backward refinement, without instantiating schematic vars" + parser + +val _ = Outer_Syntax.command \<^command_keyword>\<open>\<guillemotright>\<close> "focus bullet" parser +val _ = Outer_Syntax.command \<^command_keyword>\<open>\<^item>\<close> "focus bullet" parser +val _ = Outer_Syntax.command \<^command_keyword>\<open>\<^enum>\<close> "focus bullet" parser +val _ = Outer_Syntax.command \<^command_keyword>\<open>~\<close> "focus bullet" parser + +end diff --git a/spartan/lib/goals.ML b/spartan/lib/goals.ML new file mode 100644 index 0000000..ce23751 --- /dev/null +++ b/spartan/lib/goals.ML @@ -0,0 +1,214 @@ +(* Title: goals.ML + Author: Makarius Wenzel, Joshua Chen + +Goal statements and proof term export. + +Modified from code originally written by Makarius Wenzel. +*) + +local + +val long_keyword = + Parse_Spec.includes >> K "" || + Parse_Spec.long_statement_keyword + +val long_statement = + Scan.optional + (Parse_Spec.opt_thm_name ":" --| Scan.ahead long_keyword) + Binding.empty_atts -- + Scan.optional Parse_Spec.includes [] -- Parse_Spec.long_statement + >> (fn ((binding, includes), (elems, concl)) => + (true, binding, includes, elems, concl)) + +val short_statement = + Parse_Spec.statement -- Parse_Spec.if_statement -- Parse.for_fixes + >> (fn ((shows, assumes), fixes) => + (false, Binding.empty_atts, [], + [Element.Fixes fixes, Element.Assumes assumes], + Element.Shows shows)) + +fun prep_statement prep_att prep_stmt raw_elems raw_stmt ctxt = + let + val (stmt, elems_ctxt) = prep_stmt raw_elems raw_stmt ctxt + val prems = Assumption.local_prems_of elems_ctxt ctxt + val stmt_ctxt = fold (fold (Variable.auto_fixes o fst) o snd) + stmt elems_ctxt + in + case raw_stmt of + Element.Shows _ => + let val stmt' = Attrib.map_specs (map prep_att) stmt + in (([], prems, stmt', NONE), stmt_ctxt) end + | Element.Obtains raw_obtains => + let + val asms_ctxt = stmt_ctxt + |> fold (fn ((name, _), asm) => + snd o Proof_Context.add_assms Assumption.assume_export + [((name, [Context_Rules.intro_query NONE]), asm)]) stmt + val that = Assumption.local_prems_of asms_ctxt stmt_ctxt + val ([(_, that')], that_ctxt) = asms_ctxt + |> Proof_Context.set_stmt true + |> Proof_Context.note_thmss "" + [((Binding.name Auto_Bind.thatN, []), [(that, [])])] + ||> Proof_Context.restore_stmt asms_ctxt + + val stmt' = [ + (Binding.empty_atts, + [(#2 (#1 (Obtain.obtain_thesis ctxt)), [])]) + ] + in + ((Obtain.obtains_attribs raw_obtains, prems, stmt', SOME that'), + that_ctxt) + end + end + +fun define_proof_term name (local_name, [th]) lthy = + let + fun make_name_binding suffix local_name = + let val base_local_name = Long_Name.base_name local_name + in + Binding.qualified_name + ((case base_local_name of + "" => name + | _ => base_local_name) + ^(case suffix of + SOME "prf" => "_prf" + | SOME "def" => "_def" + | _ => "")) + end + + val (prems, concl) = + (Logic.strip_assums_hyp (Thm.prop_of th), + Logic.strip_assums_concl (Thm.prop_of th)) + in + if not (Lib.is_typing concl) then + ([], lthy) + else let + val prems_vars = distinct Term.aconv (flat + (map (Lib.collect_subterms is_Var) prems)) + + val concl_vars = Lib.collect_subterms is_Var + (Lib.term_of_typing concl) + + val params = inter Term.aconv concl_vars prems_vars + + val 
prf_tm = + fold_rev lambda params (Lib.term_of_typing concl) + + val ((_, (_, raw_def)), lthy') = Local_Theory.define + ((make_name_binding NONE local_name, Mixfix.NoSyn), + ((make_name_binding (SOME "prf") local_name, []), prf_tm)) lthy + + val def = + fold + (fn th1 => fn th2 => Thm.combination th2 th1) + (map (Thm.reflexive o Thm.cterm_of lthy) params) + raw_def + + val ((_, def'), lthy'') = Local_Theory.note + ((make_name_binding (SOME "def") local_name, []), [def]) + lthy' + in + (def', lthy'') + end + end + | define_proof_term _ _ _ = error + ("Unimplemented: handling proof terms of multiple facts in" + ^" single result") + +fun gen_schematic_theorem + bundle_includes prep_att prep_stmt + gen_prf long kind before_qed after_qed (name, raw_atts) + raw_includes raw_elems raw_concl int lthy = + let + val _ = Local_Theory.assert lthy; + + val elems = raw_elems |> map (Element.map_ctxt_attrib (prep_att lthy)) + val ((more_atts, prems, stmt, facts), goal_ctxt) = lthy + |> bundle_includes raw_includes + |> prep_statement (prep_att lthy) prep_stmt elems raw_concl + val atts = more_atts @ map (prep_att lthy) raw_atts + val pos = Position.thread_data () + + val prems_name = if long then Auto_Bind.assmsN else Auto_Bind.thatN + + fun after_qed' results goal_ctxt' = + let + val results' = burrow + (map (Goal.norm_result lthy) o Proof_Context.export goal_ctxt' lthy) + results + + val ((res, lthy'), substmts) = + if forall (Binding.is_empty_atts o fst) stmt + then ((map (pair "") results', lthy), false) + else + (Local_Theory.notes_kind kind + (map2 (fn (b, _) => fn ths => (b, [(ths, [])])) stmt results') + lthy, + true) + + val (res', lthy'') = + if gen_prf + then + let + val (prf_tm_defs, lthy'') = + fold + (fn result => fn (defs, lthy) => + apfst (fn new_defs => defs @ new_defs) + (define_proof_term (Binding.name_of name) result lthy)) + res ([], lthy') + + val res_folded = + map (apsnd (map (Local_Defs.fold lthy'' prf_tm_defs))) res + in + Local_Theory.notes_kind kind + [((name, @{attributes [typechk]} @ atts), + [(maps #2 res_folded, [])])] + lthy'' + end + else + Local_Theory.notes_kind kind + [((name, atts), [(maps #2 res, [])])] + lthy' + + val _ = Proof_Display.print_results int pos lthy'' + ((kind, Binding.name_of name), map (fn (_, ths) => ("", ths)) res') + + val _ = + if substmts then map + (fn (name, ths) => Proof_Display.print_results int pos lthy'' + (("and", name), [("", ths)])) + res + else [] + in + after_qed results' lthy'' + end + in + goal_ctxt + |> not (null prems) ? 
+ (Proof_Context.note_thmss "" [((Binding.name prems_name, []), [(prems, [])])] #> snd) + |> Proof.theorem before_qed after_qed' (map snd stmt) + |> (case facts of NONE => I | SOME ths => Proof.refine_insert ths) + end + +val schematic_theorem_cmd = + gen_schematic_theorem + Bundle.includes_cmd + Attrib.check_src + Expression.read_statement + +fun theorem spec descr = + Outer_Syntax.local_theory_to_proof' spec ("state " ^ descr) + (Scan.option (Args.parens (Args.$$$ "derive")) + -- (long_statement || short_statement) >> + (fn (opt_derive, (long, binding, includes, elems, concl)) => + schematic_theorem_cmd + (case opt_derive of SOME "derive" => true | _ => false) + long descr NONE (K I) binding includes elems concl)) +in + +val _ = theorem \<^command_keyword>\<open>Theorem\<close> "Theorem" +val _ = theorem \<^command_keyword>\<open>Lemma\<close> "Lemma" +val _ = theorem \<^command_keyword>\<open>Corollary\<close> "Corollary" +val _ = theorem \<^command_keyword>\<open>Proposition\<close> "Proposition" + +end diff --git a/spartan/lib/implicits.ML b/spartan/lib/implicits.ML new file mode 100644 index 0000000..04fc825 --- /dev/null +++ b/spartan/lib/implicits.ML @@ -0,0 +1,78 @@ +structure Implicits : +sig + +val implicit_defs: Proof.context -> (term * term) Symtab.table +val implicit_defs_attr: attribute +val make_holes: Proof.context -> term -> term + +end = struct + +structure Defs = Generic_Data ( + type T = (term * term) Symtab.table + val empty = Symtab.empty + val extend = I + val merge = Symtab.merge (Term.aconv o #1) +) + +val implicit_defs = Defs.get o Context.Proof + +val implicit_defs_attr = Thm.declaration_attribute (fn th => + let + val (t, def) = Lib.dest_eq (Thm.prop_of th) + val (head, args) = Term.strip_comb t + val def' = fold_rev lambda args def + in + Defs.map (Symtab.update (Term.term_name head, (head, def'))) + end) + +fun make_holes ctxt = + let + fun iarg_to_hole (Const (\<^const_name>\<open>iarg\<close>, T)) = + Const (\<^const_name>\<open>hole\<close>, T) + | iarg_to_hole t = t + + fun expand head args = + let + fun betapplys (head', args') = + Term.betapplys (map_aterms iarg_to_hole head', args') + in + case head of + Abs (x, T, t) => + list_comb (Abs (x, T, Lib.traverse_term expand t), args) + | _ => + case Symtab.lookup (implicit_defs ctxt) (Term.term_name head) of + SOME (t, def) => betapplys + (Envir.expand_atom + (Term.fastype_of head) + (Term.fastype_of t, def), + args) + | NONE => list_comb (head, args) + end + + fun holes_to_vars t = + let + val count = Lib.subterm_count (Const (\<^const_name>\<open>hole\<close>, dummyT)) + + fun subst (Const (\<^const_name>\<open>hole\<close>, T)) (Var (idx, _)::_) Ts = + let + val bounds = map Bound (0 upto (length Ts - 1)) + val T' = foldr1 (op -->) (Ts @ [T]) + in + foldl1 (op $) (Var (idx, T')::bounds) + end + | subst (Abs (x, T, t)) vs Ts = Abs (x, T, subst t vs (T::Ts)) + | subst (t $ u) vs Ts = + let val n = count t + in subst t (take n vs) Ts $ subst u (drop n vs) Ts end + | subst t _ _ = t + + val vars = map (fn n => Var ((n, 0), dummyT)) + (Name.invent (Variable.names_of ctxt) "*" (count t)) + in + subst t vars [] + end + in + Lib.traverse_term expand #> holes_to_vars + end + +end diff --git a/spartan/lib/lib.ML b/spartan/lib/lib.ML new file mode 100644 index 0000000..fd5c597 --- /dev/null +++ b/spartan/lib/lib.ML @@ -0,0 +1,143 @@ +structure Lib : +sig + +(*Lists*) +val max: ('a * 'a -> bool) -> 'a list -> 'a +val maxint: int list -> int + +(*Terms*) +val is_rigid: term -> bool +val dest_eq: term -> term * term 
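+(*dest_eq destructs a Pure equality "t \<equiv> def" into (t, def); see
+  Implicits.implicit_defs_attr for its use on definitional theorems*)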
+val mk_Var: string -> int -> typ -> term +val lambda_var: term -> term -> term + +val is_typing: term -> bool +val dest_typing: term -> term * term +val term_of_typing: term -> term +val type_of_typing: term -> term +val mk_Pi: term -> term -> term -> term + +val typing_of_term: term -> term + +(*Goals*) +val rigid_typing_concl: term -> bool + +(*Subterms*) +val has_subterm: term list -> term -> bool +val subterm_count: term -> term -> int +val subterm_count_distinct: term list -> term -> int +val traverse_term: (term -> term list -> term) -> term -> term +val collect_subterms: (term -> bool) -> term -> term list + +(*Orderings*) +val subterm_order: term -> term -> order +val cond_order: order -> order -> order + +end = struct + + +(** Lists **) + +fun max gt (x::xs) = fold (fn a => fn b => if gt (a, b) then a else b) xs x + | max _ [] = error "max of empty list" + +val maxint = max (op >) + + +(** Terms **) + +(* Meta *) + +val is_rigid = not o is_Var o head_of + +fun dest_eq (Const (\<^const_name>\<open>Pure.eq\<close>, _) $ t $ def) = (t, def) + | dest_eq _ = error "dest_eq" + +fun mk_Var name idx T = Var ((name, idx), T) + +fun lambda_var x tm = + let + fun var_args (Var (idx, T)) = Var (idx, \<^typ>\<open>o\<close> --> T) $ x + | var_args t = t + in + tm |> map_aterms var_args + |> lambda x + end + +(* Object *) + +fun is_typing (Const (\<^const_name>\<open>has_type\<close>, _) $ _ $ _) = true + | is_typing _ = false + +fun dest_typing (Const (\<^const_name>\<open>has_type\<close>, _) $ t $ T) = (t, T) + | dest_typing _ = error "dest_typing" + +val term_of_typing = #1 o dest_typing +val type_of_typing = #2 o dest_typing + +fun mk_Pi v typ body = Const (\<^const_name>\<open>Pi\<close>, dummyT) $ typ $ lambda v body + +fun typing_of_term tm = \<^const>\<open>has_type\<close> $ tm $ Var (("*?", 0), \<^typ>\<open>o\<close>) + + +(** Goals **) + +fun rigid_typing_concl goal = + let val concl = Logic.strip_assums_concl goal + in is_typing concl andalso is_rigid (term_of_typing concl) end + + +(** Subterms **) + +fun has_subterm tms = + Term.exists_subterm + (foldl1 (op orf) (map (fn t => fn s => Term.aconv_untyped (s, t)) tms)) + +fun subterm_count s t = + let + fun count (t1 $ t2) i = i + count t1 0 + count t2 0 + | count (Abs (_, _, t)) i = i + count t 0 + | count t i = if Term.aconv_untyped (s, t) then i + 1 else i + in + count t 0 + end + +(*Number of distinct subterms in `tms` that appear in `tm`*) +fun subterm_count_distinct tms tm = + length (filter I (map (fn t => has_subterm [t] tm) tms)) + +(* + "Folds" a function f over the term structure of t by traversing t from child + nodes upwards through parents. At each node n in the term syntax tree, f is + additionally passed a list of the results of f at all children of n. 
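+
+  For instance (an illustrative f, not part of this library):
+
+    fun f head args =
+      if head aconv t0 then list_comb (t1, args) else list_comb (head, args)
+
+  makes traverse_term f replace every application head t0 by t1 while
+  rebuilding all other applications unchanged; Implicits.make_holes uses
+  this same scheme to expand implicit-argument definitions at each head.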
+*) +fun traverse_term f t = + let + fun map_aux (Abs (x, T, t)) = Abs (x, T, map_aux t) + | map_aux t = + let + val (head, args) = Term.strip_comb t + val args' = map map_aux args + in + f head args' + end + in + map_aux t + end + +fun collect_subterms f (t $ u) = collect_subterms f t @ collect_subterms f u + | collect_subterms f (Abs (_, _, t)) = collect_subterms f t + | collect_subterms f t = if f t then [t] else [] + + +(** Orderings **) + +fun subterm_order t1 t2 = + if has_subterm [t1] t2 then LESS + else if has_subterm [t2] t1 then GREATER + else EQUAL + +fun cond_order o1 o2 = case o1 of EQUAL => o2 | _ => o1 + + +end diff --git a/spartan/lib/rewrite.ML b/spartan/lib/rewrite.ML new file mode 100644 index 0000000..f9c5d8e --- /dev/null +++ b/spartan/lib/rewrite.ML @@ -0,0 +1,465 @@ +(* Title: rewrite.ML + Author: Christoph Traut, Lars Noschinski, TU Muenchen + Modified: Joshua Chen, University of Innsbruck + +This is a rewrite method that supports subterm-selection based on patterns. + +The patterns accepted by rewrite are of the following form: + <atom> ::= <term> | "concl" | "asm" | "for" "(" <names> ")" + <pattern> ::= (in <atom> | at <atom>) [<pattern>] + <args> ::= [<pattern>] ("to" <term>) <thms> + +This syntax was clearly inspired by Gonthier's and Tassi's language of +patterns but has diverged significantly during its development. + +We also allow introduction of identifiers for bound variables, +which can then be used to match arbitrary subterms inside abstractions. + +This code is slightly modified from the original at HOL/Library/rewrite.ML, +to incorporate auto-typechecking for type theory. +*) + +infix 1 then_pconv; +infix 0 else_pconv; + +signature REWRITE = +sig + type patconv = Proof.context -> Type.tyenv * (string * term) list -> cconv + val then_pconv: patconv * patconv -> patconv + val else_pconv: patconv * patconv -> patconv + val abs_pconv: patconv -> string option * typ -> patconv (*XXX*) + val fun_pconv: patconv -> patconv + val arg_pconv: patconv -> patconv + val imp_pconv: patconv -> patconv + val params_pconv: patconv -> patconv + val forall_pconv: patconv -> string option * typ option -> patconv + val all_pconv: patconv + val for_pconv: patconv -> (string option * typ option) list -> patconv + val concl_pconv: patconv -> patconv + val asm_pconv: patconv -> patconv + val asms_pconv: patconv -> patconv + val judgment_pconv: patconv -> patconv + val in_pconv: patconv -> patconv + val match_pconv: patconv -> term * (string option * typ) list -> patconv + val rewrs_pconv: term option -> thm list -> patconv + + datatype ('a, 'b) pattern = At | In | Term of 'a | Concl | Asm | For of 'b list + + val mk_hole: int -> typ -> term + + val rewrite_conv: Proof.context + -> (term * (string * typ) list, string * typ option) pattern list * term option + -> thm list + -> conv +end + +structure Rewrite : REWRITE = +struct + +datatype ('a, 'b) pattern = At | In | Term of 'a | Concl | Asm | For of 'b list + +exception NO_TO_MATCH + +val holeN = Name.internal "_hole" + +fun prep_meta_eq ctxt = Simplifier.mksimps ctxt #> map Drule.zero_var_indexes + + +(* holes *) + +fun mk_hole i T = Var ((holeN, i), T) + +fun is_hole (Var ((name, _), _)) = (name = holeN) + | is_hole _ = false + +fun is_hole_const (Const (\<^const_name>\<open>rewrite_HOLE\<close>, _)) = true + | is_hole_const _ = false + +val hole_syntax = + let + (* Modified variant of Term.replace_hole *) + fun replace_hole Ts (Const (\<^const_name>\<open>rewrite_HOLE\<close>, T)) i = + (list_comb (mk_hole i (Ts ---> T), 
map_range Bound (length Ts)), i + 1) + | replace_hole Ts (Abs (x, T, t)) i = + let val (t', i') = replace_hole (T :: Ts) t i + in (Abs (x, T, t'), i') end + | replace_hole Ts (t $ u) i = + let + val (t', i') = replace_hole Ts t i + val (u', i'') = replace_hole Ts u i' + in (t' $ u', i'') end + | replace_hole _ a i = (a, i) + fun prep_holes ts = #1 (fold_map (replace_hole []) ts 1) + in + Context.proof_map (Syntax_Phases.term_check 101 "hole_expansion" (K prep_holes)) + #> Proof_Context.set_mode Proof_Context.mode_pattern + end + + +(* pattern conversions *) + +type patconv = Proof.context -> Type.tyenv * (string * term) list -> cterm -> thm + +fun (cv1 then_pconv cv2) ctxt tytenv ct = (cv1 ctxt tytenv then_conv cv2 ctxt tytenv) ct + +fun (cv1 else_pconv cv2) ctxt tytenv ct = (cv1 ctxt tytenv else_conv cv2 ctxt tytenv) ct + +fun raw_abs_pconv cv ctxt tytenv ct = + case Thm.term_of ct of + Abs _ => CConv.abs_cconv (fn (x, ctxt') => cv x ctxt' tytenv) ctxt ct + | t => raise TERM ("raw_abs_pconv", [t]) + +fun raw_fun_pconv cv ctxt tytenv ct = + case Thm.term_of ct of + _ $ _ => CConv.fun_cconv (cv ctxt tytenv) ct + | t => raise TERM ("raw_fun_pconv", [t]) + +fun raw_arg_pconv cv ctxt tytenv ct = + case Thm.term_of ct of + _ $ _ => CConv.arg_cconv (cv ctxt tytenv) ct + | t => raise TERM ("raw_arg_pconv", [t]) + +fun abs_pconv cv (s,T) ctxt (tyenv, ts) ct = + let val u = Thm.term_of ct + in + case try (fastype_of #> dest_funT) u of + NONE => raise TERM ("abs_pconv: no function type", [u]) + | SOME (U, _) => + let + val tyenv' = + if T = dummyT then tyenv + else Sign.typ_match (Proof_Context.theory_of ctxt) (T, U) tyenv + val eta_expand_cconv = + case u of + Abs _=> Thm.reflexive + | _ => CConv.rewr_cconv @{thm eta_expand} + fun add_ident NONE _ l = l + | add_ident (SOME name) ct l = (name, Thm.term_of ct) :: l + val abs_cv = CConv.abs_cconv (fn (ct, ctxt) => cv ctxt (tyenv', add_ident s ct ts)) ctxt + in (eta_expand_cconv then_conv abs_cv) ct end + handle Pattern.MATCH => raise TYPE ("abs_pconv: types don't match", [T,U], [u]) + end + +fun fun_pconv cv ctxt tytenv ct = + case Thm.term_of ct of + _ $ _ => CConv.fun_cconv (cv ctxt tytenv) ct + | Abs (_, T, _ $ Bound 0) => abs_pconv (fun_pconv cv) (NONE, T) ctxt tytenv ct + | t => raise TERM ("fun_pconv", [t]) + +local + +fun arg_pconv_gen cv0 cv ctxt tytenv ct = + case Thm.term_of ct of + _ $ _ => cv0 (cv ctxt tytenv) ct + | Abs (_, T, _ $ Bound 0) => abs_pconv (arg_pconv_gen cv0 cv) (NONE, T) ctxt tytenv ct + | t => raise TERM ("arg_pconv_gen", [t]) + +in + +fun arg_pconv ctxt = arg_pconv_gen CConv.arg_cconv ctxt +fun imp_pconv ctxt = arg_pconv_gen (CConv.concl_cconv 1) ctxt + +end + +(* Move to B in !!x_1 ... x_n. B. 
Do not eta-expand *) +fun params_pconv cv ctxt tytenv ct = + let val pconv = + case Thm.term_of ct of + Const (\<^const_name>\<open>Pure.all\<close>, _) $ Abs _ => (raw_arg_pconv o raw_abs_pconv) (fn _ => params_pconv cv) + | Const (\<^const_name>\<open>Pure.all\<close>, _) => raw_arg_pconv (params_pconv cv) + | _ => cv + in pconv ctxt tytenv ct end + +fun forall_pconv cv ident ctxt tytenv ct = + case Thm.term_of ct of + Const (\<^const_name>\<open>Pure.all\<close>, T) $ _ => + let + val def_U = T |> dest_funT |> fst |> dest_funT |> fst + val ident' = apsnd (the_default (def_U)) ident + in arg_pconv (abs_pconv cv ident') ctxt tytenv ct end + | t => raise TERM ("forall_pconv", [t]) + +fun all_pconv _ _ = Thm.reflexive + +fun for_pconv cv idents ctxt tytenv ct = + let + fun f rev_idents (Const (\<^const_name>\<open>Pure.all\<close>, _) $ t) = + let val (rev_idents', cv') = f rev_idents (case t of Abs (_,_,u) => u | _ => t) + in + case rev_idents' of + [] => ([], forall_pconv cv' (NONE, NONE)) + | (x :: xs) => (xs, forall_pconv cv' x) + end + | f rev_idents _ = (rev_idents, cv) + in + case f (rev idents) (Thm.term_of ct) of + ([], cv') => cv' ctxt tytenv ct + | _ => raise CTERM ("for_pconv", [ct]) + end + +fun concl_pconv cv ctxt tytenv ct = + case Thm.term_of ct of + (Const (\<^const_name>\<open>Pure.imp\<close>, _) $ _) $ _ => imp_pconv (concl_pconv cv) ctxt tytenv ct + | _ => cv ctxt tytenv ct + +fun asm_pconv cv ctxt tytenv ct = + case Thm.term_of ct of + (Const (\<^const_name>\<open>Pure.imp\<close>, _) $ _) $ _ => CConv.with_prems_cconv ~1 (cv ctxt tytenv) ct + | t => raise TERM ("asm_pconv", [t]) + +fun asms_pconv cv ctxt tytenv ct = + case Thm.term_of ct of + (Const (\<^const_name>\<open>Pure.imp\<close>, _) $ _) $ _ => + ((CConv.with_prems_cconv ~1 oo cv) else_pconv imp_pconv (asms_pconv cv)) ctxt tytenv ct + | t => raise TERM ("asms_pconv", [t]) + +fun judgment_pconv cv ctxt tytenv ct = + if Object_Logic.is_judgment ctxt (Thm.term_of ct) + then arg_pconv cv ctxt tytenv ct + else cv ctxt tytenv ct + +fun in_pconv cv ctxt tytenv ct = + (cv else_pconv + raw_fun_pconv (in_pconv cv) else_pconv + raw_arg_pconv (in_pconv cv) else_pconv + raw_abs_pconv (fn _ => in_pconv cv)) + ctxt tytenv ct + +fun replace_idents idents t = + let + fun subst ((n1, s)::ss) (t as Free (n2, _)) = if n1 = n2 then s else subst ss t + | subst _ t = t + in Term.map_aterms (subst idents) t end + +fun match_pconv cv (t,fixes) ctxt (tyenv, env_ts) ct = + let + val t' = replace_idents env_ts t + val thy = Proof_Context.theory_of ctxt + val u = Thm.term_of ct + + fun descend_hole fixes (Abs (_, _, t)) = + (case descend_hole fixes t of + NONE => NONE + | SOME (fix :: fixes', pos) => SOME (fixes', abs_pconv pos fix) + | SOME ([], _) => raise Match (* less fixes than abstractions on path to hole *)) + | descend_hole fixes (t as l $ r) = + let val (f, _) = strip_comb t + in + if is_hole f + then SOME (fixes, cv) + else + (case descend_hole fixes l of + SOME (fixes', pos) => SOME (fixes', fun_pconv pos) + | NONE => + (case descend_hole fixes r of + SOME (fixes', pos) => SOME (fixes', arg_pconv pos) + | NONE => NONE)) + end + | descend_hole fixes t = + if is_hole t then SOME (fixes, cv) else NONE + + val to_hole = descend_hole (rev fixes) #> the_default ([], cv) #> snd + in + case try (Pattern.match thy (apply2 Logic.mk_term (t',u))) (tyenv, Vartab.empty) of + NONE => raise TERM ("match_pconv: Does not match pattern", [t, t',u]) + | SOME (tyenv', _) => to_hole t ctxt (tyenv', env_ts) ct + end + +fun rewrs_pconv to thms ctxt 
(tyenv, env_ts) = + let + fun instantiate_normalize_env ctxt env thm = + let + val prop = Thm.prop_of thm + val norm_type = Envir.norm_type o Envir.type_env + val insts = Term.add_vars prop [] + |> map (fn x as (s, T) => + ((s, norm_type env T), Thm.cterm_of ctxt (Envir.norm_term env (Var x)))) + val tyinsts = Term.add_tvars prop [] + |> map (fn x => (x, Thm.ctyp_of ctxt (norm_type env (TVar x)))) + in Drule.instantiate_normalize (tyinsts, insts) thm end + + fun unify_with_rhs context to env thm = + let + val (_, rhs) = thm |> Thm.concl_of |> Logic.dest_equals + val env' = Pattern.unify context (Logic.mk_term to, Logic.mk_term rhs) env + handle Pattern.Unif => raise NO_TO_MATCH + in env' end + + fun inst_thm_to _ (NONE, _) thm = thm + | inst_thm_to (ctxt : Proof.context) (SOME to, env) thm = + instantiate_normalize_env ctxt (unify_with_rhs (Context.Proof ctxt) to env thm) thm + + fun inst_thm ctxt idents (to, tyenv) thm = + let + (* Replace any identifiers with their corresponding bound variables. *) + val maxidx = Term.maxidx_typs (map (snd o snd) (Vartab.dest tyenv)) 0 + val env = Envir.Envir {maxidx = maxidx, tenv = Vartab.empty, tyenv = tyenv} + val maxidx = Envir.maxidx_of env |> fold Term.maxidx_term (the_list to) + val thm' = Thm.incr_indexes (maxidx + 1) thm + in SOME (inst_thm_to ctxt (Option.map (replace_idents idents) to, env) thm') end + handle NO_TO_MATCH => NONE + + in CConv.rewrs_cconv (map_filter (inst_thm ctxt env_ts (to, tyenv)) thms) end + +fun rewrite_conv ctxt (pattern, to) thms ct = + let + fun apply_pat At = judgment_pconv + | apply_pat In = in_pconv + | apply_pat Asm = params_pconv o asms_pconv + | apply_pat Concl = params_pconv o concl_pconv + | apply_pat (For idents) = (fn cv => for_pconv cv (map (apfst SOME) idents)) + | apply_pat (Term x) = (fn cv => match_pconv cv (apsnd (map (apfst SOME)) x)) + + val cv = fold_rev apply_pat pattern + + fun distinct_prems th = + case Seq.pull (distinct_subgoals_tac th) of + NONE => th + | SOME (th', _) => th' + + val rewrite = rewrs_pconv to (maps (prep_meta_eq ctxt) thms) + in cv rewrite ctxt (Vartab.empty, []) ct |> distinct_prems end + +fun rewrite_export_tac ctxt (pat, pat_ctxt) thms = + let + val export = case pat_ctxt of + NONE => I + | SOME inner => singleton (Proof_Context.export inner ctxt) + in CCONVERSION (export o rewrite_conv ctxt pat thms) end + +val _ = + Theory.setup + let + fun mk_fix s = (Binding.name s, NONE, NoSyn) + + val raw_pattern : (string, binding * string option * mixfix) pattern list parser = + let + val sep = (Args.$$$ "at" >> K At) || (Args.$$$ "in" >> K In) + val atom = (Args.$$$ "asm" >> K Asm) || + (Args.$$$ "concl" >> K Concl) || + (Args.$$$ "for" |-- Args.parens (Scan.optional Parse.vars []) >> For) || + (Parse.term >> Term) + val sep_atom = sep -- atom >> (fn (s,a) => [s,a]) + + fun append_default [] = [Concl, In] + | append_default (ps as Term _ :: _) = Concl :: In :: ps + | append_default [For x, In] = [For x, Concl, In] + | append_default (For x :: (ps as In :: Term _:: _)) = For x :: Concl :: ps + | append_default ps = ps + + in Scan.repeats sep_atom >> (rev #> append_default) end + + fun context_lift (scan : 'a parser) f = fn (context : Context.generic, toks) => + let + val (r, toks') = scan toks + val (r', context') = Context.map_proof_result (fn ctxt => f ctxt r) context + in (r', (context', toks' : Token.T list)) end + + fun read_fixes fixes ctxt = + let fun read_typ (b, rawT, mx) = (b, Option.map (Syntax.read_typ ctxt) rawT, mx) + in Proof_Context.add_fixes (map read_typ fixes) ctxt 
end + + fun prep_pats ctxt (ps : (string, binding * string option * mixfix) pattern list) = + let + fun add_constrs ctxt n (Abs (x, T, t)) = + let + val (x', ctxt') = yield_singleton Proof_Context.add_fixes (mk_fix x) ctxt + in + (case add_constrs ctxt' (n+1) t of + NONE => NONE + | SOME ((ctxt'', n', xs), t') => + let + val U = Type_Infer.mk_param n [] + val u = Type.constraint (U --> dummyT) (Abs (x, T, t')) + in SOME ((ctxt'', n', (x', U) :: xs), u) end) + end + | add_constrs ctxt n (l $ r) = + (case add_constrs ctxt n l of + SOME (c, l') => SOME (c, l' $ r) + | NONE => + (case add_constrs ctxt n r of + SOME (c, r') => SOME (c, l $ r') + | NONE => NONE)) + | add_constrs ctxt n t = + if is_hole_const t then SOME ((ctxt, n, []), t) else NONE + + fun prep (Term s) (n, ctxt) = + let + val t = Syntax.parse_term ctxt s + val ((ctxt', n', bs), t') = + the_default ((ctxt, n, []), t) (add_constrs ctxt (n+1) t) + in (Term (t', bs), (n', ctxt')) end + | prep (For ss) (n, ctxt) = + let val (ns, ctxt') = read_fixes ss ctxt + in (For ns, (n, ctxt')) end + | prep At (n,ctxt) = (At, (n, ctxt)) + | prep In (n,ctxt) = (In, (n, ctxt)) + | prep Concl (n,ctxt) = (Concl, (n, ctxt)) + | prep Asm (n,ctxt) = (Asm, (n, ctxt)) + + val (xs, (_, ctxt')) = fold_map prep ps (0, ctxt) + + in (xs, ctxt') end + + fun prep_args ctxt (((raw_pats, raw_to), raw_ths)) = + let + + fun check_terms ctxt ps to = + let + fun safe_chop (0: int) xs = ([], xs) + | safe_chop n (x :: xs) = chop (n - 1) xs |>> cons x + | safe_chop _ _ = raise Match + + fun reinsert_pat _ (Term (_, cs)) (t :: ts) = + let val (cs', ts') = safe_chop (length cs) ts + in (Term (t, map dest_Free cs'), ts') end + | reinsert_pat _ (Term _) [] = raise Match + | reinsert_pat ctxt (For ss) ts = + let val fixes = map (fn s => (s, Variable.default_type ctxt s)) ss + in (For fixes, ts) end + | reinsert_pat _ At ts = (At, ts) + | reinsert_pat _ In ts = (In, ts) + | reinsert_pat _ Concl ts = (Concl, ts) + | reinsert_pat _ Asm ts = (Asm, ts) + + fun free_constr (s,T) = Type.constraint T (Free (s, dummyT)) + fun mk_free_constrs (Term (t, cs)) = t :: map free_constr cs + | mk_free_constrs _ = [] + + val ts = maps mk_free_constrs ps @ the_list to + |> Syntax.check_terms (hole_syntax ctxt) + val ctxt' = fold Variable.declare_term ts ctxt + val (ps', (to', ts')) = fold_map (reinsert_pat ctxt') ps ts + ||> (fn xs => case to of NONE => (NONE, xs) | SOME _ => (SOME (hd xs), tl xs)) + val _ = case ts' of (_ :: _) => raise Match | [] => () + in ((ps', to'), ctxt') end + + val (pats, ctxt') = prep_pats ctxt raw_pats + + val ths = Attrib.eval_thms ctxt' raw_ths + val to = Option.map (Syntax.parse_term ctxt') raw_to + + val ((pats', to'), ctxt'') = check_terms ctxt' pats to + + in ((pats', ths, (to', ctxt)), ctxt'') end + + val to_parser = Scan.option ((Args.$$$ "to") |-- Parse.term) + + val subst_parser = + let val scan = raw_pattern -- to_parser -- Parse.thms1 + in context_lift scan prep_args end + in + Method.setup \<^binding>\<open>rewr\<close> (subst_parser >> + (fn (pattern, inthms, (to, pat_ctxt)) => fn orig_ctxt => + SIMPLE_METHOD' + (rewrite_export_tac orig_ctxt ((pattern, to), SOME pat_ctxt) inthms))) + "single-step rewriting, allowing subterm selection via patterns" + #> + (Method.setup \<^binding>\<open>rewrite\<close> (subst_parser >> + (fn (pattern, inthms, (to, pat_ctxt)) => fn orig_ctxt => + SIMPLE_METHOD' (SIDE_CONDS + (rewrite_export_tac orig_ctxt ((pattern, to), SOME pat_ctxt) inthms) + orig_ctxt))) + "single-step rewriting with auto-typechecking") + end +end 
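The pattern language accepted by these two methods is exercised throughout the new theories in this changeset. Two representative invocations, quoted from proofs in Identity.thy and Equivalence.thy below, aim a rewrite at a named occurrence of a subterm, and steer a rewrite by its desired right-hand side:

    apply (rewrite at x in "x = y" fst_comp[symmetric])
    apply (rewrite to "g \<circ> (id B)" id_right[symmetric])

The "at x in ..." pattern selects which occurrence is rewritten; "to t" unifies the rule's right-hand side against t (see unify_with_rhs above) before the rewrite is applied.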
diff --git a/spartan/lib/tactics.ML b/spartan/lib/tactics.ML new file mode 100644 index 0000000..0e09533 --- /dev/null +++ b/spartan/lib/tactics.ML @@ -0,0 +1,143 @@ +(* Title: tactics.ML + Author: Joshua Chen + +General tactics for dependent type theory. +*) + +structure Tactics: +sig + +val assumptions_tac: Proof.context -> int -> tactic +val known_tac: Proof.context -> int -> tactic +val typechk_tac: Proof.context -> int -> tactic +val auto_typechk: bool Config.T +val SIDE_CONDS: (int -> tactic) -> Proof.context -> int -> tactic +val rule_tac: thm list -> Proof.context -> int -> tactic +val dest_tac: int option -> thm list -> Proof.context -> int -> tactic +val intro_tac: Proof.context -> int -> tactic +val intros_tac: Proof.context -> int -> tactic +val elims_tac: term option -> Proof.context -> int -> tactic + +end = struct + +(*An assumption tactic that only solves typing goals with rigid terms and + judgmental equalities without schematic variables*) +fun assumptions_tac ctxt = SUBGOAL (fn (goal, i) => + let + val concl = Logic.strip_assums_concl goal + in + if + Lib.is_typing concl andalso Lib.is_rigid (Lib.term_of_typing concl) + orelse not ((exists_subterm is_Var) concl) + then assume_tac ctxt i + else no_tac + end) + +(*Solves typing goals with rigid term by resolving with context facts and + simplifier premises, or arbitrary goals by *non-unifying* assumption*) +fun known_tac ctxt = SUBGOAL (fn (goal, i) => + let + val concl = Logic.strip_assums_concl goal + in + ((if Lib.is_typing concl andalso Lib.is_rigid (Lib.term_of_typing concl) + then + let val ths = map fst (Facts.props (Proof_Context.facts_of ctxt)) + in resolve_tac ctxt (ths @ Simplifier.prems_of ctxt) end + else K no_tac) + ORELSE' assumptions_tac ctxt) i + end) + +(*Typechecking: try to solve goals of the form "a: A" where a is rigid*) +fun typechk_tac ctxt = + let + val tac = SUBGOAL (fn (goal, i) => + if Lib.rigid_typing_concl goal + then + let val net = Tactic.build_net + ((Named_Theorems.get ctxt \<^named_theorems>\<open>typechk\<close>) + @(Named_Theorems.get ctxt \<^named_theorems>\<open>intros\<close>) + @(Elim.rules ctxt)) + in (resolve_from_net_tac ctxt net) i end + else no_tac) + in + CHANGED o REPEAT o REPEAT_ALL_NEW (known_tac ctxt ORELSE' tac) + end + +(*Many methods try to automatically discharge side conditions by typechecking. 
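+ The behaviour is governed by the auto_typechk flag declared below, and can be + changed locally inside proofs with e.g. supply [[auto_typechk=false]] (see + id_imp_equiv in Equivalence.thy).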
+ Switch this flag off to discharge by non-unifying assumption instead.*) +val auto_typechk = Attrib.setup_config_bool \<^binding>\<open>auto_typechk\<close> (K true) + +(*Combinator runs tactic and tries to discharge all new typing side conditions*) +fun SIDE_CONDS tac ctxt = + let + val side_cond_tac = + if Config.get ctxt auto_typechk then typechk_tac ctxt else known_tac ctxt + in + tac THEN_ALL_NEW (TRY o side_cond_tac) + end + +local +fun mk_rules _ ths [] = ths + | mk_rules n ths ths' = + let val ths'' = foldr1 (op @) + (map (fn th => [rotate_prems n (th RS @{thm PiE})] handle THM _ => []) ths') + in + mk_rules n (ths @ ths') ths'' + end +in + +(*Resolves with given rules, discharging as many side conditions as possible*) +fun rule_tac ths ctxt = + SIDE_CONDS (resolve_tac ctxt (mk_rules 0 [] ths)) ctxt + +(*Attempts destruct-resolution with the n-th premise of the given rules*) +fun dest_tac opt_n ths ctxt = SIDE_CONDS (dresolve_tac ctxt + (mk_rules (case opt_n of NONE => 0 | SOME 0 => 0 | SOME n => n-1) [] ths)) + ctxt + +end + +(*Applies some introduction rule*) +fun intro_tac ctxt = SUBGOAL (fn (_, i) => SIDE_CONDS + (resolve_tac ctxt (Named_Theorems.get ctxt \<^named_theorems>\<open>intros\<close>)) ctxt i) + +fun intros_tac ctxt = SUBGOAL (fn (_, i) => + (CHANGED o REPEAT o CHANGED o intro_tac ctxt) i) + +(* Basic elimination tactic *) +(*Only uses existing type judgments from the context + (performs no type synthesis)*) +fun elims_tac opt_tm ctxt = case opt_tm of + NONE => SUBGOAL (fn (_, i) => eresolve_tac ctxt (Elim.rules ctxt) i) + | SOME tm => SUBGOAL (fn (goal, i) => + let + fun elim_rule typing = + let + val hd = head_of (Lib.type_of_typing typing) + val opt_rl = Elim.get_rule ctxt hd + in + case opt_rl of + NONE => Drule.dummy_thm + | SOME rl => Drule.infer_instantiate' ctxt + [SOME (Thm.cterm_of ctxt tm)] rl + end + + val template = Lib.typing_of_term tm + + val facts = Proof_Context.facts_of ctxt + val candidate_typings = Facts.could_unify facts template + val candidate_rules = + map (elim_rule o Thm.prop_of o #1) candidate_typings + + val prems = Logic.strip_assums_hyp goal + val candidate_typings' = + filter (fn prem => Term.could_unify (template, prem)) prems + val candidate_rules' = map elim_rule candidate_typings' + in + (resolve_tac ctxt candidate_rules + ORELSE' eresolve_tac ctxt candidate_rules') i + end) + +end + +open Tactics diff --git a/spartan/theories/Equivalence.thy b/spartan/theories/Equivalence.thy new file mode 100644 index 0000000..44b77dd --- /dev/null +++ b/spartan/theories/Equivalence.thy @@ -0,0 +1,431 @@ +theory Equivalence +imports Identity + +begin + +section \<open>Homotopy\<close> + +definition "homotopy A B f g \<equiv> \<Prod>x: A. f `x =\<^bsub>B x\<^esub> g `x" + +definition homotopy_i (infix "~" 100) + where [implicit]: "f ~ g \<equiv> homotopy ? ? f g" + +translations "f ~ g" \<leftharpoondown> "CONST homotopy A B f g" + +Lemma homotopy_type [typechk]: + assumes + "A: U i" + "\<And>x. x: A \<Longrightarrow> B x: U i" + "f: \<Prod>x: A. B x" "g: \<Prod>x: A. B x" + shows "f ~ g: U i" + unfolding homotopy_def by typechk + +Lemma (derive) homotopy_refl: + assumes + "A: U i" + "f: \<Prod>x: A. B x" + shows "f ~ f" + unfolding homotopy_def by intros + +Lemma (derive) hsym: + assumes + "f: \<Prod>x: A. B x" + "g: \<Prod>x: A. B x" + "A: U i" + "\<And>x. 
x: A \<Longrightarrow> B x: U i" + shows "H: f ~ g \<Longrightarrow> g ~ f" + unfolding homotopy_def + apply intros + apply (rule pathinv) + \<guillemotright> by (elim H) + \<guillemotright> by typechk + done + +lemmas homotopy_symmetric = hsym[rotated 4] + +text \<open>\<open>hsym\<close> attribute for homotopies:\<close> + +ML \<open> +structure HSym_Attr = Sym_Attr ( + struct + val name = "hsym" + val symmetry_rule = @{thm homotopy_symmetric} + end +) +\<close> + +setup \<open>HSym_Attr.setup\<close> + +Lemma (derive) htrans: + assumes + "f: \<Prod>x: A. B x" + "g: \<Prod>x: A. B x" + "h: \<Prod>x: A. B x" + "A: U i" + "\<And>x. x: A \<Longrightarrow> B x: U i" + shows "\<lbrakk>H1: f ~ g; H2: g ~ h\<rbrakk> \<Longrightarrow> f ~ h" + unfolding homotopy_def + apply intro + \<guillemotright> vars x + apply (rule pathcomp[where ?y="g x"]) + \<^item> by (elim H1) + \<^item> by (elim H2) + done + \<guillemotright> by typechk + done + +lemmas homotopy_transitive = htrans[rotated 5] + +Lemma (derive) commute_homotopy: + assumes + "A: U i" "B: U i" + "x: A" "y: A" + "p: x =\<^bsub>A\<^esub> y" + "f: A \<rightarrow> B" "g: A \<rightarrow> B" + "H: homotopy A (\<lambda>_. B) f g" + shows "(H x) \<bullet> g[p] = f[p] \<bullet> (H y)" + \<comment> \<open>Need this assumption unfolded for typechecking:\<close> + supply assms(8)[unfolded homotopy_def] + apply (equality \<open>p:_\<close>) + focus vars x + apply reduce + \<comment> \<open>Here it would really be nice to have automation for transport and + propositional equality.\<close> + apply (rule use_transport[where ?y="H x \<bullet> refl (g x)"]) + \<guillemotright> by (rule pathcomp_right_refl) + \<guillemotright> by (rule pathinv) (rule pathcomp_left_refl) + \<guillemotright> by typechk + done + done + +Corollary (derive) commute_homotopy': + assumes + "A: U i" + "x: A" + "f: A \<rightarrow> A" + "H: homotopy A (\<lambda>_. A) f (id A)" + shows "H (f x) = f [H x]" +oops + +Lemma homotopy_id_left [typechk]: + assumes "A: U i" "B: U i" "f: A \<rightarrow> B" + shows "homotopy_refl A f: (id B) \<circ> f ~ f" + unfolding homotopy_refl_def homotopy_def by reduce + +Lemma homotopy_id_right [typechk]: + assumes "A: U i" "B: U i" "f: A \<rightarrow> B" + shows "homotopy_refl A f: f \<circ> (id A) ~ f" + unfolding homotopy_refl_def homotopy_def by reduce + +Lemma homotopy_funcomp_left: + assumes + "H: homotopy B C g g'" + "f: A \<rightarrow> B" + "g: \<Prod>x: B. C x" + "g': \<Prod>x: B. C x" + "A: U i" "B: U i" + "\<And>x. x: B \<Longrightarrow> C x: U i" + shows "homotopy A {} (g \<circ>\<^bsub>A\<^esub> f) (g' \<circ>\<^bsub>A\<^esub> f)" + unfolding homotopy_def + apply (intro; reduce) + apply (insert \<open>H: _\<close>[unfolded homotopy_def]) + apply (elim H) + done + +Lemma homotopy_funcomp_right: + assumes + "H: homotopy A (\<lambda>_. B) f f'" + "f: A \<rightarrow> B" + "f': A \<rightarrow> B" + "g: B \<rightarrow> C" + "A: U i" "B: U i" "C: U i" + shows "homotopy A {} (g \<circ>\<^bsub>A\<^esub> f) (g \<circ>\<^bsub>A\<^esub> f')" + unfolding homotopy_def + apply (intro; reduce) + apply (insert \<open>H: _\<close>[unfolded homotopy_def]) + apply (dest PiE, assumption) + apply (rule ap, assumption) + done + + +section \<open>Quasi-inverse and bi-invertibility\<close> + +subsection \<open>Quasi-inverses\<close> + +definition "qinv A B f \<equiv> \<Sum>g: B \<rightarrow> A. + homotopy A (\<lambda>_. A) (g \<circ>\<^bsub>A\<^esub> f) (id A) \<times> homotopy B (\<lambda>_. 
B) (f \<circ>\<^bsub>B\<^esub> g) (id B)" + +lemma qinv_type [typechk]: + assumes "A: U i" "B: U i" "f: A \<rightarrow> B" + shows "qinv A B f: U i" + unfolding qinv_def by typechk + +definition qinv_i ("qinv") + where [implicit]: "qinv f \<equiv> Equivalence.qinv ? ? f" + +translations "qinv f" \<leftharpoondown> "CONST Equivalence.qinv A B f" + +Lemma (derive) id_qinv: + assumes "A: U i" + shows "qinv (id A)" + unfolding qinv_def + apply intro defer + apply intro defer + apply (rule homotopy_id_right) + apply (rule homotopy_id_left) + done + +Lemma (derive) quasiinv_qinv: + assumes "A: U i" "B: U i" "f: A \<rightarrow> B" + shows "prf: qinv f \<Longrightarrow> qinv (fst prf)" + unfolding qinv_def + apply intro + \<guillemotright> by (rule \<open>f:_\<close>) + \<guillemotright> apply (elim "prf") + focus vars g HH + apply intro + \<^item> by reduce (snd HH) + \<^item> by reduce (fst HH) + done + done + done + +Lemma (derive) funcomp_qinv: + assumes + "A: U i" "B: U i" "C: U i" + "f: A \<rightarrow> B" "g: B \<rightarrow> C" + shows "qinv f \<rightarrow> qinv g \<rightarrow> qinv (g \<circ> f)" + apply (intros, unfold qinv_def, elims) + focus + premises hyps + vars _ _ finv _ ginv _ HfA HfB HgB HgC + + apply intro + apply (rule funcompI[where ?f=ginv and ?g=finv]) + + text \<open>Now a whole bunch of rewrites and we're done.\<close> +oops + +subsection \<open>Bi-invertible maps\<close> + +definition "biinv A B f \<equiv> + (\<Sum>g: B \<rightarrow> A. homotopy A (\<lambda>_. A) (g \<circ>\<^bsub>A\<^esub> f) (id A)) + \<times> (\<Sum>g: B \<rightarrow> A. homotopy B (\<lambda>_. B) (f \<circ>\<^bsub>B\<^esub> g) (id B))" + +lemma biinv_type [typechk]: + assumes "A: U i" "B: U i" "f: A \<rightarrow> B" + shows "biinv A B f: U i" + unfolding biinv_def by typechk + +definition biinv_i ("biinv") + where [implicit]: "biinv f \<equiv> Equivalence.biinv ? ? f" + +translations "biinv f" \<leftharpoondown> "CONST Equivalence.biinv A B f" + +Lemma (derive) qinv_imp_biinv: + assumes + "A: U i" "B: U i" + "f: A \<rightarrow> B" + shows "?prf: qinv f \<rightarrow> biinv f" + apply intros + unfolding qinv_def biinv_def + by (rule Sig_dist_exp) + +text \<open> + Show that bi-invertible maps are quasi-inverses, as a demonstration of how to + work in this system. +\<close> + +Lemma (derive) biinv_imp_qinv: + assumes "A: U i" "B: U i" "f: A \<rightarrow> B" + shows "biinv f \<rightarrow> qinv f" + + text \<open>Split the hypothesis \<^term>\<open>biinv f\<close> into its components:\<close> + apply intro + unfolding biinv_def + apply elims + + text \<open>Name the components:\<close> + focus premises vars _ _ _ g H1 h H2 + thm \<open>g:_\<close> \<open>h:_\<close> \<open>H1:_\<close> \<open>H2:_\<close> + + text \<open> + \<^term>\<open>g\<close> is a quasi-inverse to \<^term>\<open>f\<close>, so the proof will be a triple + \<^term>\<open><g, <?H1, ?H2>>\<close>. + \<close> + unfolding qinv_def + apply intro + \<guillemotright> by (rule \<open>g: _\<close>) + \<guillemotright> apply intro + text \<open>The first part \<^prop>\<open>?H1: g \<circ> f ~ id A\<close> is given by \<^term>\<open>H1\<close>.\<close> + apply (rule \<open>H1: _\<close>) + + text \<open> + It remains to prove \<^prop>\<open>?H2: f \<circ> g ~ id B\<close>. First show that \<open>g ~ h\<close>, + then the result follows from @{thm \<open>H2: f \<circ> h ~ id B\<close>}. Here a proof + block is used to calculate "forward". 
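+ Concretely, the chain is \<open>g ~ g \<circ> (id B) ~ g \<circ> f \<circ> h ~ (id A) \<circ> h ~ h\<close>, using + \<open>homotopy_funcomp_right\<close>, \<open>homotopy_funcomp_left\<close> and the identity laws; composing + with \<open>f\<close> and appealing to transitivity together with \<open>H2\<close> then finishes the proof.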
+ \<close> + proof - + have "g \<circ> (id B) ~ g \<circ> f \<circ> h" + by (rule homotopy_funcomp_right) (rule \<open>H2:_\<close>[hsym]) + + moreover have "g \<circ> f \<circ> h ~ (id A) \<circ> h" + by (subst funcomp_assoc[symmetric]) + (rule homotopy_funcomp_left, rule \<open>H1:_\<close>) + + ultimately have "g ~ h" + apply (rewrite to "g \<circ> (id B)" id_right[symmetric]) + apply (rewrite to "(id A) \<circ> h" id_left[symmetric]) + by (rule homotopy_transitive) (assumption, typechk) + + then have "f \<circ> g ~ f \<circ> h" + by (rule homotopy_funcomp_right) + + with \<open>H2:_\<close> + show "f \<circ> g ~ id B" + by (rule homotopy_transitive) (assumption+, typechk) + qed + done + done + +Lemma (derive) id_biinv: + "A: U i \<Longrightarrow> biinv (id A)" + by (rule qinv_imp_biinv) (rule id_qinv) + +Lemma (derive) funcomp_biinv: + assumes + "A: U i" "B: U i" "C: U i" + "f: A \<rightarrow> B" "g: B \<rightarrow> C" + shows "biinv f \<rightarrow> biinv g \<rightarrow> biinv (g \<circ> f)" + apply intros + focus vars biinv\<^sub>f biinv\<^sub>g + + text \<open>Follows from \<open>funcomp_qinv\<close>.\<close> +oops + + +section \<open>Equivalence\<close> + +text \<open> + Following the HoTT book, we first define equivalence in terms of + bi-invertibility. +\<close> + +definition equivalence (infix "\<simeq>" 110) + where "A \<simeq> B \<equiv> \<Sum>f: A \<rightarrow> B. Equivalence.biinv A B f" + +lemma equivalence_type [typechk]: + assumes "A: U i" "B: U i" + shows "A \<simeq> B: U i" + unfolding equivalence_def by typechk + +Lemma (derive) equivalence_refl: + assumes "A: U i" + shows "A \<simeq> A" + unfolding equivalence_def + apply intro defer + apply (rule qinv_imp_biinv) defer + apply (rule id_qinv) + done + +text \<open> + The following could perhaps be easier with transport (but then I think we need + univalence?)... +\<close> + +Lemma (derive) equivalence_symmetric: + assumes "A: U i" "B: U i" + shows "A \<simeq> B \<rightarrow> B \<simeq> A" + apply intros + unfolding equivalence_def + apply elim + \<guillemotright> vars _ f "prf" + apply (dest (4) biinv_imp_qinv) + apply intro + \<^item> unfolding qinv_def by (rule fst[of "(biinv_imp_qinv A B f) prf"]) + \<^item> by (rule qinv_imp_biinv) (rule quasiinv_qinv) + done + done + +Lemma (derive) equivalence_transitive: + assumes "A: U i" "B: U i" "C: U i" + shows "A \<simeq> B \<rightarrow> B \<simeq> C \<rightarrow> A \<simeq> C" + apply intros + unfolding equivalence_def + + text \<open>Use \<open>funcomp_biinv\<close>.\<close> +oops + +text \<open> + Equal types are equivalent. We give two proofs: the first by induction, and + the second by following the HoTT book and showing that transport is an + equivalence. +\<close> + +Lemma + assumes + "A: U i" "B: U i" "p: A =\<^bsub>U i\<^esub> B" + shows "A \<simeq> B" + by (equality \<open>p:_\<close>) (rule equivalence_refl) + +text \<open> + The following proof is wordy because (1) the typechecker doesn't rewrite, and + (2) we don't yet have universe automation. 
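+ By "universe automation" we mean automatic discharge of \<open>U i\<close> side conditions + (via rules like \<open>U_cumulative\<close> and \<open>U_in_U\<close>), which below have to be supplied + manually with \<open>rule U_in_U\<close> and \<open>lift_universe_codomain\<close>.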
+\<close> + +Lemma (derive) id_imp_equiv: + assumes + "A: U i" "B: U i" "p: A =\<^bsub>U i\<^esub> B" + shows "<trans (id (U i)) p, ?isequiv>: A \<simeq> B" + unfolding equivalence_def + apply intros defer + + \<comment> \<open>Switch off auto-typechecking, which messes with universe levels\<close> + supply [[auto_typechk=false]] + + \<guillemotright> apply (equality \<open>p:_\<close>) + \<^item> premises vars A B + apply (rewrite at A in "A \<rightarrow> B" id_comp[symmetric]) + apply (rewrite at B in "_ \<rightarrow> B" id_comp[symmetric]) + apply (rule transport, rule U_in_U) + apply (rule lift_universe_codomain, rule U_in_U) + apply (typechk, rule U_in_U) + done + \<^item> premises vars A + apply (subst transport_comp) + \<^enum> by (rule U_in_U) + \<^enum> by reduce (rule lift_universe) + \<^enum> by reduce (rule id_biinv) + done + done + + \<guillemotright> \<comment> \<open>Similar proof as in the first subgoal above\<close> + apply (rewrite at A in "A \<rightarrow> B" id_comp[symmetric]) + apply (rewrite at B in "_ \<rightarrow> B" id_comp[symmetric]) + apply (rule transport, rule U_in_U) + apply (rule lift_universe_codomain, rule U_in_U) + apply (typechk, rule U_in_U) + done + done + +(*Uncomment this to see all implicits from here on. +no_translations + "f x" \<leftharpoondown> "f `x" + "x = y" \<leftharpoondown> "x =\<^bsub>A\<^esub> y" + "g \<circ> f" \<leftharpoondown> "g \<circ>\<^bsub>A\<^esub> f" + "p\<inverse>" \<leftharpoondown> "CONST pathinv A x y p" + "p \<bullet> q" \<leftharpoondown> "CONST pathcomp A x y z p q" + "fst" \<leftharpoondown> "CONST Spartan.fst A B" + "snd" \<leftharpoondown> "CONST Spartan.snd A B" + "f[p]" \<leftharpoondown> "CONST ap A B x y f p" + "trans P p" \<leftharpoondown> "CONST transport A P x y p" + "trans_const B p" \<leftharpoondown> "CONST transport_const A B x y p" + "lift P p u" \<leftharpoondown> "CONST pathlift A P x y p u" + "apd f p" \<leftharpoondown> "CONST Identity.apd A P f x y p" + "f ~ g" \<leftharpoondown> "CONST homotopy A B f g" + "qinv f" \<leftharpoondown> "CONST Equivalence.qinv A B f" + "biinv f" \<leftharpoondown> "CONST Equivalence.biinv A B f" +*) + + +end diff --git a/spartan/theories/Identity.thy b/spartan/theories/Identity.thy new file mode 100644 index 0000000..0edf20e --- /dev/null +++ b/spartan/theories/Identity.thy @@ -0,0 +1,433 @@ +chapter \<open>The identity type\<close> + +text \<open> + The identity type, the higher groupoid structure of types, + and type families as fibrations. +\<close> + +theory Identity +imports Spartan + +begin + +axiomatization + Id :: \<open>o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> and + refl :: \<open>o \<Rightarrow> o\<close> and + IdInd :: \<open>o \<Rightarrow> (o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o) \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> + +syntax "_Id" :: \<open>o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> ("(2_ =\<^bsub>_\<^esub>/ _)" [111, 0, 111] 110) + +translations "a =\<^bsub>A\<^esub> b" \<rightleftharpoons> "CONST Id A a b" + +axiomatization where + IdF: "\<lbrakk>A: U i; a: A; b: A\<rbrakk> \<Longrightarrow> a =\<^bsub>A\<^esub> b: U i" and + + IdI: "a: A \<Longrightarrow> refl a: a =\<^bsub>A\<^esub> a" and + + IdE: "\<lbrakk> + p: a =\<^bsub>A\<^esub> b; + a: A; + b: A; + \<And>x y p. \<lbrakk>p: x =\<^bsub>A\<^esub> y; x: A; y: A\<rbrakk> \<Longrightarrow> C x y p: U i; + \<And>x. 
x: A \<Longrightarrow> f x: C x x (refl x) + \<rbrakk> \<Longrightarrow> IdInd A (\<lambda>x y p. C x y p) f a b p: C a b p" and + + Id_comp: "\<lbrakk> + a: A; + \<And>x y p. \<lbrakk>x: A; y: A; p: x =\<^bsub>A\<^esub> y\<rbrakk> \<Longrightarrow> C x y p: U i; + \<And>x. x: A \<Longrightarrow> f x: C x x (refl x) + \<rbrakk> \<Longrightarrow> IdInd A (\<lambda>x y p. C x y p) f a a (refl a) \<equiv> f a" + +lemmas + [intros] = IdF IdI and + [elims] = IdE and + [comps] = Id_comp + + +section \<open>Induction\<close> + +ML_file \<open>../lib/equality.ML\<close> + +method_setup equality = \<open>Scan.lift Parse.thm >> (fn (fact, _) => fn ctxt => + CONTEXT_METHOD (K (Equality.equality_context_tac fact ctxt)))\<close> + + +section \<open>Symmetry and transitivity\<close> + +Lemma (derive) pathinv: + assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y" + shows "y =\<^bsub>A\<^esub> x" + by (equality \<open>p:_\<close>) intro + +lemma pathinv_comp [comps]: + assumes "x: A" "A: U i" + shows "pathinv A x x (refl x) \<equiv> refl x" + unfolding pathinv_def by reduce + +Lemma (derive) pathcomp: + assumes + "A: U i" "x: A" "y: A" "z: A" + "p: x =\<^bsub>A\<^esub> y" "q: y =\<^bsub>A\<^esub> z" + shows + "x =\<^bsub>A\<^esub> z" + apply (equality \<open>p:_\<close>) + focus premises vars x p + apply (equality \<open>p:_\<close>) + apply intro + done + done + +lemma pathcomp_comp [comps]: + assumes "a: A" "A: U i" + shows "pathcomp A a a a (refl a) (refl a) \<equiv> refl a" + unfolding pathcomp_def by reduce + +text \<open>Set up \<open>sym\<close> attribute for propositional equalities:\<close> + +ML \<open> +structure Id_Sym_Attr = Sym_Attr ( + struct + val name = "sym" + val symmetry_rule = @{thm pathinv[rotated 3]} + end +) +\<close> + +setup \<open>Id_Sym_Attr.setup\<close> + + +section \<open>Notation\<close> + +definition Id_i (infix "=" 110) + where [implicit]: "Id_i x y \<equiv> x =\<^bsub>?\<^esub> y" + +definition pathinv_i ("_\<inverse>" [1000]) + where [implicit]: "pathinv_i p \<equiv> pathinv ? ? ? p" + +definition pathcomp_i (infixl "\<bullet>" 121) + where [implicit]: "pathcomp_i p q \<equiv> pathcomp ? ? ? ? 
p q" + +translations + "x = y" \<leftharpoondown> "x =\<^bsub>A\<^esub> y" + "p\<inverse>" \<leftharpoondown> "CONST pathinv A x y p" + "p \<bullet> q" \<leftharpoondown> "CONST pathcomp A x y z p q" + + +section \<open>Basic propositional equalities\<close> + +(*TODO: Better integration of equality type directly into reasoning...*) + +Lemma (derive) pathcomp_left_refl: + assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y" + shows "(refl x) \<bullet> p = p" + apply (equality \<open>p:_\<close>) + apply (reduce; intros) + done + +Lemma (derive) pathcomp_right_refl: + assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y" + shows "p \<bullet> (refl y) = p" + apply (equality \<open>p:_\<close>) + apply (reduce; intros) + done + +Lemma (derive) pathcomp_left_inv: + assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y" + shows "p\<inverse> \<bullet> p = refl y" + apply (equality \<open>p:_\<close>) + apply (reduce; intros) + done + +Lemma (derive) pathcomp_right_inv: + assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y" + shows "p \<bullet> p\<inverse> = refl x" + apply (equality \<open>p:_\<close>) + apply (reduce; intros) + done + +Lemma (derive) pathinv_pathinv: + assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y" + shows "p\<inverse>\<inverse> = p" + apply (equality \<open>p:_\<close>) + apply (reduce; intros) + done + +Lemma (derive) pathcomp_assoc: + assumes + "A: U i" "x: A" "y: A" "z: A" "w: A" + "p: x =\<^bsub>A\<^esub> y" "q: y =\<^bsub>A\<^esub> z" "r: z =\<^bsub>A\<^esub> w" + shows "p \<bullet> (q \<bullet> r) = p \<bullet> q \<bullet> r" + apply (equality \<open>p:_\<close>) + focus premises vars x p + apply (equality \<open>p:_\<close>) + focus premises vars x p + apply (equality \<open>p:_\<close>) + apply (reduce; intros) + done + done + done + + +section \<open>Functoriality of functions\<close> + +Lemma (derive) ap: + assumes + "A: U i" "B: U i" + "x: A" "y: A" + "f: A \<rightarrow> B" + "p: x =\<^bsub>A\<^esub> y" + shows "f x = f y" + apply (equality \<open>p:_\<close>) + apply intro + done + +definition ap_i ("_[_]" [1000, 0]) + where [implicit]: "ap_i f p \<equiv> ap ? ? ? ? 
f p" + +translations "f[p]" \<leftharpoondown> "CONST ap A B x y f p" + +Lemma ap_refl [comps]: + assumes "f: A \<rightarrow> B" "x: A" "A: U i" "B: U i" + shows "f[refl x] \<equiv> refl (f x)" + unfolding ap_def by reduce + +Lemma (derive) ap_pathcomp: + assumes + "A: U i" "B: U i" + "x: A" "y: A" "z: A" + "f: A \<rightarrow> B" + "p: x =\<^bsub>A\<^esub> y" "q: y =\<^bsub>A\<^esub> z" + shows + "f[p \<bullet> q] = f[p] \<bullet> f[q]" + apply (equality \<open>p:_\<close>) + focus premises vars x p + apply (equality \<open>p:_\<close>) + apply (reduce; intro) + done + done + +Lemma (derive) ap_pathinv: + assumes + "A: U i" "B: U i" + "x: A" "y: A" + "f: A \<rightarrow> B" + "p: x =\<^bsub>A\<^esub> y" + shows "f[p\<inverse>] = f[p]\<inverse>" + by (equality \<open>p:_\<close>) (reduce; intro) + +text \<open>The next two proofs currently use some low-level rewriting.\<close> + +Lemma (derive) ap_funcomp: + assumes + "A: U i" "B: U i" "C: U i" + "x: A" "y: A" + "f: A \<rightarrow> B" "g: B \<rightarrow> C" + "p: x =\<^bsub>A\<^esub> y" + shows "(g \<circ> f)[p] = g[f[p]]" + apply (equality \<open>p:_\<close>) + apply (simp only: funcomp_apply_comp[symmetric]) + apply (reduce; intro) + done + +Lemma (derive) ap_id: + assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y" + shows "(id A)[p] = p" + apply (equality \<open>p:_\<close>) + apply (rewrite at "\<hole> = _" id_comp[symmetric]) + apply (rewrite at "_ = \<hole>" id_comp[symmetric]) + apply (reduce; intros) + done + + +section \<open>Transport\<close> + +Lemma (derive) transport: + assumes + "A: U i" + "\<And>x. x: A \<Longrightarrow> P x: U i" + "x: A" "y: A" + "p: x =\<^bsub>A\<^esub> y" + shows "P x \<rightarrow> P y" + by (equality \<open>p:_\<close>) intro + +definition transport_i ("trans") + where [implicit]: "trans P p \<equiv> transport ? P ? ? p" + +translations "trans P p" \<leftharpoondown> "CONST transport A P x y p" + +Lemma transport_comp [comps]: + assumes + "a: A" + "A: U i" + "\<And>x. x: A \<Longrightarrow> P x: U i" + shows "trans P (refl a) \<equiv> id (P a)" + unfolding transport_def id_def by reduce + +\<comment> \<open>TODO: Build transport automation!\<close> + +Lemma use_transport: + assumes + "p: y =\<^bsub>A\<^esub> x" + "u: P x" + "x: A" "y: A" + "A: U i" + "\<And>x. x: A \<Longrightarrow> P x: U i" + shows "trans P p\<inverse> u: P y" + by typechk + +Lemma (derive) transport_left_inv: + assumes + "A: U i" + "\<And>x. x: A \<Longrightarrow> P x: U i" + "x: A" "y: A" + "p: x =\<^bsub>A\<^esub> y" + shows "(trans P p\<inverse>) \<circ> (trans P p) = id (P x)" + by (equality \<open>p:_\<close>) (reduce; intro) + +Lemma (derive) transport_right_inv: + assumes + "A: U i" + "\<And>x. x: A \<Longrightarrow> P x: U i" + "x: A" "y: A" + "p: x =\<^bsub>A\<^esub> y" + shows "(trans P p) \<circ> (trans P p\<inverse>) = id (P y)" + by (equality \<open>p:_\<close>) (reduce; intros) + +Lemma (derive) transport_pathcomp: + assumes + "A: U i" + "\<And>x. x: A \<Longrightarrow> P x: U i" + "x: A" "y: A" "z: A" + "u: P x" + "p: x =\<^bsub>A\<^esub> y" "q: y =\<^bsub>A\<^esub> z" + shows "trans P q (trans P p u) = trans P (p \<bullet> q) u" + apply (equality \<open>p:_\<close>) + focus premises vars x p + apply (equality \<open>p:_\<close>) + apply (reduce; intros) + done + done + +Lemma (derive) transport_compose_typefam: + assumes + "A: U i" "B: U i" + "\<And>x. x: B \<Longrightarrow> P x: U i" + "f: A \<rightarrow> B" + "x: A" "y: A" + "p: x =\<^bsub>A\<^esub> y" + "u: P (f x)" + shows "trans (\<lambda>x. 
P (f x)) p u = trans P f[p] u" + by (equality \<open>p:_\<close>) (reduce; intros) + +Lemma (derive) transport_function_family: + assumes + "A: U i" + "\<And>x. x: A \<Longrightarrow> P x: U i" + "\<And>x. x: A \<Longrightarrow> Q x: U i" + "f: \<Prod>x: A. P x \<rightarrow> Q x" + "x: A" "y: A" + "u: P x" + "p: x =\<^bsub>A\<^esub> y" + shows "trans Q p ((f x) u) = (f y) (trans P p u)" + by (equality \<open>p:_\<close>) (reduce; intros) + +Lemma (derive) transport_const: + assumes + "A: U i" "B: U i" + "x: A" "y: A" + "p: x =\<^bsub>A\<^esub> y" + shows "\<Prod>b: B. trans (\<lambda>_. B) p b = b" + by (intro, equality \<open>p:_\<close>) (reduce; intro) + +definition transport_const_i ("trans'_const") + where [implicit]: "trans_const B p \<equiv> transport_const ? B ? ? p" + +translations "trans_const B p" \<leftharpoondown> "CONST transport_const A B x y p" + +Lemma transport_const_comp [comps]: + assumes + "x: A" "b: B" + "A: U i" "B: U i" + shows "trans_const B (refl x) b \<equiv> refl b" + unfolding transport_const_def by reduce + +Lemma (derive) pathlift: + assumes + "A: U i" + "\<And>x. x: A \<Longrightarrow> P x: U i" + "x: A" "y: A" + "p: x =\<^bsub>A\<^esub> y" + "u: P x" + shows "<x, u> = <y, trans P p u>" + by (equality \<open>p:_\<close>) (reduce; intros) + +definition pathlift_i ("lift") + where [implicit]: "lift P p u \<equiv> pathlift ? P ? ? p u" + +translations "lift P p u" \<leftharpoondown> "CONST pathlift A P x y p u" + +Lemma pathlift_comp [comps]: + assumes + "u: P x" + "x: A" + "\<And>x. x: A \<Longrightarrow> P x: U i" + "A: U i" + shows "lift P (refl x) u \<equiv> refl <x, u>" + unfolding pathlift_def by reduce + +Lemma (derive) pathlift_fst: + assumes + "A: U i" + "\<And>x. x: A \<Longrightarrow> P x: U i" + "x: A" "y: A" + "u: P x" + "p: x =\<^bsub>A\<^esub> y" + shows "fst[lift P p u] = p" + apply (equality \<open>p:_\<close>) + text \<open>Some rewriting needed here:\<close> + \<guillemotright> vars x y + (*Here an automatic reordering tactic would be helpful*) + apply (rewrite at x in "x = y" fst_comp[symmetric]) + prefer 4 + apply (rewrite at y in "_ = y" fst_comp[symmetric]) + done + \<guillemotright> by reduce intro + done + + +section \<open>Dependent paths\<close> + +Lemma (derive) apd: + assumes + "A: U i" + "\<And>x. x: A \<Longrightarrow> P x: U i" + "f: \<Prod>x: A. P x" + "x: A" "y: A" + "p: x =\<^bsub>A\<^esub> y" + shows "trans P p (f x) = f y" + by (equality \<open>p:_\<close>) (reduce; intros; typechk) + +definition apd_i ("apd") + where [implicit]: "apd f p \<equiv> Identity.apd ? ? f ? ? p" + +translations "apd f p" \<leftharpoondown> "CONST Identity.apd A P f x y p" + +Lemma dependent_map_comp [comps]: + assumes + "f: \<Prod>x: A. P x" + "x: A" + "A: U i" + "\<And>x.
x: A \<Longrightarrow> P x: U i" + shows "apd f (refl x) \<equiv> refl (f x)" + unfolding apd_def by reduce + +Lemma (derive) apd_ap: + assumes + "A: U i" "B: U i" + "f: A \<rightarrow> B" + "x: A" "y: A" + "p: x =\<^bsub>A\<^esub> y" + shows "apd f p = trans_const B p (f x) \<bullet> f[p]" + by (equality \<open>p:_\<close>) (reduce; intro) + +end diff --git a/spartan/theories/Spartan.thy b/spartan/theories/Spartan.thy new file mode 100644 index 0000000..fb901d5 --- /dev/null +++ b/spartan/theories/Spartan.thy @@ -0,0 +1,463 @@ +text \<open>Spartan type theory\<close> + +theory Spartan +imports + Pure + "HOL-Eisbach.Eisbach" + "HOL-Eisbach.Eisbach_Tools" +keywords + "Theorem" "Lemma" "Corollary" "Proposition" :: thy_goal_stmt and + "focus" "\<guillemotright>" "\<^item>" "\<^enum>" "~" :: prf_script_goal % "proof" and + "derive" "vars":: quasi_command and + "print_coercions" :: thy_decl + +begin + + +section \<open>Preamble\<close> + +declare [[eta_contract=false]] + + +section \<open>Metatype setup\<close> + +typedecl o + + +section \<open>Judgments\<close> + +judgment has_type :: \<open>o \<Rightarrow> o \<Rightarrow> prop\<close> ("(2_:/ _)" 999) + + +section \<open>Universes\<close> + +typedecl lvl \<comment> \<open>Universe levels\<close> + +axiomatization + O :: \<open>lvl\<close> and + S :: \<open>lvl \<Rightarrow> lvl\<close> and + lt :: \<open>lvl \<Rightarrow> lvl \<Rightarrow> prop\<close> (infix "<" 900) + where + O_min: "O < S i" and + lt_S: "i < S i" and + lt_trans: "i < j \<Longrightarrow> j < k \<Longrightarrow> i < k" + +axiomatization U :: \<open>lvl \<Rightarrow> o\<close> where + U_hierarchy: "i < j \<Longrightarrow> U i: U j" and + U_cumulative: "A: U i \<Longrightarrow> i < j \<Longrightarrow> A: U j" + +lemma U_in_U: + "U i: U (S i)" + by (rule U_hierarchy, rule lt_S) + +lemma lift_universe: + "A: U i \<Longrightarrow> A: U (S i)" + by (erule U_cumulative, rule lt_S) + + +section \<open>\<Prod>-type\<close> + +axiomatization + Pi :: \<open>o \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> o\<close> and + lam :: \<open>o \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> o\<close> and + app :: \<open>o \<Rightarrow> o \<Rightarrow> o\<close> ("(1_ `_)" [120, 121] 120) + +syntax + "_Pi" :: \<open>idt \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> ("(2\<Prod>_: _./ _)" 30) + "_lam" :: \<open>idt \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> ("(2\<lambda>_: _./ _)" 30) +translations + "\<Prod>x: A. B" \<rightleftharpoons> "CONST Pi A (\<lambda>x. B)" + "\<lambda>x: A. b" \<rightleftharpoons> "CONST lam A (\<lambda>x. b)" + +abbreviation Fn (infixr "\<rightarrow>" 40) where "A \<rightarrow> B \<equiv> \<Prod>_: A. B" + +axiomatization where + PiF: "\<lbrakk>\<And>x. x: A \<Longrightarrow> B x: U i; A: U i\<rbrakk> \<Longrightarrow> \<Prod>x: A. B x: U i" and + + PiI: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x: B x; A: U i\<rbrakk> \<Longrightarrow> \<lambda>x: A. b x: \<Prod>x: A. B x" and + + PiE: "\<lbrakk>f: \<Prod>x: A. B x; a: A\<rbrakk> \<Longrightarrow> f `a: B a" and + + beta: "\<lbrakk>a: A; \<And>x. x: A \<Longrightarrow> b x: B x\<rbrakk> \<Longrightarrow> (\<lambda>x: A. b x) `a \<equiv> b a" and + + eta: "f: \<Prod>x: A. B x \<Longrightarrow> \<lambda>x: A. f `x \<equiv> f" and + + Pi_cong: "\<lbrakk> + A: U i; + \<And>x. x: A \<Longrightarrow> B x: U i; + \<And>x. x: A \<Longrightarrow> B' x: U i; + \<And>x. x: A \<Longrightarrow> B x \<equiv> B' x + \<rbrakk> \<Longrightarrow> \<Prod>x: A. B x \<equiv> \<Prod>x: A. 
B' x" and + + lam_cong: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x \<equiv> c x; A: U i\<rbrakk> \<Longrightarrow> \<lambda>x: A. b x \<equiv> \<lambda>x: A. c x" + + +section \<open>\<Sum>-type\<close> + +axiomatization + Sig :: \<open>o \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> o\<close> and + pair :: \<open>o \<Rightarrow> o \<Rightarrow> o\<close> ("(2<_,/ _>)") and + SigInd :: \<open>o \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> (o \<Rightarrow> o \<Rightarrow> o) \<Rightarrow> o \<Rightarrow> o\<close> + +syntax "_Sum" :: \<open>idt \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> ("(2\<Sum>_: _./ _)" 20) + +translations "\<Sum>x: A. B" \<rightleftharpoons> "CONST Sig A (\<lambda>x. B)" + +abbreviation Prod (infixl "\<times>" 50) + where "A \<times> B \<equiv> \<Sum>_: A. B" + +axiomatization where + SigF: "\<lbrakk>\<And>x. x: A \<Longrightarrow> B x: U i; A: U i\<rbrakk> \<Longrightarrow> \<Sum>x: A. B x: U i" and + + SigI: "\<lbrakk>\<And>x. x: A \<Longrightarrow> B x: U i; a: A; b: B a\<rbrakk> \<Longrightarrow> <a, b>: \<Sum>x: A. B x" and + + SigE: "\<lbrakk> + p: \<Sum>x: A. B x; + A: U i; + \<And>x. x : A \<Longrightarrow> B x: U i; + \<And>p. p: \<Sum>x: A. B x \<Longrightarrow> C p: U i; + \<And>x y. \<lbrakk>x: A; y: B x\<rbrakk> \<Longrightarrow> f x y: C <x, y> + \<rbrakk> \<Longrightarrow> SigInd A (\<lambda>x. B x) (\<lambda>p. C p) f p: C p" and + + Sig_comp: "\<lbrakk> + a: A; + b: B a; + \<And>x. x: A \<Longrightarrow> B x: U i; + \<And>p. p: \<Sum>x: A. B x \<Longrightarrow> C p: U i; + \<And>x y. \<lbrakk>x: A; y: B x\<rbrakk> \<Longrightarrow> f x y: C <x, y> + \<rbrakk> \<Longrightarrow> SigInd A (\<lambda>x. B x) (\<lambda>p. C p) f <a, b> \<equiv> f a b" and + + Sig_cong: "\<lbrakk> + \<And>x. x: A \<Longrightarrow> B x \<equiv> B' x; + A: U i; + \<And>x. x : A \<Longrightarrow> B x: U i; + \<And>x. x : A \<Longrightarrow> B' x: U i + \<rbrakk> \<Longrightarrow> \<Sum>x: A. B x \<equiv> \<Sum>x: A. 
B' x" + + + + + +section \<open>Proof commands\<close> + +named_theorems typechk + +ML_file \<open>../lib/lib.ML\<close> +ML_file \<open>../lib/goals.ML\<close> +ML_file \<open>../lib/focus.ML\<close> + + +section \<open>Congruence automation\<close> + +ML_file \<open>../lib/congruence.ML\<close> + + +section \<open>Methods\<close> + +ML_file \<open>../lib/elimination.ML\<close> \<comment> \<open>declares the [elims] attribute\<close> + +named_theorems intros and comps +lemmas + [intros] = PiF PiI SigF SigI and + [elims] = PiE SigE and + [comps] = beta Sig_comp and + [cong] = Pi_cong lam_cong Sig_cong + +ML_file \<open>../lib/tactics.ML\<close> + +method_setup assumptions = + \<open>Scan.succeed (fn ctxt => SIMPLE_METHOD ( + CHANGED (TRYALL (assumptions_tac ctxt))))\<close> + +method_setup known = + \<open>Scan.succeed (fn ctxt => SIMPLE_METHOD ( + CHANGED (TRYALL (known_tac ctxt))))\<close> + +method_setup intro = + \<open>Scan.succeed (fn ctxt => SIMPLE_METHOD (HEADGOAL (intro_tac ctxt)))\<close> + +method_setup intros = + \<open>Scan.succeed (fn ctxt => SIMPLE_METHOD (HEADGOAL (intros_tac ctxt)))\<close> + +method_setup elim = + \<open>Scan.option Args.term >> (fn tm => fn ctxt => + SIMPLE_METHOD' (SIDE_CONDS (elims_tac tm ctxt) ctxt))\<close> + +method elims = elim+ + +method_setup typechk = + \<open>Scan.succeed (fn ctxt => SIMPLE_METHOD ( + CHANGED (ALLGOALS (TRY o typechk_tac ctxt))))\<close> + +method_setup rule = + \<open>Attrib.thms >> (fn ths => fn ctxt => + SIMPLE_METHOD (HEADGOAL (rule_tac ths ctxt)))\<close> + +method_setup dest = + \<open>Scan.lift (Scan.option (Args.parens Parse.int)) -- Attrib.thms + >> (fn (opt_n, ths) => fn ctxt => + SIMPLE_METHOD (HEADGOAL (dest_tac opt_n ths ctxt)))\<close> + +subsection \<open>Rewriting\<close> + +\<comment> \<open>\<open>subst\<close> method\<close> +ML_file "~~/src/Tools/misc_legacy.ML" +ML_file "~~/src/Tools/IsaPlanner/isand.ML" +ML_file "~~/src/Tools/IsaPlanner/rw_inst.ML" +ML_file "~~/src/Tools/IsaPlanner/zipper.ML" +ML_file "../lib/eqsubst.ML" + +\<comment> \<open>\<open>rewrite\<close> method\<close> +consts rewrite_HOLE :: "'a::{}" ("\<hole>") + +lemma eta_expand: + fixes f :: "'a::{} \<Rightarrow> 'b::{}" + shows "f \<equiv> \<lambda>x. f x" . 
+ +lemma rewr_imp: + assumes "PROP A \<equiv> PROP B" + shows "(PROP A \<Longrightarrow> PROP C) \<equiv> (PROP B \<Longrightarrow> PROP C)" + apply (rule Pure.equal_intr_rule) + apply (drule equal_elim_rule2[OF assms]; assumption) + apply (drule equal_elim_rule1[OF assms]; assumption) + done + +lemma imp_cong_eq: + "(PROP A \<Longrightarrow> (PROP B \<Longrightarrow> PROP C) \<equiv> (PROP B' \<Longrightarrow> PROP C')) \<equiv> + ((PROP B \<Longrightarrow> PROP A \<Longrightarrow> PROP C) \<equiv> (PROP B' \<Longrightarrow> PROP A \<Longrightarrow> PROP C'))" + apply (Pure.intro Pure.equal_intr_rule) + apply (drule (1) cut_rl; drule Pure.equal_elim_rule1 Pure.equal_elim_rule2; assumption)+ + apply (drule Pure.equal_elim_rule1 Pure.equal_elim_rule2; assumption)+ + done + +ML_file \<open>~~/src/HOL/Library/cconv.ML\<close> +ML_file \<open>../lib/rewrite.ML\<close> + +\<comment> \<open>\<open>reduce\<close> method computes terms via judgmental equalities\<close> +setup \<open> + map_theory_simpset (fn ctxt => + ctxt addSolver (mk_solver "" typechk_tac)) +\<close> + +method reduce uses add = (simp add: comps add | subst comps, reduce add: add)+ + + +section \<open>Implicit notations\<close> + +text \<open> + \<open>?\<close> is used to mark implicit arguments in definitions, while \<open>{}\<close> is expanded + immediately for elaboration in statements. +\<close> + +consts + iarg :: \<open>'a\<close> ("?") + hole :: \<open>'b\<close> ("{}") + +ML_file \<open>../lib/implicits.ML\<close> + +attribute_setup implicit = \<open>Scan.succeed Implicits.implicit_defs_attr\<close> + +ML \<open> +val _ = Context.>> + (Syntax_Phases.term_check 1 "" (fn ctxt => map (Implicits.make_holes ctxt))) +\<close> + +text \<open>Automatically insert inhabitation judgments where needed:\<close> + +consts inhabited :: \<open>o \<Rightarrow> prop\<close> ("(_)") +translations "CONST inhabited A" \<rightharpoonup> "CONST has_type {} A" + + +section \<open>Lambda coercion\<close> + +\<comment> \<open>Coerce object lambdas to meta-lambdas\<close> +abbreviation (input) lambda :: \<open>o \<Rightarrow> o \<Rightarrow> o\<close> + where "lambda f \<equiv> \<lambda>x. f `x" + +ML_file \<open>~~/src/Tools/subtyping.ML\<close> +declare [[coercion_enabled, coercion lambda]] + +translations "f x" \<leftharpoondown> "f `x" + + +section \<open>Functions\<close> + +lemma eta_exp: + assumes "f: \<Prod>x: A. B x" + shows "f \<equiv> \<lambda>x: A. f x" + by (rule eta[symmetric]) + +lemma lift_universe_codomain: + assumes "A: U i" "f: A \<rightarrow> U j" + shows "f: A \<rightarrow> U (S j)" + apply (sub eta_exp) + apply known + apply (Pure.rule intros; rule lift_universe) + done + +subsection \<open>Function composition\<close> + +definition "funcomp A g f \<equiv> \<lambda>x: A. g `(f `x)" + +syntax + "_funcomp" :: \<open>o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> ("(2_ \<circ>\<^bsub>_\<^esub>/ _)" [111, 0, 110] 110) +translations + "g \<circ>\<^bsub>A\<^esub> f" \<rightleftharpoons> "CONST funcomp A g f" + +lemma funcompI [typechk]: + assumes + "A: U i" + "B: U i" + "\<And>x. x: B \<Longrightarrow> C x: U i" + "f: A \<rightarrow> B" + "g: \<Prod>x: B. C x" + shows + "g \<circ>\<^bsub>A\<^esub> f: \<Prod>x: A. C (f x)" + unfolding funcomp_def by typechk + +lemma funcomp_assoc [comps]: + assumes + "f: A \<rightarrow> B" + "g: B \<rightarrow> C" + "h: \<Prod>x: C. 
D x" + "A: U i" + shows + "(h \<circ>\<^bsub>B\<^esub> g) \<circ>\<^bsub>A\<^esub> f \<equiv> h \<circ>\<^bsub>A\<^esub> g \<circ>\<^bsub>A\<^esub> f" + unfolding funcomp_def by reduce + +lemma funcomp_lambda_comp [comps]: + assumes + "A: U i" + "\<And>x. x: A \<Longrightarrow> b x: B" + "\<And>x. x: B \<Longrightarrow> c x: C x" + shows + "(\<lambda>x: B. c x) \<circ>\<^bsub>A\<^esub> (\<lambda>x: A. b x) \<equiv> \<lambda>x: A. c (b x)" + unfolding funcomp_def by reduce + +lemma funcomp_apply_comp [comps]: + assumes + "f: A \<rightarrow> B" "g: \<Prod>x: B. C x" + "x: A" + "A: U i" "B: U i" + "\<And>x y. x: B \<Longrightarrow> C x: U i" + shows "(g \<circ>\<^bsub>A\<^esub> f) x \<equiv> g (f x)" + unfolding funcomp_def by reduce + +text \<open>Notation:\<close> + +definition funcomp_i (infixr "\<circ>" 120) + where [implicit]: "funcomp_i g f \<equiv> g \<circ>\<^bsub>?\<^esub> f" + +translations "g \<circ> f" \<leftharpoondown> "g \<circ>\<^bsub>A\<^esub> f" + +subsection \<open>Identity function\<close> + +definition id where "id A \<equiv> \<lambda>x: A. x" + +lemma + idI [typechk]: "A: U i \<Longrightarrow> id A: A \<rightarrow> A" and + id_comp [comps]: "x: A \<Longrightarrow> (id A) x \<equiv> x" + unfolding id_def by reduce + +lemma id_left [comps]: + assumes "f: A \<rightarrow> B" "A: U i" "B: U i" + shows "(id B) \<circ>\<^bsub>A\<^esub> f \<equiv> f" + unfolding id_def + by (subst eta_exp[of f]) (reduce, rule eta) + +lemma id_right [comps]: + assumes "f: A \<rightarrow> B" "A: U i" "B: U i" + shows "f \<circ>\<^bsub>A\<^esub> (id A) \<equiv> f" + unfolding id_def + by (subst eta_exp[of f]) (reduce, rule eta) + +lemma id_U [typechk]: + "id (U i): U i \<rightarrow> U i" + by typechk (fact U_in_U) + + +section \<open>Pairs\<close> + +definition "fst A B \<equiv> \<lambda>p: \<Sum>x: A. B x. SigInd A B (\<lambda>_. A) (\<lambda>x y. x) p" +definition "snd A B \<equiv> \<lambda>p: \<Sum>x: A. B x. SigInd A B (\<lambda>p. B (fst A B p)) (\<lambda>x y. y) p" + +lemma fst_type [typechk]: + assumes "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i" + shows "fst A B: (\<Sum>x: A. B x) \<rightarrow> A" + unfolding fst_def by typechk + +lemma fst_comp [comps]: + assumes + "a: A" + "b: B a" + "A: U i" + "\<And>x. x: A \<Longrightarrow> B x: U i" + shows "fst A B <a, b> \<equiv> a" + unfolding fst_def by reduce + +lemma snd_type [typechk]: + assumes "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i" + shows "snd A B: \<Prod>p: \<Sum>x: A. B x. B (fst A B p)" + unfolding snd_def by typechk reduce + +lemma snd_comp [comps]: + assumes "a: A" "b: B a" "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i" + shows "snd A B <a, b> \<equiv> b" + unfolding snd_def by reduce + +subsection \<open>Notation\<close> + +definition fst_i ("fst") + where [implicit]: "fst \<equiv> Spartan.fst ? ?" + +definition snd_i ("snd") + where [implicit]: "snd \<equiv> Spartan.snd ? ?" + +translations + "fst" \<leftharpoondown> "CONST Spartan.fst A B" + "snd" \<leftharpoondown> "CONST Spartan.snd A B" + +subsection \<open>Projections\<close> + +Lemma fst [typechk]: + assumes + "p: \<Sum>x: A. B x" + "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i" + shows "fst p: A" + by typechk + +Lemma snd [typechk]: + assumes + "p: \<Sum>x: A. B x" + "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i" + shows "snd p: B (fst p)" + by typechk + +method fst for p::o = rule fst[of p] +method snd for p::o = rule snd[of p] + +subsection \<open>Properties of \<Sigma>\<close> + +Lemma (derive) Sig_dist_exp: + assumes + "p: \<Sum>x: A. 
B x \<times> C x" + "A: U i" + "\<And>x. x: A \<Longrightarrow> B x: U i" + "\<And>x. x: A \<Longrightarrow> C x: U i" + shows "(\<Sum>x: A. B x) \<times> (\<Sum>x: A. C x)" + apply (elim p) + focus vars x y + apply intro + \<guillemotright> apply intro + apply assumption + apply (fst y) + done + \<guillemotright> apply intro + apply assumption + apply (snd y) + done + done + done + + +end diff --git a/tests/Test.thy b/tests/Test.thy deleted file mode 100644 index 6f9f996..0000000 --- a/tests/Test.thy +++ /dev/null @@ -1,110 +0,0 @@ -(* -Title: tests/Test.thy -Author: Joshua Chen -Date: 2018 - -This is an old test suite from early implementations. -It is not always guaranteed to be up to date or to reflect most recent versions of the theory. -*) - -theory Test -imports "../HoTT" - -begin - - -text \<open> -A bunch of theorems and other statements for sanity-checking, as well as things that should be automatically simplified. - -Things that *should* be automated: -\<^item> Checking that @{term A} is a well-formed type, when writing things like @{prop "x: A"} and @{prop "A: U i"}. -\<^item> Checking that the argument to a (dependent/non-dependent) function matches the type? Also the arguments to a pair? -\<close> - -declare[[unify_trace_simp, unify_trace_types, simp_trace, simp_trace_depth_limit=5]] -\<comment> \<open>Turn on trace for unification and the Simplifier, for debugging.\<close> - - -section \<open>\<Prod>-type\<close> - -subsection \<open>Typing functions\<close> - -text \<open>Declaring @{thm Prod_intro} with the @{attribute intro} attribute enables @{method rule} to prove the following.\<close> - -proposition assumes "A : U(i)" shows "\<^bold>\<lambda>x. x: A \<rightarrow> A" -by (routine add: assms) - -proposition - assumes "A : U(i)" and "A \<equiv> B" - shows "\<^bold>\<lambda>x. x : B \<rightarrow> A" -proof - - have "A \<rightarrow> A \<equiv> B \<rightarrow> A" using assms by simp - moreover have "\<^bold>\<lambda>x. x : A \<rightarrow> A" by (routine add: assms) - ultimately show "\<^bold>\<lambda>x. x : B \<rightarrow> A" by simp -qed - -proposition - assumes "A : U(i)" and "B : U(i)" - shows "\<^bold>\<lambda>x y. x : A \<rightarrow> B \<rightarrow> A" -by (routine add: assms) - -subsection \<open>Function application\<close> - -proposition assumes "a : A" shows "(\<^bold>\<lambda>x. x)`a \<equiv> a" -by (derive lems: assms) - -lemma - assumes "a : A" and "\<And>x. x: A \<Longrightarrow> B(x): U(i)" - shows "(\<^bold>\<lambda>x y. y)`a \<equiv> \<^bold>\<lambda>y. y" -by (derive lems: assms) - -lemma "\<lbrakk>A: U(i); B: U(i); a : A; b : B\<rbrakk> \<Longrightarrow> (\<^bold>\<lambda>x y. y)`a`b \<equiv> b" -by derive - -lemma "\<lbrakk>A: U(i); a : A\<rbrakk> \<Longrightarrow> (\<^bold>\<lambda>x y. f x y)`a \<equiv> \<^bold>\<lambda>y. f a y" -proof derive -oops \<comment> \<open>Missing some premises.\<close> - -lemma "\<lbrakk>a : A; b : B(a); c : C(a)(b)\<rbrakk> \<Longrightarrow> (\<^bold>\<lambda>x. \<^bold>\<lambda>y. \<^bold>\<lambda>z. f x y z)`a`b`c \<equiv> f a b c" -proof derive -oops - - -subsection \<open>Currying functions\<close> - -proposition curried_function_formation: - assumes "A : U(i)" and "B: A \<longrightarrow> U(i)" and "\<And>x. C(x): B(x) \<longrightarrow> U(i)" - shows "\<Prod>x:A. \<Prod>y:B(x). C x y : U(i)" -by (routine add: assms) - -proposition higher_order_currying_formation: - assumes - "A: U(i)" and "B: A \<longrightarrow> U(i)" and - "\<And>x y. y: B(x) \<Longrightarrow> C(x)(y): U(i)" and - "\<And>x y z. 
z : C(x)(y) \<Longrightarrow> D(x)(y)(z): U(i)" - shows "\<Prod>x:A. \<Prod>y:B(x). \<Prod>z:C(x)(y). D(x)(y)(z): U(i)" -by (routine add: assms) - -lemma curried_type_judgment: - assumes "A: U(i)" and "B: A \<longrightarrow> U(i)" and "\<And>x y. \<lbrakk>x : A; y : B(x)\<rbrakk> \<Longrightarrow> f x y : C x y" - shows "\<^bold>\<lambda>x y. f x y : \<Prod>x:A. \<Prod>y:B(x). C x y" -by (routine add: assms) - - -text \<open> -Polymorphic identity function is now trivial due to lambda expression polymorphism. -It was more involved in previous monomorphic incarnations. -\<close> - -lemma "x: A \<Longrightarrow> id`x \<equiv> x" -by derive - - -section \<open>Natural numbers\<close> - -text \<open>Automatic proof methods recognize natural numbers.\<close> - -proposition "succ(succ(succ 0)): \<nat>" by routine - - -end |