author     Josh Chen  2018-05-29 12:28:13 +0200
committer  Josh Chen  2018-05-29 12:28:13 +0200
commit     607c3971e08d1ded22bd9f1cabdd309653af1248 (patch)
tree       256da6d96c3310c72a4fa2e87043382c77440cf0
parent     120879c099a2fb71e67a41a1c852c5db65e9eb4f (diff)
More rigorous rules for the Product type. Propositions and proofs are all working, but I still have to think about relaxing the computation rule, or else automating the currying of dependent functions.
-rw-r--r--  HoTT.thy           29
-rw-r--r--  HoTT_Theorems.thy  67
2 files changed, 72 insertions, 24 deletions
diff --git a/HoTT.thy b/HoTT.thy
index 4713a0d..96bd3c1 100644
--- a/HoTT.thy
+++ b/HoTT.thy
@@ -54,24 +54,27 @@ subsubsection \<open>Dependent function/product\<close>
consts
Prod :: "[Term, (Term \<Rightarrow> Term)] \<Rightarrow> Term"
+ lambda :: "[Term, (Term \<Rightarrow> Term)] \<Rightarrow> Term"
syntax
- "_Prod" :: "[idt, Term, Term] \<Rightarrow> Term" ("(3\<Prod>_:_./ _)" 10)
+ "_Prod" :: "[idt, Term, Term] \<Rightarrow> Term" ("(3\<Prod>_:_./ _)" 10)
+ "__lambda" :: "[idt, Term, Term] \<Rightarrow> Term" ("(3\<^bold>\<lambda>_:_./ _)" 10)
translations
"\<Prod>x:A. B" \<rightleftharpoons> "CONST Prod A (\<lambda>x. B)"
-(* The above syntax translation binds the x in the expression B *)
+ "\<^bold>\<lambda>x:A. b" \<rightleftharpoons> "CONST lambda A (\<lambda>x. b)"
+(* The above syntax translations bind the x in the expressions B, b. *)
abbreviation Function :: "[Term, Term] \<Rightarrow> Term" (infixr "\<rightarrow>" 30)
where "A\<rightarrow>B \<equiv> \<Prod>_:A. B"
axiomatization
- lambda :: "(Term \<Rightarrow> Term) \<Rightarrow> Term" (binder "\<^bold>\<lambda>" 10) and
- appl :: "[Term, Term] \<Rightarrow> Term" ("(3_`/_)" [10, 10] 60)
+ appl :: "[Term, Term] \<Rightarrow> Term" (infixl "`" 60)
where
Prod_form: "\<And>(A::Term) (B::Term \<Rightarrow> Term). \<lbrakk>A : U; B : A \<rightarrow> U\<rbrakk> \<Longrightarrow> \<Prod>x:A. B(x) : U" and
- Prod_intro [intro]: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (b::Term \<Rightarrow> Term). \<lbrakk>A : U; B : A \<rightarrow> U; \<And>x::Term. x : A \<Longrightarrow> b(x) : B(x)\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>x. b(x) : \<Prod>x:A. B(x)" and
+ Prod_intro [intro]: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (b::Term \<Rightarrow> Term). \<lbrakk>A : U; B : A \<rightarrow> U; \<And>x::Term. x : A \<Longrightarrow> b(x) : B(x)\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>x:A. b(x) : \<Prod>x:A. B(x)" and
Prod_elim [elim]: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (f::Term) (a::Term). \<lbrakk>f : \<Prod>x:A. B(x); a : A\<rbrakk> \<Longrightarrow> f`a : B(a)" and
- Prod_comp [simp]: "\<And>(b::Term \<Rightarrow> Term) (a::Term). (\<^bold>\<lambda>x. b(x))`a \<equiv> b(a)" and
- Prod_uniq [simp]: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (f::Term). f : \<Prod>x:A. B(x) \<Longrightarrow> \<^bold>\<lambda>x. (f`x) \<equiv> f"
+ Prod_comp [simp]: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (b::Term \<Rightarrow> Term) (a::Term). \<lbrakk>A : U; B : A \<rightarrow> U; \<And>x::Term. x : A \<Longrightarrow> b(x) : B(x); a : A\<rbrakk> \<Longrightarrow> (\<^bold>\<lambda>x:A. b(x))`a \<equiv> b(a)" and
+ Prod_uniq [simp]: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (f::Term). f : \<Prod>x:A. B(x) \<Longrightarrow> \<^bold>\<lambda>x:A. (f`x) \<equiv> f"
+(* Thinking about the premises for the computation rule: they make simplification rather cumbersome, so should I remove them? Could that lead to logical problems, in that untrue statements become expressible (though probably still not provable)? *)
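An editorial aside on the question in the comment above (a sketch, not part of the commit; the name Prod_comp' is made up): dropping the premises would restore a premise-free computation rule, now with the annotated binder, along the lines of

  axiomatization where
    Prod_comp' [simp]: "\<And>(A::Term) (b::Term \<Rightarrow> Term) (a::Term). (\<^bold>\<lambda>x:A. b(x))`a \<equiv> b(a)"

With such a rule the simplifier would reduce (\<^bold>\<lambda>x:A. b(x))`a even when a : A is not derivable, so ill-typed equalities become easy to state by rewriting, though they should remain unprovable as typing judgements.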
text "Note that the syntax \<open>\<^bold>\<lambda>\<close> (bold lambda) used for dependent functions clashes with the proof term syntax (cf. \<section>2.5.2 of the Isabelle/Isar Implementation)."
@@ -90,17 +93,19 @@ abbreviation Pair :: "[Term, Term] \<Rightarrow> Term" (infixr "\<times>" 50)
where "A\<times>B \<equiv> \<Sum>_:A. B"
axiomatization
- pair :: "[Term, Term] \<Rightarrow> Term" ("(1'(_,/ _'))") and
+ pair :: "[Term, Term] \<Rightarrow> Term" ("(1'(_,/ _'))") and
indSum :: "[Term \<Rightarrow> Term, Term \<Rightarrow> Term, Term] \<Rightarrow> Term"
where
Sum_form: "\<And>(A::Term) (B::Term \<Rightarrow> Term). \<lbrakk>A : U; B: A \<rightarrow> U\<rbrakk> \<Longrightarrow> \<Sum>x:A. B(x) : U" and
- Sum_intro: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (a::Term) (b::Term). \<lbrakk>A : U; B: A \<rightarrow> U; a : A; b : B(a)\<rbrakk> \<Longrightarrow> (a, b) : \<Sum>x:A. B(x)" and
- Sum_elim: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (C::Term \<Rightarrow> Term) (f::Term \<Rightarrow> Term) (p::Term). \<lbrakk>A : U; B: A \<rightarrow> U; C: \<Sum>x:A. B(x) \<rightarrow> U; \<And>x y::Term. \<lbrakk>x : A; y : B(x)\<rbrakk> \<Longrightarrow> f((x,y)) : C((x,y)); p : \<Sum>x:A. B(x)\<rbrakk> \<Longrightarrow> (indSum C f p) : C(p)"
- Sum_comp: ""
+ Sum_intro [intro]: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (a::Term) (b::Term). \<lbrakk>A : U; B: A \<rightarrow> U; a : A; b : B(a)\<rbrakk> \<Longrightarrow> (a, b) : \<Sum>x:A. B(x)" and
+ Sum_elim [elim]: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (C::Term \<Rightarrow> Term) (f::Term \<Rightarrow> Term) (p::Term). \<lbrakk>A : U; B: A \<rightarrow> U; C: \<Sum>x:A. B(x) \<rightarrow> U; \<And>x y::Term. \<lbrakk>x : A; y : B(x)\<rbrakk> \<Longrightarrow> f((x,y)) : C((x,y)); p : \<Sum>x:A. B(x)\<rbrakk> \<Longrightarrow> (indSum C f p) : C(p)" and
+ Sum_comp [simp]: "\<And>(A::Term) (B::Term \<Rightarrow> Term) (C::Term \<Rightarrow> Term) (f::Term \<Rightarrow> Term) (a::Term) (b::Term). \<lbrakk>A : U; B: A \<rightarrow> U; C: \<Sum>x:A. B(x) \<rightarrow> U; \<And>x y::Term. \<lbrakk>x : A; y : B(x)\<rbrakk> \<Longrightarrow> f((x,y)) : C((x,y)); a : A; b : B(a)\<rbrakk> \<Longrightarrow> (indSum C f (a,b)) \<equiv> f((a,b))"
-text "A choice had to be made for the elimination rule: we formalize the function \<open>f\<close> taking \<open>a : A\<close> and \<open>b : B(x)\<close> and returning \<open>C((a,b))\<close> as a meta-lambda \<open>f::Term \<Rightarrow> Term\<close> instead of an object dependent function \<open>f : \<Prod>x:A. B(x)\<close>.
+text "A choice had to be made for the elimination rule: we formalize the function \<open>f\<close> taking \<open>a : A\<close> and \<open>b : B(x)\<close> and returning \<open>C((a,b))\<close> as a meta level \<open>f::Term \<Rightarrow> Term\<close> instead of an object logic dependent function \<open>f : \<Prod>x:A. B(x)\<close>.
However we should be able to later show the equivalence of the formalizations."
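For comparison, an object-level variant of the elimination rule could look as follows (an editorial sketch; the names indSum' and Sum_elim' are hypothetical and do not occur in the commit):

  consts indSum' :: "[Term \<Rightarrow> Term, Term, Term] \<Rightarrow> Term"
  axiomatization where
    Sum_elim': "\<And>(A::Term) (B::Term \<Rightarrow> Term) (C::Term \<Rightarrow> Term) (f::Term) (p::Term). \<lbrakk>A : U; B: A \<rightarrow> U; C: \<Sum>x:A. B(x) \<rightarrow> U; f : \<Prod>x:A. \<Prod>y:B(x). C((x,y)); p : \<Sum>x:A. B(x)\<rbrakk> \<Longrightarrow> indSum' C f p : C(p)"

Showing the equivalence of the two formalizations would then amount to passing between the meta-level \<open>f::Term \<Rightarrow> Term\<close> and an object-level term built with \<open>\<^bold>\<lambda>\<close> and applied with \<open>`\<close>.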
+
+
\<comment> \<open>Projection onto first component\<close>
(*
definition proj1 :: "Term \<Rightarrow> Term \<Rightarrow> Term" ("(proj1\<langle>_,_\<rangle>)") where
diff --git a/HoTT_Theorems.thy b/HoTT_Theorems.thy
index 33b0957..d83a08c 100644
--- a/HoTT_Theorems.thy
+++ b/HoTT_Theorems.thy
@@ -6,23 +6,25 @@ text "A bunch of theorems and other statements for sanity-checking, as well as t
Things that *should* be automated:
\<bullet> Checking that \<open>A\<close> is a well-formed type, when writing things like \<open>x : A\<close> and \<open>A : U\<close>.
-"
+ \<bullet> Checking that the argument to a (dependent or non-dependent) function matches the function's domain type? Likewise for the arguments to a pair?"
\<comment> \<open>Turn on trace for unification and the simplifier, for debugging.\<close>
-declare[[unify_trace_simp, unify_trace_types, simp_trace]]
+declare[[unify_trace_simp, unify_trace_types, simp_trace, simp_trace_depth_limit=2]]
section \<open>Functions\<close>
+subsection \<open>Typing functions\<close>
+
text "Declaring \<open>Prod_intro\<close> with the \<open>intro\<close> attribute (in HoTT.thy) enables \<open>standard\<close> to prove the following."
-lemma id_function: "A : U \<Longrightarrow> \<^bold>\<lambda>x. x : A\<rightarrow>A" ..
+lemma id_function: "A : U \<Longrightarrow> \<^bold>\<lambda>x:A. x : A\<rightarrow>A" ..
text "Here is the same result, stated and proved differently.
The standard method invoked after the keyword \<open>proof\<close> is applied to the goal \<open>\<^bold>\<lambda>x. x: A\<rightarrow>A\<close>, and so we need to show the prover how to continue, as opposed to the previous lemma."
lemma
assumes "A : U"
- shows "\<^bold>\<lambda>x. x : A\<rightarrow>A"
+ shows "\<^bold>\<lambda>x:A. x : A\<rightarrow>A"
proof
show "A : U" using assms .
show "\<lambda>x. A : A \<rightarrow> U" using assms ..
@@ -31,29 +33,29 @@ qed
text "Note that there is no provision for declaring the type of bound variables outside of the scope of a lambda expression.
More generally, we cannot write an assumption stating 'Let \<open>x\<close> be a variable of type \<open>A\<close>'."
-proposition "\<lbrakk>A : U; A \<equiv> B\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>x. x : B\<rightarrow>A"
+proposition "\<lbrakk>A : U; A \<equiv> B\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>x:A. x : B\<rightarrow>A"
proof -
assume
1: "A : U" and
2: "A \<equiv> B"
- from id_function[OF 1] have 3: "\<^bold>\<lambda>x. x : A\<rightarrow>A" .
+ from id_function[OF 1] have 3: "\<^bold>\<lambda>x:A. x : A\<rightarrow>A" .
from 2 have "A\<rightarrow>A \<equiv> B\<rightarrow>A" by simp
- with 3 show "\<^bold>\<lambda>x. x : B\<rightarrow>A" ..
+ with 3 show "\<^bold>\<lambda>x:A. x : B\<rightarrow>A" ..
qed
text "It is instructive to try to prove \<open>\<lbrakk>A : U; B : U\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>x. \<^bold>\<lambda>y. x : A\<rightarrow>B\<rightarrow>A\<close>.
First we prove an intermediate step."
-lemma constant_function: "\<lbrakk>A : U; B : U; x : A\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>y. x : B\<rightarrow>A" ..
+lemma constant_function: "\<lbrakk>A : U; B : U; x : A\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>y:B. x : B\<rightarrow>A" ..
text "And now the actual result:"
proposition
assumes 1: "A : U" and 2: "B : U"
- shows "\<^bold>\<lambda>x. \<^bold>\<lambda>y. x : A\<rightarrow>B\<rightarrow>A"
+ shows "\<^bold>\<lambda>x:A. \<^bold>\<lambda>y:B. x : A\<rightarrow>B\<rightarrow>A"
proof
show "A : U" using assms(1) .
- show "\<And>x. x : A \<Longrightarrow> \<^bold>\<lambda>y. x : B \<rightarrow> A" using assms by (rule constant_function)
+ show "\<And>x. x : A \<Longrightarrow> \<^bold>\<lambda>y:B. x : B \<rightarrow> A" using assms by (rule constant_function)
from assms have "B \<rightarrow> A : U" by (rule Prod_formation)
then show "\<lambda>x. B \<rightarrow> A: A \<rightarrow> U" using assms(1) by (rule constant_type_family)
@@ -61,13 +63,54 @@ qed
text "Maybe a nicer way to write it:"
-proposition "\<lbrakk>A : U; B: U\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>x. \<^bold>\<lambda>y. x : A\<rightarrow>B\<rightarrow>A"
+proposition alternating_function: "\<lbrakk>A : U; B: U\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>x:A. \<^bold>\<lambda>y:B. x : A\<rightarrow>B\<rightarrow>A"
proof
fix x
- show "\<lbrakk>A : U; B : U; x : A\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>y. x : B \<rightarrow> A" by (rule constant_function)
+ show "\<lbrakk>A : U; B : U; x : A\<rbrakk> \<Longrightarrow> \<^bold>\<lambda>y:B. x : B \<rightarrow> A" by (rule constant_function)
show "\<lbrakk>A : U; B : U\<rbrakk> \<Longrightarrow> B\<rightarrow>A : U" by (rule Prod_formation)
qed
+subsection \<open>Function application\<close>
+
+lemma "\<lbrakk>A : U; a : A\<rbrakk> \<Longrightarrow> (\<^bold>\<lambda>x:A. x)`a \<equiv> a" by simp
+
+lemma
+ assumes
+ "A:U" and
+ "B:U" and
+ "a:A" and
+ "b:B"
+ shows "(\<^bold>\<lambda>x:A. \<^bold>\<lambda>y:B. x)`a`b \<equiv> a"
+proof -
+ have "(\<^bold>\<lambda>x:A. \<^bold>\<lambda>y:B. x)`a \<equiv> \<^bold>\<lambda>y:B. a"
+ proof (rule Prod_comp[of A "\<lambda>_. B\<rightarrow>A"])
+ have "B \<rightarrow> A : U" using constant_type_family[OF assms(1) assms(2)] assms(2) by (rule Prod_formation)
+ then show "\<lambda>x. B \<rightarrow> A: A \<rightarrow> U" using assms(1) by (rule constant_type_family[of "B\<rightarrow>A"])
+
+ show "\<And>x. x : A \<Longrightarrow> \<^bold>\<lambda>y:B. x : B \<rightarrow> A" using assms(2) assms(1) ..
+ show "A:U" using assms(1) .
+ show "a:A" using assms(3) .
+ qed (* Why do I need to do the above for the last two goals? Can't Isabelle do it automatically? *)
+
+ then have "(\<^bold>\<lambda>x:A. \<^bold>\<lambda>y:B. x)`a`b \<equiv> (\<^bold>\<lambda>y:B. a)`b" by simp
+
+ also have "(\<^bold>\<lambda>y:B. a)`b \<equiv> a"
+ proof (rule Prod_comp[of B "\<lambda>_. A"])
+ show "\<lambda>y. A: B \<rightarrow> U" using assms(1) assms(2) by (rule constant_type_family)
+ show "\<And>y. y : B \<Longrightarrow> a : A" using assms(3) .
+ show "B:U" using assms(2) .
+ show "b:B" using assms(4) .
+ qed
+
+ finally show "(\<^bold>\<lambda>x:A. \<^bold>\<lambda>y:B. x)`a`b \<equiv> a" .
+qed
+
+text "Polymorphic identity function."
+
+consts Ui::Term
+definition Id where "Id \<equiv> \<^bold>\<lambda>A:Ui. \<^bold>\<lambda>x:A. x"
+(* Have to think about universes... *)
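A sketch of the intended behaviour (editorial; it assumes, beyond what is in the commit, some universe rule making the elements of Ui types, which is exactly what remains to be worked out):

  lemma "\<lbrakk>Ui : U; A : Ui; A : U; a : A\<rbrakk> \<Longrightarrow> Id`A`a \<equiv> a"
    oops (* statement only: a proof would unfold Id_def and apply Prod_comp twice, once universe rules are in place *)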
+
section \<open>Nats\<close>
text "Here's a dumb proof that 2 is a natural number."