Diffstat (limited to 'spartan/theories')
-rw-r--r--  spartan/theories/Equivalence.thy  431
-rw-r--r--  spartan/theories/Identity.thy     433
-rw-r--r--  spartan/theories/Spartan.thy      463
3 files changed, 1327 insertions, 0 deletions
diff --git a/spartan/theories/Equivalence.thy b/spartan/theories/Equivalence.thy
new file mode 100644
index 0000000..44b77dd
--- /dev/null
+++ b/spartan/theories/Equivalence.thy
@@ -0,0 +1,431 @@
+theory Equivalence
+imports Identity
+
+begin
+
+section \<open>Homotopy\<close>
+
+definition "homotopy A B f g \<equiv> \<Prod>x: A. f `x =\<^bsub>B x\<^esub> g `x"
+
+definition homotopy_i (infix "~" 100)
+ where [implicit]: "f ~ g \<equiv> homotopy ? ? f g"
+
+translations "f ~ g" \<leftharpoondown> "CONST homotopy A B f g"
+
+Lemma homotopy_type [typechk]:
+ assumes
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> B x: U i"
+ "f: \<Prod>x: A. B x" "g: \<Prod>x: A. B x"
+ shows "f ~ g: U i"
+ unfolding homotopy_def by typechk
+
+Lemma (derive) homotopy_refl:
+ assumes
+ "A: U i"
+ "f: \<Prod>x: A. B x"
+ shows "f ~ f"
+ unfolding homotopy_def by intros
+
+Lemma (derive) hsym:
+ assumes
+ "f: \<Prod>x: A. B x"
+ "g: \<Prod>x: A. B x"
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> B x: U i"
+ shows "H: f ~ g \<Longrightarrow> g ~ f"
+ unfolding homotopy_def
+ apply intros
+ apply (rule pathinv)
+ \<guillemotright> by (elim H)
+ \<guillemotright> by typechk
+ done
+
+lemmas homotopy_symmetric = hsym[rotated 4]
+
+text \<open>\<open>hsym\<close> attribute for homotopies:\<close>
+
+ML \<open>
+structure HSym_Attr = Sym_Attr (
+ struct
+ val name = "hsym"
+ val symmetry_rule = @{thm homotopy_symmetric}
+ end
+)
+\<close>
+
+setup \<open>HSym_Attr.setup\<close>
+
+Lemma (derive) htrans:
+ assumes
+ "f: \<Prod>x: A. B x"
+ "g: \<Prod>x: A. B x"
+ "h: \<Prod>x: A. B x"
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> B x: U i"
+ shows "\<lbrakk>H1: f ~ g; H2: g ~ h\<rbrakk> \<Longrightarrow> f ~ h"
+ unfolding homotopy_def
+ apply intro
+ \<guillemotright> vars x
+ apply (rule pathcomp[where ?y="g x"])
+ \<^item> by (elim H1)
+ \<^item> by (elim H2)
+ done
+ \<guillemotright> by typechk
+ done
+
+lemmas homotopy_transitive = htrans[rotated 5]
+
+Lemma (derive) commute_homotopy:
+ assumes
+ "A: U i" "B: U i"
+ "x: A" "y: A"
+ "p: x =\<^bsub>A\<^esub> y"
+ "f: A \<rightarrow> B" "g: A \<rightarrow> B"
+ "H: homotopy A (\<lambda>_. B) f g"
+ shows "(H x) \<bullet> g[p] = f[p] \<bullet> (H y)"
+ \<comment> \<open>Need this assumption unfolded for typechecking:\<close>
+ supply assms(8)[unfolded homotopy_def]
+ apply (equality \<open>p:_\<close>)
+ focus vars x
+ apply reduce
+ \<comment> \<open>Here it would really be nice to have automation for transport and
+ propositional equality.\<close>
+ apply (rule use_transport[where ?y="H x \<bullet> refl (g x)"])
+ \<guillemotright> by (rule pathcomp_right_refl)
+ \<guillemotright> by (rule pathinv) (rule pathcomp_left_refl)
+ \<guillemotright> by typechk
+ done
+ done
+
+Corollary (derive) commute_homotopy':
+ assumes
+ "A: U i"
+ "x: A"
+ "f: A \<rightarrow> A"
+ "H: homotopy A (\<lambda>_. A) f (id A)"
+ shows "H (f x) = f [H x]"
+oops
+
+Lemma homotopy_id_left [typechk]:
+ assumes "A: U i" "B: U i" "f: A \<rightarrow> B"
+ shows "homotopy_refl A f: (id B) \<circ> f ~ f"
+ unfolding homotopy_refl_def homotopy_def by reduce
+
+Lemma homotopy_id_right [typechk]:
+ assumes "A: U i" "B: U i" "f: A \<rightarrow> B"
+ shows "homotopy_refl A f: f \<circ> (id A) ~ f"
+ unfolding homotopy_refl_def homotopy_def by reduce
+
+Lemma homotopy_funcomp_left:
+ assumes
+ "H: homotopy B C g g'"
+ "f: A \<rightarrow> B"
+ "g: \<Prod>x: B. C x"
+ "g': \<Prod>x: B. C x"
+ "A: U i" "B: U i"
+ "\<And>x. x: B \<Longrightarrow> C x: U i"
+ shows "homotopy A {} (g \<circ>\<^bsub>A\<^esub> f) (g' \<circ>\<^bsub>A\<^esub> f)"
+ unfolding homotopy_def
+ apply (intro; reduce)
+ apply (insert \<open>H: _\<close>[unfolded homotopy_def])
+ apply (elim H)
+ done
+
+Lemma homotopy_funcomp_right:
+ assumes
+ "H: homotopy A (\<lambda>_. B) f f'"
+ "f: A \<rightarrow> B"
+ "f': A \<rightarrow> B"
+ "g: B \<rightarrow> C"
+ "A: U i" "B: U i" "C: U i"
+ shows "homotopy A {} (g \<circ>\<^bsub>A\<^esub> f) (g \<circ>\<^bsub>A\<^esub> f')"
+ unfolding homotopy_def
+ apply (intro; reduce)
+ apply (insert \<open>H: _\<close>[unfolded homotopy_def])
+ apply (dest PiE, assumption)
+ apply (rule ap, assumption)
+ done
+
+
+section \<open>Quasi-inverse and bi-invertibility\<close>
+
+subsection \<open>Quasi-inverses\<close>
+
+definition "qinv A B f \<equiv> \<Sum>g: B \<rightarrow> A.
+ homotopy A (\<lambda>_. A) (g \<circ>\<^bsub>A\<^esub> f) (id A) \<times> homotopy B (\<lambda>_. B) (f \<circ>\<^bsub>B\<^esub> g) (id B)"
+
+lemma qinv_type [typechk]:
+ assumes "A: U i" "B: U i" "f: A \<rightarrow> B"
+ shows "qinv A B f: U i"
+ unfolding qinv_def by typechk
+
+definition qinv_i ("qinv")
+ where [implicit]: "qinv f \<equiv> Equivalence.qinv ? ? f"
+
+translations "qinv f" \<leftharpoondown> "CONST Equivalence.qinv A B f"
+
+Lemma (derive) id_qinv:
+ assumes "A: U i"
+ shows "qinv (id A)"
+ unfolding qinv_def
+ apply intro defer
+ apply intro defer
+ apply (rule homotopy_id_right)
+ apply (rule homotopy_id_left)
+ done
+
+Lemma (derive) quasiinv_qinv:
+ assumes "A: U i" "B: U i" "f: A \<rightarrow> B"
+ shows "prf: qinv f \<Longrightarrow> qinv (fst prf)"
+ unfolding qinv_def
+ apply intro
+ \<guillemotright> by (rule \<open>f:_\<close>)
+ \<guillemotright> apply (elim "prf")
+ focus vars g HH
+ apply intro
+ \<^item> by reduce (snd HH)
+ \<^item> by reduce (fst HH)
+ done
+ done
+ done
+
+Lemma (derive) funcomp_qinv:
+ assumes
+ "A: U i" "B: U i" "C: U i"
+ "f: A \<rightarrow> B" "g: B \<rightarrow> C"
+ shows "qinv f \<rightarrow> qinv g \<rightarrow> qinv (g \<circ> f)"
+ apply (intros, unfold qinv_def, elims)
+ focus
+ premises hyps
+ vars _ _ finv _ ginv _ HfA HfB HgB HgC
+
+ apply intro
+ apply (rule funcompI[where ?f=ginv and ?g=finv])
+
+    text \<open>The remainder is a sequence of routine rewrites, omitted here.\<close>
+oops
+
+subsection \<open>Bi-invertible maps\<close>
+
+definition "biinv A B f \<equiv>
+ (\<Sum>g: B \<rightarrow> A. homotopy A (\<lambda>_. A) (g \<circ>\<^bsub>A\<^esub> f) (id A))
+ \<times> (\<Sum>g: B \<rightarrow> A. homotopy B (\<lambda>_. B) (f \<circ>\<^bsub>B\<^esub> g) (id B))"
+
+lemma biinv_type [typechk]:
+ assumes "A: U i" "B: U i" "f: A \<rightarrow> B"
+ shows "biinv A B f: U i"
+ unfolding biinv_def by typechk
+
+definition biinv_i ("biinv")
+ where [implicit]: "biinv f \<equiv> Equivalence.biinv ? ? f"
+
+translations "biinv f" \<leftharpoondown> "CONST Equivalence.biinv A B f"
+
+Lemma (derive) qinv_imp_biinv:
+ assumes
+ "A: U i" "B: U i"
+ "f: A \<rightarrow> B"
+ shows "?prf: qinv f \<rightarrow> biinv f"
+ apply intros
+ unfolding qinv_def biinv_def
+ by (rule Sig_dist_exp)
+
+text \<open>
+  We show that bi-invertible maps are quasi-inverses, as a demonstration of how
+  to work in this system.
+\<close>
+
+Lemma (derive) biinv_imp_qinv:
+ assumes "A: U i" "B: U i" "f: A \<rightarrow> B"
+ shows "biinv f \<rightarrow> qinv f"
+
+ text \<open>Split the hypothesis \<^term>\<open>biinv f\<close> into its components:\<close>
+ apply intro
+ unfolding biinv_def
+ apply elims
+
+ text \<open>Name the components:\<close>
+ focus premises vars _ _ _ g H1 h H2
+ thm \<open>g:_\<close> \<open>h:_\<close> \<open>H1:_\<close> \<open>H2:_\<close>
+
+ text \<open>
+ \<^term>\<open>g\<close> is a quasi-inverse to \<^term>\<open>f\<close>, so the proof will be a triple
+ \<^term>\<open><g, <?H1, ?H2>>\<close>.
+ \<close>
+ unfolding qinv_def
+ apply intro
+ \<guillemotright> by (rule \<open>g: _\<close>)
+ \<guillemotright> apply intro
+ text \<open>The first part \<^prop>\<open>?H1: g \<circ> f ~ id A\<close> is given by \<^term>\<open>H1\<close>.\<close>
+ apply (rule \<open>H1: _\<close>)
+
+ text \<open>
+ It remains to prove \<^prop>\<open>?H2: f \<circ> g ~ id B\<close>. First show that \<open>g ~ h\<close>,
+    then the result follows from @{thm \<open>H2: f \<circ> h ~ id B\<close>}. Here a structured
+    proof block is used to reason forward.
+ \<close>
+ proof -
+ have "g \<circ> (id B) ~ g \<circ> f \<circ> h"
+ by (rule homotopy_funcomp_right) (rule \<open>H2:_\<close>[hsym])
+
+ moreover have "g \<circ> f \<circ> h ~ (id A) \<circ> h"
+ by (subst funcomp_assoc[symmetric])
+ (rule homotopy_funcomp_left, rule \<open>H1:_\<close>)
+
+ ultimately have "g ~ h"
+ apply (rewrite to "g \<circ> (id B)" id_right[symmetric])
+ apply (rewrite to "(id A) \<circ> h" id_left[symmetric])
+ by (rule homotopy_transitive) (assumption, typechk)
+
+ then have "f \<circ> g ~ f \<circ> h"
+ by (rule homotopy_funcomp_right)
+
+ with \<open>H2:_\<close>
+ show "f \<circ> g ~ id B"
+ by (rule homotopy_transitive) (assumption+, typechk)
+ qed
+ done
+ done
+
+Lemma (derive) id_biinv:
+ "A: U i \<Longrightarrow> biinv (id A)"
+ by (rule qinv_imp_biinv) (rule id_qinv)
+
+Lemma (derive) funcomp_biinv:
+ assumes
+ "A: U i" "B: U i" "C: U i"
+ "f: A \<rightarrow> B" "g: B \<rightarrow> C"
+ shows "biinv f \<rightarrow> biinv g \<rightarrow> biinv (g \<circ> f)"
+ apply intros
+ focus vars biinv\<^sub>f biinv\<^sub>g
+
+ text \<open>Follows from \<open>funcomp_qinv\<close>.\<close>
+oops
+
+
+section \<open>Equivalence\<close>
+
+text \<open>
+ Following the HoTT book, we first define equivalence in terms of
+ bi-invertibility.
+\<close>
+
+definition equivalence (infix "\<simeq>" 110)
+ where "A \<simeq> B \<equiv> \<Sum>f: A \<rightarrow> B. Equivalence.biinv A B f"
+
+lemma equivalence_type [typechk]:
+ assumes "A: U i" "B: U i"
+ shows "A \<simeq> B: U i"
+ unfolding equivalence_def by typechk
+
+Lemma (derive) equivalence_refl:
+ assumes "A: U i"
+ shows "A \<simeq> A"
+ unfolding equivalence_def
+ apply intro defer
+ apply (rule qinv_imp_biinv) defer
+ apply (rule id_qinv)
+ done
+
+text \<open>
+  The following could perhaps be proved more easily using transport, but that
+  would presumably require univalence.
+\<close>
+
+Lemma (derive) equivalence_symmetric:
+ assumes "A: U i" "B: U i"
+ shows "A \<simeq> B \<rightarrow> B \<simeq> A"
+ apply intros
+ unfolding equivalence_def
+ apply elim
+ \<guillemotright> vars _ f "prf"
+ apply (dest (4) biinv_imp_qinv)
+ apply intro
+ \<^item> unfolding qinv_def by (rule fst[of "(biinv_imp_qinv A B f) prf"])
+ \<^item> by (rule qinv_imp_biinv) (rule quasiinv_qinv)
+ done
+ done
+
+Lemma (derive) equivalence_transitive:
+ assumes "A: U i" "B: U i" "C: U i"
+ shows "A \<simeq> B \<rightarrow> B \<simeq> C \<rightarrow> A \<simeq> C"
+ apply intros
+ unfolding equivalence_def
+
+ text \<open>Use \<open>funcomp_biinv\<close>.\<close>
+oops
+
+text \<open>
+ Equal types are equivalent. We give two proofs: the first by induction, and
+ the second by following the HoTT book and showing that transport is an
+ equivalence.
+\<close>
+
+Lemma
+ assumes
+ "A: U i" "B: U i" "p: A =\<^bsub>U i\<^esub> B"
+ shows "A \<simeq> B"
+ by (equality \<open>p:_\<close>) (rule equivalence_refl)
+
+text \<open>
+ The following proof is wordy because (1) the typechecker doesn't rewrite, and
+ (2) we don't yet have universe automation.
+\<close>
+
+Lemma (derive) id_imp_equiv:
+ assumes
+ "A: U i" "B: U i" "p: A =\<^bsub>U i\<^esub> B"
+ shows "<trans (id (U i)) p, ?isequiv>: A \<simeq> B"
+ unfolding equivalence_def
+ apply intros defer
+
+ \<comment> \<open>Switch off auto-typechecking, which messes with universe levels\<close>
+ supply [[auto_typechk=false]]
+
+ \<guillemotright> apply (equality \<open>p:_\<close>)
+ \<^item> premises vars A B
+ apply (rewrite at A in "A \<rightarrow> B" id_comp[symmetric])
+ apply (rewrite at B in "_ \<rightarrow> B" id_comp[symmetric])
+ apply (rule transport, rule U_in_U)
+ apply (rule lift_universe_codomain, rule U_in_U)
+ apply (typechk, rule U_in_U)
+ done
+ \<^item> premises vars A
+ apply (subst transport_comp)
+ \<^enum> by (rule U_in_U)
+ \<^enum> by reduce (rule lift_universe)
+ \<^enum> by reduce (rule id_biinv)
+ done
+ done
+
+  \<guillemotright> \<comment> \<open>Similar to the proof of the first subgoal above\<close>
+ apply (rewrite at A in "A \<rightarrow> B" id_comp[symmetric])
+ apply (rewrite at B in "_ \<rightarrow> B" id_comp[symmetric])
+ apply (rule transport, rule U_in_U)
+ apply (rule lift_universe_codomain, rule U_in_U)
+ apply (typechk, rule U_in_U)
+ done
+ done
+
+(*Uncomment this to see all implicits from here on.
+no_translations
+ "f x" \<leftharpoondown> "f `x"
+ "x = y" \<leftharpoondown> "x =\<^bsub>A\<^esub> y"
+ "g \<circ> f" \<leftharpoondown> "g \<circ>\<^bsub>A\<^esub> f"
+ "p\<inverse>" \<leftharpoondown> "CONST pathinv A x y p"
+ "p \<bullet> q" \<leftharpoondown> "CONST pathcomp A x y z p q"
+ "fst" \<leftharpoondown> "CONST Spartan.fst A B"
+ "snd" \<leftharpoondown> "CONST Spartan.snd A B"
+ "f[p]" \<leftharpoondown> "CONST ap A B x y f p"
+ "trans P p" \<leftharpoondown> "CONST transport A P x y p"
+ "trans_const B p" \<leftharpoondown> "CONST transport_const A B x y p"
+ "lift P p u" \<leftharpoondown> "CONST pathlift A P x y p u"
+ "apd f p" \<leftharpoondown> "CONST Identity.apd A P f x y p"
+ "f ~ g" \<leftharpoondown> "CONST homotopy A B f g"
+ "qinv f" \<leftharpoondown> "CONST Equivalence.qinv A B f"
+ "biinv f" \<leftharpoondown> "CONST Equivalence.biinv A B f"
+*)
+
+
+end
diff --git a/spartan/theories/Identity.thy b/spartan/theories/Identity.thy
new file mode 100644
index 0000000..0edf20e
--- /dev/null
+++ b/spartan/theories/Identity.thy
@@ -0,0 +1,433 @@
+chapter \<open>The identity type\<close>
+
+text \<open>
+ The identity type, the higher groupoid structure of types,
+ and type families as fibrations.
+\<close>
+
+theory Identity
+imports Spartan
+
+begin
+
+axiomatization
+ Id :: \<open>o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> and
+ refl :: \<open>o \<Rightarrow> o\<close> and
+ IdInd :: \<open>o \<Rightarrow> (o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o) \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close>
+
+syntax "_Id" :: \<open>o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> ("(2_ =\<^bsub>_\<^esub>/ _)" [111, 0, 111] 110)
+
+translations "a =\<^bsub>A\<^esub> b" \<rightleftharpoons> "CONST Id A a b"
+
+axiomatization where
+ IdF: "\<lbrakk>A: U i; a: A; b: A\<rbrakk> \<Longrightarrow> a =\<^bsub>A\<^esub> b: U i" and
+
+ IdI: "a: A \<Longrightarrow> refl a: a =\<^bsub>A\<^esub> a" and
+
+ IdE: "\<lbrakk>
+ p: a =\<^bsub>A\<^esub> b;
+ a: A;
+ b: A;
+ \<And>x y p. \<lbrakk>p: x =\<^bsub>A\<^esub> y; x: A; y: A\<rbrakk> \<Longrightarrow> C x y p: U i;
+ \<And>x. x: A \<Longrightarrow> f x: C x x (refl x)
+ \<rbrakk> \<Longrightarrow> IdInd A (\<lambda>x y p. C x y p) f a b p: C a b p" and
+
+ Id_comp: "\<lbrakk>
+ a: A;
+ \<And>x y p. \<lbrakk>x: A; y: A; p: x =\<^bsub>A\<^esub> y\<rbrakk> \<Longrightarrow> C x y p: U i;
+ \<And>x. x: A \<Longrightarrow> f x: C x x (refl x)
+ \<rbrakk> \<Longrightarrow> IdInd A (\<lambda>x y p. C x y p) f a a (refl a) \<equiv> f a"
+
+lemmas
+ [intros] = IdF IdI and
+ [elims] = IdE and
+ [comps] = Id_comp
+
+
+section \<open>Induction\<close>
+
+ML_file \<open>../lib/equality.ML\<close>
+
+method_setup equality = \<open>Scan.lift Parse.thm >> (fn (fact, _) => fn ctxt =>
+ CONTEXT_METHOD (K (Equality.equality_context_tac fact ctxt)))\<close>
+
+
+section \<open>Symmetry and transitivity\<close>
+
+Lemma (derive) pathinv:
+ assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y"
+ shows "y =\<^bsub>A\<^esub> x"
+ by (equality \<open>p:_\<close>) intro
+
+lemma pathinv_comp [comps]:
+ assumes "x: A" "A: U i"
+ shows "pathinv A x x (refl x) \<equiv> refl x"
+ unfolding pathinv_def by reduce
+
+Lemma (derive) pathcomp:
+ assumes
+ "A: U i" "x: A" "y: A" "z: A"
+ "p: x =\<^bsub>A\<^esub> y" "q: y =\<^bsub>A\<^esub> z"
+ shows
+ "x =\<^bsub>A\<^esub> z"
+ apply (equality \<open>p:_\<close>)
+ focus premises vars x p
+ apply (equality \<open>p:_\<close>)
+ apply intro
+ done
+ done
+
+lemma pathcomp_comp [comps]:
+ assumes "a: A" "A: U i"
+ shows "pathcomp A a a a (refl a) (refl a) \<equiv> refl a"
+ unfolding pathcomp_def by reduce
+
+text \<open>Set up \<open>sym\<close> attribute for propositional equalities:\<close>
+
+ML \<open>
+structure Id_Sym_Attr = Sym_Attr (
+ struct
+ val name = "sym"
+ val symmetry_rule = @{thm pathinv[rotated 3]}
+ end
+)
+\<close>
+
+setup \<open>Id_Sym_Attr.setup\<close>
+
+
+section \<open>Notation\<close>
+
+definition Id_i (infix "=" 110)
+ where [implicit]: "Id_i x y \<equiv> x =\<^bsub>?\<^esub> y"
+
+definition pathinv_i ("_\<inverse>" [1000])
+ where [implicit]: "pathinv_i p \<equiv> pathinv ? ? ? p"
+
+definition pathcomp_i (infixl "\<bullet>" 121)
+ where [implicit]: "pathcomp_i p q \<equiv> pathcomp ? ? ? ? p q"
+
+translations
+ "x = y" \<leftharpoondown> "x =\<^bsub>A\<^esub> y"
+ "p\<inverse>" \<leftharpoondown> "CONST pathinv A x y p"
+ "p \<bullet> q" \<leftharpoondown> "CONST pathcomp A x y z p q"
+
+
+section \<open>Basic propositional equalities\<close>
+
+(*TODO: Better integration of equality type directly into reasoning...*)
+
+Lemma (derive) pathcomp_left_refl:
+ assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y"
+ shows "(refl x) \<bullet> p = p"
+ apply (equality \<open>p:_\<close>)
+ apply (reduce; intros)
+ done
+
+Lemma (derive) pathcomp_right_refl:
+ assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y"
+ shows "p \<bullet> (refl y) = p"
+ apply (equality \<open>p:_\<close>)
+ apply (reduce; intros)
+ done
+
+Lemma (derive) pathcomp_left_inv:
+ assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y"
+ shows "p\<inverse> \<bullet> p = refl y"
+ apply (equality \<open>p:_\<close>)
+ apply (reduce; intros)
+ done
+
+Lemma (derive) pathcomp_right_inv:
+ assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y"
+ shows "p \<bullet> p\<inverse> = refl x"
+ apply (equality \<open>p:_\<close>)
+ apply (reduce; intros)
+ done
+
+Lemma (derive) pathinv_pathinv:
+ assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y"
+ shows "p\<inverse>\<inverse> = p"
+ apply (equality \<open>p:_\<close>)
+ apply (reduce; intros)
+ done
+
+Lemma (derive) pathcomp_assoc:
+ assumes
+ "A: U i" "x: A" "y: A" "z: A" "w: A"
+ "p: x =\<^bsub>A\<^esub> y" "q: y =\<^bsub>A\<^esub> z" "r: z =\<^bsub>A\<^esub> w"
+ shows "p \<bullet> (q \<bullet> r) = p \<bullet> q \<bullet> r"
+ apply (equality \<open>p:_\<close>)
+ focus premises vars x p
+ apply (equality \<open>p:_\<close>)
+ focus premises vars x p
+ apply (equality \<open>p:_\<close>)
+ apply (reduce; intros)
+ done
+ done
+ done
+
+
+section \<open>Functoriality of functions\<close>
+
+Lemma (derive) ap:
+ assumes
+ "A: U i" "B: U i"
+ "x: A" "y: A"
+ "f: A \<rightarrow> B"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "f x = f y"
+ apply (equality \<open>p:_\<close>)
+ apply intro
+ done
+
+definition ap_i ("_[_]" [1000, 0])
+ where [implicit]: "ap_i f p \<equiv> ap ? ? ? ? f p"
+
+translations "f[p]" \<leftharpoondown> "CONST ap A B x y f p"
+
+Lemma ap_refl [comps]:
+ assumes "f: A \<rightarrow> B" "x: A" "A: U i" "B: U i"
+ shows "f[refl x] \<equiv> refl (f x)"
+ unfolding ap_def by reduce
+
+Lemma (derive) ap_pathcomp:
+ assumes
+ "A: U i" "B: U i"
+ "x: A" "y: A" "z: A"
+ "f: A \<rightarrow> B"
+ "p: x =\<^bsub>A\<^esub> y" "q: y =\<^bsub>A\<^esub> z"
+ shows
+ "f[p \<bullet> q] = f[p] \<bullet> f[q]"
+ apply (equality \<open>p:_\<close>)
+ focus premises vars x p
+ apply (equality \<open>p:_\<close>)
+ apply (reduce; intro)
+ done
+ done
+
+Lemma (derive) ap_pathinv:
+ assumes
+ "A: U i" "B: U i"
+ "x: A" "y: A"
+ "f: A \<rightarrow> B"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "f[p\<inverse>] = f[p]\<inverse>"
+ by (equality \<open>p:_\<close>) (reduce; intro)
+
+text \<open>The next two proofs currently use some low-level rewriting.\<close>
+
+Lemma (derive) ap_funcomp:
+ assumes
+ "A: U i" "B: U i" "C: U i"
+ "x: A" "y: A"
+ "f: A \<rightarrow> B" "g: B \<rightarrow> C"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "(g \<circ> f)[p] = g[f[p]]"
+ apply (equality \<open>p:_\<close>)
+ apply (simp only: funcomp_apply_comp[symmetric])
+ apply (reduce; intro)
+ done
+
+Lemma (derive) ap_id:
+ assumes "A: U i" "x: A" "y: A" "p: x =\<^bsub>A\<^esub> y"
+ shows "(id A)[p] = p"
+ apply (equality \<open>p:_\<close>)
+ apply (rewrite at "\<hole> = _" id_comp[symmetric])
+ apply (rewrite at "_ = \<hole>" id_comp[symmetric])
+ apply (reduce; intros)
+ done
+
+
+section \<open>Transport\<close>
+
+Lemma (derive) transport:
+ assumes
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ "x: A" "y: A"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "P x \<rightarrow> P y"
+ by (equality \<open>p:_\<close>) intro
+
+definition transport_i ("trans")
+ where [implicit]: "trans P p \<equiv> transport ? P ? ? p"
+
+translations "trans P p" \<leftharpoondown> "CONST transport A P x y p"
+
+Lemma transport_comp [comps]:
+ assumes
+ "a: A"
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ shows "trans P (refl a) \<equiv> id (P a)"
+ unfolding transport_def id_def by reduce
+
+\<comment> \<open>TODO: Build transport automation!\<close>
+
+Lemma use_transport:
+ assumes
+ "p: y =\<^bsub>A\<^esub> x"
+ "u: P x"
+ "x: A" "y: A"
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ shows "trans P p\<inverse> u: P y"
+ by typechk
+
+Lemma (derive) transport_left_inv:
+ assumes
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ "x: A" "y: A"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "(trans P p\<inverse>) \<circ> (trans P p) = id (P x)"
+ by (equality \<open>p:_\<close>) (reduce; intro)
+
+Lemma (derive) transport_right_inv:
+ assumes
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ "x: A" "y: A"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "(trans P p) \<circ> (trans P p\<inverse>) = id (P y)"
+ by (equality \<open>p:_\<close>) (reduce; intros)
+
+Lemma (derive) transport_pathcomp:
+ assumes
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ "x: A" "y: A" "z: A"
+ "u: P x"
+ "p: x =\<^bsub>A\<^esub> y" "q: y =\<^bsub>A\<^esub> z"
+ shows "trans P q (trans P p u) = trans P (p \<bullet> q) u"
+ apply (equality \<open>p:_\<close>)
+ focus premises vars x p
+ apply (equality \<open>p:_\<close>)
+ apply (reduce; intros)
+ done
+ done
+
+Lemma (derive) transport_compose_typefam:
+ assumes
+ "A: U i" "B: U i"
+ "\<And>x. x: B \<Longrightarrow> P x: U i"
+ "f: A \<rightarrow> B"
+ "x: A" "y: A"
+ "p: x =\<^bsub>A\<^esub> y"
+ "u: P (f x)"
+ shows "trans (\<lambda>x. P (f x)) p u = trans P f[p] u"
+ by (equality \<open>p:_\<close>) (reduce; intros)
+
+Lemma (derive) transport_function_family:
+ assumes
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ "\<And>x. x: A \<Longrightarrow> Q x: U i"
+ "f: \<Prod>x: A. P x \<rightarrow> Q x"
+ "x: A" "y: A"
+ "u: P x"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "trans Q p ((f x) u) = (f y) (trans P p u)"
+ by (equality \<open>p:_\<close>) (reduce; intros)
+
+Lemma (derive) transport_const:
+ assumes
+ "A: U i" "B: U i"
+ "x: A" "y: A"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "\<Prod>b: B. trans (\<lambda>_. B) p b = b"
+ by (intro, equality \<open>p:_\<close>) (reduce; intro)
+
+definition transport_const_i ("trans'_const")
+ where [implicit]: "trans_const B p \<equiv> transport_const ? B ? ? p"
+
+translations "trans_const B p" \<leftharpoondown> "CONST transport_const A B x y p"
+
+Lemma transport_const_comp [comps]:
+ assumes
+ "x: A" "b: B"
+ "A: U i" "B: U i"
+  shows "trans_const B (refl x) b \<equiv> refl b"
+ unfolding transport_const_def by reduce
+
+Lemma (derive) pathlift:
+ assumes
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ "x: A" "y: A"
+ "p: x =\<^bsub>A\<^esub> y"
+ "u: P x"
+ shows "<x, u> = <y, trans P p u>"
+ by (equality \<open>p:_\<close>) (reduce; intros)
+
+definition pathlift_i ("lift")
+ where [implicit]: "lift P p u \<equiv> pathlift ? P ? ? p u"
+
+translations "lift P p u" \<leftharpoondown> "CONST pathlift A P x y p u"
+
+Lemma pathlift_comp [comps]:
+ assumes
+ "u: P x"
+ "x: A"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ "A: U i"
+ shows "lift P (refl x) u \<equiv> refl <x, u>"
+ unfolding pathlift_def by reduce
+
+Lemma (derive) pathlift_fst:
+ assumes
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ "x: A" "y: A"
+ "u: P x"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "fst[lift P p u] = p"
+ apply (equality \<open>p:_\<close>)
+ text \<open>Some rewriting needed here:\<close>
+ \<guillemotright> vars x y
+ (*Here an automatic reordering tactic would be helpful*)
+ apply (rewrite at x in "x = y" fst_comp[symmetric])
+ prefer 4
+ apply (rewrite at y in "_ = y" fst_comp[symmetric])
+ done
+ \<guillemotright> by reduce intro
+ done
+
+
+section \<open>Dependent paths\<close>
+
+Lemma (derive) apd:
+ assumes
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ "f: \<Prod>x: A. P x"
+ "x: A" "y: A"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "trans P p (f x) = f y"
+ by (equality \<open>p:_\<close>) (reduce; intros; typechk)
+
+definition apd_i ("apd")
+ where [implicit]: "apd f p \<equiv> Identity.apd ? ? f ? ? p"
+
+translations "apd f p" \<leftharpoondown> "CONST Identity.apd A P f x y p"
+
+Lemma dependent_map_comp [comps]:
+ assumes
+ "f: \<Prod>x: A. P x"
+ "x: A"
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> P x: U i"
+ shows "apd f (refl x) \<equiv> refl (f x)"
+ unfolding apd_def by reduce
+
+Lemma (derive) apd_ap:
+ assumes
+ "A: U i" "B: U i"
+ "f: A \<rightarrow> B"
+ "x: A" "y: A"
+ "p: x =\<^bsub>A\<^esub> y"
+ shows "apd f p = trans_const B p (f x) \<bullet> f[p]"
+ by (equality \<open>p:_\<close>) (reduce; intro)
+
+end
diff --git a/spartan/theories/Spartan.thy b/spartan/theories/Spartan.thy
new file mode 100644
index 0000000..fb901d5
--- /dev/null
+++ b/spartan/theories/Spartan.thy
@@ -0,0 +1,463 @@
+text \<open>Spartan type theory\<close>
+
+theory Spartan
+imports
+ Pure
+ "HOL-Eisbach.Eisbach"
+ "HOL-Eisbach.Eisbach_Tools"
+keywords
+ "Theorem" "Lemma" "Corollary" "Proposition" :: thy_goal_stmt and
+ "focus" "\<guillemotright>" "\<^item>" "\<^enum>" "~" :: prf_script_goal % "proof" and
+  "derive" "vars" :: quasi_command and
+ "print_coercions" :: thy_decl
+
+begin
+
+
+section \<open>Preamble\<close>
+
+declare [[eta_contract=false]]
+
+
+section \<open>Metatype setup\<close>
+
+typedecl o
+
+
+section \<open>Judgments\<close>
+
+judgment has_type :: \<open>o \<Rightarrow> o \<Rightarrow> prop\<close> ("(2_:/ _)" 999)
+
+
+section \<open>Universes\<close>
+
+typedecl lvl \<comment> \<open>Universe levels\<close>
+
+axiomatization
+ O :: \<open>lvl\<close> and
+ S :: \<open>lvl \<Rightarrow> lvl\<close> and
+ lt :: \<open>lvl \<Rightarrow> lvl \<Rightarrow> prop\<close> (infix "<" 900)
+ where
+ O_min: "O < S i" and
+ lt_S: "i < S i" and
+ lt_trans: "i < j \<Longrightarrow> j < k \<Longrightarrow> i < k"
+
+axiomatization U :: \<open>lvl \<Rightarrow> o\<close> where
+ U_hierarchy: "i < j \<Longrightarrow> U i: U j" and
+ U_cumulative: "A: U i \<Longrightarrow> i < j \<Longrightarrow> A: U j"
+
+lemma U_in_U:
+ "U i: U (S i)"
+ by (rule U_hierarchy, rule lt_S)
+
+lemma lift_universe:
+ "A: U i \<Longrightarrow> A: U (S i)"
+ by (erule U_cumulative, rule lt_S)
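+
+text \<open>
+  A small worked example (a sketch; the name \<open>lift_universe_twice\<close> is ours and
+  is not used later): cumulativity and transitivity of \<open><\<close> combine to lift a
+  type by any number of successor steps, here two.
+\<close>
+
+lemma lift_universe_twice:
+  "A: U i \<Longrightarrow> A: U (S (S i))"
+  by (erule U_cumulative, rule lt_trans, rule lt_S, rule lt_S)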
+
+
+section \<open>\<Prod>-type\<close>
+
+axiomatization
+ Pi :: \<open>o \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> o\<close> and
+ lam :: \<open>o \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> o\<close> and
+ app :: \<open>o \<Rightarrow> o \<Rightarrow> o\<close> ("(1_ `_)" [120, 121] 120)
+
+syntax
+ "_Pi" :: \<open>idt \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> ("(2\<Prod>_: _./ _)" 30)
+ "_lam" :: \<open>idt \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> ("(2\<lambda>_: _./ _)" 30)
+translations
+ "\<Prod>x: A. B" \<rightleftharpoons> "CONST Pi A (\<lambda>x. B)"
+ "\<lambda>x: A. b" \<rightleftharpoons> "CONST lam A (\<lambda>x. b)"
+
+abbreviation Fn (infixr "\<rightarrow>" 40) where "A \<rightarrow> B \<equiv> \<Prod>_: A. B"
+
+axiomatization where
+ PiF: "\<lbrakk>\<And>x. x: A \<Longrightarrow> B x: U i; A: U i\<rbrakk> \<Longrightarrow> \<Prod>x: A. B x: U i" and
+
+ PiI: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x: B x; A: U i\<rbrakk> \<Longrightarrow> \<lambda>x: A. b x: \<Prod>x: A. B x" and
+
+ PiE: "\<lbrakk>f: \<Prod>x: A. B x; a: A\<rbrakk> \<Longrightarrow> f `a: B a" and
+
+ beta: "\<lbrakk>a: A; \<And>x. x: A \<Longrightarrow> b x: B x\<rbrakk> \<Longrightarrow> (\<lambda>x: A. b x) `a \<equiv> b a" and
+
+ eta: "f: \<Prod>x: A. B x \<Longrightarrow> \<lambda>x: A. f `x \<equiv> f" and
+
+ Pi_cong: "\<lbrakk>
+ A: U i;
+ \<And>x. x: A \<Longrightarrow> B x: U i;
+ \<And>x. x: A \<Longrightarrow> B' x: U i;
+ \<And>x. x: A \<Longrightarrow> B x \<equiv> B' x
+ \<rbrakk> \<Longrightarrow> \<Prod>x: A. B x \<equiv> \<Prod>x: A. B' x" and
+
+ lam_cong: "\<lbrakk>\<And>x. x: A \<Longrightarrow> b x \<equiv> c x; A: U i\<rbrakk> \<Longrightarrow> \<lambda>x: A. b x \<equiv> \<lambda>x: A. c x"
+
+
+section \<open>\<Sum>-type\<close>
+
+axiomatization
+ Sig :: \<open>o \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> o\<close> and
+ pair :: \<open>o \<Rightarrow> o \<Rightarrow> o\<close> ("(2<_,/ _>)") and
+ SigInd :: \<open>o \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> (o \<Rightarrow> o) \<Rightarrow> (o \<Rightarrow> o \<Rightarrow> o) \<Rightarrow> o \<Rightarrow> o\<close>
+
+syntax "_Sum" :: \<open>idt \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> ("(2\<Sum>_: _./ _)" 20)
+
+translations "\<Sum>x: A. B" \<rightleftharpoons> "CONST Sig A (\<lambda>x. B)"
+
+abbreviation Prod (infixl "\<times>" 50)
+ where "A \<times> B \<equiv> \<Sum>_: A. B"
+
+axiomatization where
+ SigF: "\<lbrakk>\<And>x. x: A \<Longrightarrow> B x: U i; A: U i\<rbrakk> \<Longrightarrow> \<Sum>x: A. B x: U i" and
+
+ SigI: "\<lbrakk>\<And>x. x: A \<Longrightarrow> B x: U i; a: A; b: B a\<rbrakk> \<Longrightarrow> <a, b>: \<Sum>x: A. B x" and
+
+ SigE: "\<lbrakk>
+ p: \<Sum>x: A. B x;
+ A: U i;
+ \<And>x. x : A \<Longrightarrow> B x: U i;
+ \<And>p. p: \<Sum>x: A. B x \<Longrightarrow> C p: U i;
+ \<And>x y. \<lbrakk>x: A; y: B x\<rbrakk> \<Longrightarrow> f x y: C <x, y>
+ \<rbrakk> \<Longrightarrow> SigInd A (\<lambda>x. B x) (\<lambda>p. C p) f p: C p" and
+
+ Sig_comp: "\<lbrakk>
+ a: A;
+ b: B a;
+ \<And>x. x: A \<Longrightarrow> B x: U i;
+ \<And>p. p: \<Sum>x: A. B x \<Longrightarrow> C p: U i;
+ \<And>x y. \<lbrakk>x: A; y: B x\<rbrakk> \<Longrightarrow> f x y: C <x, y>
+ \<rbrakk> \<Longrightarrow> SigInd A (\<lambda>x. B x) (\<lambda>p. C p) f <a, b> \<equiv> f a b" and
+
+ Sig_cong: "\<lbrakk>
+ \<And>x. x: A \<Longrightarrow> B x \<equiv> B' x;
+ A: U i;
+ \<And>x. x : A \<Longrightarrow> B x: U i;
+ \<And>x. x : A \<Longrightarrow> B' x: U i
+ \<rbrakk> \<Longrightarrow> \<Sum>x: A. B x \<equiv> \<Sum>x: A. B' x"
+
+
+section \<open>Proof commands\<close>
+
+named_theorems typechk
+
+ML_file \<open>../lib/lib.ML\<close>
+ML_file \<open>../lib/goals.ML\<close>
+ML_file \<open>../lib/focus.ML\<close>
+
+
+section \<open>Congruence automation\<close>
+
+ML_file \<open>../lib/congruence.ML\<close>
+
+
+section \<open>Methods\<close>
+
+ML_file \<open>../lib/elimination.ML\<close> \<comment> \<open>declares the [elims] attribute\<close>
+
+named_theorems intros and comps
+lemmas
+ [intros] = PiF PiI SigF SigI and
+ [elims] = PiE SigE and
+ [comps] = beta Sig_comp and
+ [cong] = Pi_cong lam_cong Sig_cong
+
+ML_file \<open>../lib/tactics.ML\<close>
+
+method_setup assumptions =
+ \<open>Scan.succeed (fn ctxt => SIMPLE_METHOD (
+ CHANGED (TRYALL (assumptions_tac ctxt))))\<close>
+
+method_setup known =
+ \<open>Scan.succeed (fn ctxt => SIMPLE_METHOD (
+ CHANGED (TRYALL (known_tac ctxt))))\<close>
+
+method_setup intro =
+ \<open>Scan.succeed (fn ctxt => SIMPLE_METHOD (HEADGOAL (intro_tac ctxt)))\<close>
+
+method_setup intros =
+ \<open>Scan.succeed (fn ctxt => SIMPLE_METHOD (HEADGOAL (intros_tac ctxt)))\<close>
+
+method_setup elim =
+ \<open>Scan.option Args.term >> (fn tm => fn ctxt =>
+ SIMPLE_METHOD' (SIDE_CONDS (elims_tac tm ctxt) ctxt))\<close>
+
+method elims = elim+
+
+method_setup typechk =
+ \<open>Scan.succeed (fn ctxt => SIMPLE_METHOD (
+ CHANGED (ALLGOALS (TRY o typechk_tac ctxt))))\<close>
+
+method_setup rule =
+ \<open>Attrib.thms >> (fn ths => fn ctxt =>
+ SIMPLE_METHOD (HEADGOAL (rule_tac ths ctxt)))\<close>
+
+method_setup dest =
+ \<open>Scan.lift (Scan.option (Args.parens Parse.int)) -- Attrib.thms
+ >> (fn (opt_n, ths) => fn ctxt =>
+ SIMPLE_METHOD (HEADGOAL (dest_tac opt_n ths ctxt)))\<close>
+
+subsection \<open>Rewriting\<close>
+
+\<comment> \<open>\<open>subst\<close> method\<close>
+ML_file "~~/src/Tools/misc_legacy.ML"
+ML_file "~~/src/Tools/IsaPlanner/isand.ML"
+ML_file "~~/src/Tools/IsaPlanner/rw_inst.ML"
+ML_file "~~/src/Tools/IsaPlanner/zipper.ML"
+ML_file "../lib/eqsubst.ML"
+
+\<comment> \<open>\<open>rewrite\<close> method\<close>
+consts rewrite_HOLE :: "'a::{}" ("\<hole>")
+
+lemma eta_expand:
+ fixes f :: "'a::{} \<Rightarrow> 'b::{}"
+ shows "f \<equiv> \<lambda>x. f x" .
+
+lemma rewr_imp:
+ assumes "PROP A \<equiv> PROP B"
+ shows "(PROP A \<Longrightarrow> PROP C) \<equiv> (PROP B \<Longrightarrow> PROP C)"
+ apply (rule Pure.equal_intr_rule)
+ apply (drule equal_elim_rule2[OF assms]; assumption)
+ apply (drule equal_elim_rule1[OF assms]; assumption)
+ done
+
+lemma imp_cong_eq:
+ "(PROP A \<Longrightarrow> (PROP B \<Longrightarrow> PROP C) \<equiv> (PROP B' \<Longrightarrow> PROP C')) \<equiv>
+ ((PROP B \<Longrightarrow> PROP A \<Longrightarrow> PROP C) \<equiv> (PROP B' \<Longrightarrow> PROP A \<Longrightarrow> PROP C'))"
+ apply (Pure.intro Pure.equal_intr_rule)
+ apply (drule (1) cut_rl; drule Pure.equal_elim_rule1 Pure.equal_elim_rule2; assumption)+
+ apply (drule Pure.equal_elim_rule1 Pure.equal_elim_rule2; assumption)+
+ done
+
+ML_file \<open>~~/src/HOL/Library/cconv.ML\<close>
+ML_file \<open>../lib/rewrite.ML\<close>
+
+\<comment> \<open>\<open>reduce\<close> method computes terms via judgmental equalities\<close>
+setup \<open>
+ map_theory_simpset (fn ctxt =>
+ ctxt addSolver (mk_solver "" typechk_tac))
+\<close>
+
+method reduce uses add = (simp add: comps add | subst comps, reduce add: add)+
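+
+text \<open>
+  A minimal demonstration (this merely restates the \<open>beta\<close> rule and is not used
+  elsewhere): goals that hold by the registered computation rules are discharged
+  by \<open>reduce\<close>.
+\<close>
+
+lemma
+  assumes "a: A" "\<And>x. x: A \<Longrightarrow> b x: B x"
+  shows "(\<lambda>x: A. b x) `a \<equiv> b a"
+  by reduce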
+
+
+section \<open>Implicit notations\<close>
+
+text \<open>
+ \<open>?\<close> is used to mark implicit arguments in definitions, while \<open>{}\<close> is expanded
+ immediately for elaboration in statements.
+\<close>
+
+consts
+ iarg :: \<open>'a\<close> ("?")
+ hole :: \<open>'b\<close> ("{}")
+
+ML_file \<open>../lib/implicits.ML\<close>
+
+attribute_setup implicit = \<open>Scan.succeed Implicits.implicit_defs_attr\<close>
+
+ML \<open>
+val _ = Context.>>
+ (Syntax_Phases.term_check 1 "" (fn ctxt => map (Implicits.make_holes ctxt)))
+\<close>
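+
+text \<open>
+  A sketch of the intended pattern (the first real instances are \<open>funcomp_i\<close>,
+  \<open>fst_i\<close> and \<open>snd_i\<close> below): an implicit variant of a constant is introduced by
+  a definition such as
+  \<open>definition fst_i ("fst") where [implicit]: "fst \<equiv> Spartan.fst ? ?"\<close>
+  and the term check installed above turns each \<open>?\<close> into a hole that is then
+  filled in by elaboration at the use site.
+\<close>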
+
+text \<open>Automatically insert inhabitation judgments where needed:\<close>
+
+consts inhabited :: \<open>o \<Rightarrow> prop\<close> ("(_)")
+translations "CONST inhabited A" \<rightharpoonup> "CONST has_type {} A"
+
+
+section \<open>Lambda coercion\<close>
+
+\<comment> \<open>Coerce object lambdas to meta-lambdas\<close>
+abbreviation (input) lambda :: \<open>o \<Rightarrow> o \<Rightarrow> o\<close>
+ where "lambda f \<equiv> \<lambda>x. f `x"
+
+ML_file \<open>~~/src/Tools/subtyping.ML\<close>
+declare [[coercion_enabled, coercion lambda]]
+
+translations "f x" \<leftharpoondown> "f `x"
+
+
+section \<open>Functions\<close>
+
+lemma eta_exp:
+ assumes "f: \<Prod>x: A. B x"
+ shows "f \<equiv> \<lambda>x: A. f x"
+ by (rule eta[symmetric])
+
+lemma lift_universe_codomain:
+ assumes "A: U i" "f: A \<rightarrow> U j"
+ shows "f: A \<rightarrow> U (S j)"
+ apply (sub eta_exp)
+ apply known
+ apply (Pure.rule intros; rule lift_universe)
+ done
+
+subsection \<open>Function composition\<close>
+
+definition "funcomp A g f \<equiv> \<lambda>x: A. g `(f `x)"
+
+syntax
+ "_funcomp" :: \<open>o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o\<close> ("(2_ \<circ>\<^bsub>_\<^esub>/ _)" [111, 0, 110] 110)
+translations
+ "g \<circ>\<^bsub>A\<^esub> f" \<rightleftharpoons> "CONST funcomp A g f"
+
+lemma funcompI [typechk]:
+ assumes
+ "A: U i"
+ "B: U i"
+ "\<And>x. x: B \<Longrightarrow> C x: U i"
+ "f: A \<rightarrow> B"
+ "g: \<Prod>x: B. C x"
+ shows
+ "g \<circ>\<^bsub>A\<^esub> f: \<Prod>x: A. C (f x)"
+ unfolding funcomp_def by typechk
+
+lemma funcomp_assoc [comps]:
+ assumes
+ "f: A \<rightarrow> B"
+ "g: B \<rightarrow> C"
+ "h: \<Prod>x: C. D x"
+ "A: U i"
+ shows
+ "(h \<circ>\<^bsub>B\<^esub> g) \<circ>\<^bsub>A\<^esub> f \<equiv> h \<circ>\<^bsub>A\<^esub> g \<circ>\<^bsub>A\<^esub> f"
+ unfolding funcomp_def by reduce
+
+lemma funcomp_lambda_comp [comps]:
+ assumes
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> b x: B"
+ "\<And>x. x: B \<Longrightarrow> c x: C x"
+ shows
+ "(\<lambda>x: B. c x) \<circ>\<^bsub>A\<^esub> (\<lambda>x: A. b x) \<equiv> \<lambda>x: A. c (b x)"
+ unfolding funcomp_def by reduce
+
+lemma funcomp_apply_comp [comps]:
+ assumes
+ "f: A \<rightarrow> B" "g: \<Prod>x: B. C x"
+ "x: A"
+ "A: U i" "B: U i"
+    "\<And>x. x: B \<Longrightarrow> C x: U i"
+ shows "(g \<circ>\<^bsub>A\<^esub> f) x \<equiv> g (f x)"
+ unfolding funcomp_def by reduce
+
+text \<open>Notation:\<close>
+
+definition funcomp_i (infixr "\<circ>" 120)
+ where [implicit]: "funcomp_i g f \<equiv> g \<circ>\<^bsub>?\<^esub> f"
+
+translations "g \<circ> f" \<leftharpoondown> "g \<circ>\<^bsub>A\<^esub> f"
+
+subsection \<open>Identity function\<close>
+
+definition id where "id A \<equiv> \<lambda>x: A. x"
+
+lemma
+ idI [typechk]: "A: U i \<Longrightarrow> id A: A \<rightarrow> A" and
+ id_comp [comps]: "x: A \<Longrightarrow> (id A) x \<equiv> x"
+ unfolding id_def by reduce
+
+lemma id_left [comps]:
+ assumes "f: A \<rightarrow> B" "A: U i" "B: U i"
+ shows "(id B) \<circ>\<^bsub>A\<^esub> f \<equiv> f"
+ unfolding id_def
+ by (subst eta_exp[of f]) (reduce, rule eta)
+
+lemma id_right [comps]:
+ assumes "f: A \<rightarrow> B" "A: U i" "B: U i"
+ shows "f \<circ>\<^bsub>A\<^esub> (id A) \<equiv> f"
+ unfolding id_def
+ by (subst eta_exp[of f]) (reduce, rule eta)
+
+lemma id_U [typechk]:
+ "id (U i): U i \<rightarrow> U i"
+ by typechk (fact U_in_U)
+
+
+section \<open>Pairs\<close>
+
+definition "fst A B \<equiv> \<lambda>p: \<Sum>x: A. B x. SigInd A B (\<lambda>_. A) (\<lambda>x y. x) p"
+definition "snd A B \<equiv> \<lambda>p: \<Sum>x: A. B x. SigInd A B (\<lambda>p. B (fst A B p)) (\<lambda>x y. y) p"
+
+lemma fst_type [typechk]:
+ assumes "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i"
+ shows "fst A B: (\<Sum>x: A. B x) \<rightarrow> A"
+ unfolding fst_def by typechk
+
+lemma fst_comp [comps]:
+ assumes
+ "a: A"
+ "b: B a"
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> B x: U i"
+ shows "fst A B <a, b> \<equiv> a"
+ unfolding fst_def by reduce
+
+lemma snd_type [typechk]:
+ assumes "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i"
+ shows "snd A B: \<Prod>p: \<Sum>x: A. B x. B (fst A B p)"
+ unfolding snd_def by typechk reduce
+
+lemma snd_comp [comps]:
+ assumes "a: A" "b: B a" "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i"
+ shows "snd A B <a, b> \<equiv> b"
+ unfolding snd_def by reduce
+
+subsection \<open>Notation\<close>
+
+definition fst_i ("fst")
+ where [implicit]: "fst \<equiv> Spartan.fst ? ?"
+
+definition snd_i ("snd")
+ where [implicit]: "snd \<equiv> Spartan.snd ? ?"
+
+translations
+ "fst" \<leftharpoondown> "CONST Spartan.fst A B"
+ "snd" \<leftharpoondown> "CONST Spartan.snd A B"
+
+subsection \<open>Projections\<close>
+
+Lemma fst [typechk]:
+ assumes
+ "p: \<Sum>x: A. B x"
+ "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i"
+ shows "fst p: A"
+ by typechk
+
+Lemma snd [typechk]:
+ assumes
+ "p: \<Sum>x: A. B x"
+ "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i"
+ shows "snd p: B (fst p)"
+ by typechk
+
+method fst for p::o = rule fst[of p]
+method snd for p::o = rule snd[of p]
+
+subsection \<open>Properties of \<Sigma>\<close>
+
+Lemma (derive) Sig_dist_exp:
+ assumes
+ "p: \<Sum>x: A. B x \<times> C x"
+ "A: U i"
+ "\<And>x. x: A \<Longrightarrow> B x: U i"
+ "\<And>x. x: A \<Longrightarrow> C x: U i"
+ shows "(\<Sum>x: A. B x) \<times> (\<Sum>x: A. C x)"
+ apply (elim p)
+ focus vars x y
+ apply intro
+ \<guillemotright> apply intro
+ apply assumption
+ apply (fst y)
+ done
+ \<guillemotright> apply intro
+ apply assumption
+ apply (snd y)
+ done
+ done
+ done
+
+
+end