-rw-r--r--  hott/Equivalence.thy                  |  34
-rw-r--r--  hott/Identity.thy                     |  47
-rw-r--r--  hott/Nat.thy                          |  14
-rw-r--r--  spartan/core/Spartan.thy              |  31
-rw-r--r--  spartan/core/context_facts.ML         |  44
-rw-r--r--  spartan/core/elaborated_statement.ML  |  31
-rw-r--r--  spartan/core/elimination.ML           |   2
-rw-r--r--  spartan/core/focus.ML                 |   3
-rw-r--r--  spartan/core/goals.ML                 |   7
-rw-r--r--  spartan/core/lib.ML                   |  16
-rw-r--r--  spartan/core/tactics.ML               |  14
-rw-r--r--  spartan/core/typechecking.ML          |  96
-rw-r--r--  spartan/core/types.ML                 | 141
-rw-r--r--  spartan/lib/List.thy                  |   2
-rw-r--r--  spartan/lib/Maybe.thy                 |   6
-rw-r--r--  spartan/lib/Prelude.thy               |   3
16 files changed, 313 insertions, 178 deletions
diff --git a/hott/Equivalence.thy b/hott/Equivalence.thy
index 7d1f2b1..a57ed44 100644
--- a/hott/Equivalence.thy
+++ b/hott/Equivalence.thy
@@ -52,7 +52,7 @@ Lemma (def) hsym:
shows "g ~ f"
unfolding homotopy_def
proof intro
- fix x assume "x: A" then have "f x = g x"
+ fix x assuming "x: A" then have "f x = g x"
by (htpy H)
thus "g x = f x"
by (rule pathinv) fact
@@ -70,7 +70,7 @@ Lemma (def) htrans:
shows "f ~ h"
unfolding homotopy_def
proof intro
- fix x assume "x: A"
+ fix x assuming "x: A"
have *: "f x = g x" "g x = h x"
by (htpy H1, htpy H2)
show "f x = h x"
@@ -119,7 +119,7 @@ Lemma funcomp_right_htpy:
shows "(g \<circ> f) ~ (g \<circ> f')"
unfolding homotopy_def
proof (intro, reduce)
- fix x assume "x: A"
+ fix x assuming "x: A"
have *: "f x = f' x"
by (htpy H)
show "g (f x) = g (f' x)"
@@ -154,18 +154,18 @@ Corollary (def) commute_homotopy':
"H: f ~ (id A)"
shows "H (f x) = f [H x]"
proof -
- from \<open>H: f ~ id A\<close> have "H: \<Prod>x: A. f x = x"
+ from \<open>H: f ~ id A\<close> have [type]: "H: \<Prod>x: A. f x = x"
by (reduce add: homotopy_def)
- have "(id A)[H x]: f x = x"
+ have *: "(id A)[H x]: f x = x"
by (rewrite at "\<hole> = _" id_comp[symmetric],
rewrite at "_ = \<hole>" id_comp[symmetric])
have "H (f x) \<bullet> H x = H (f x) \<bullet> (id A)[H x]"
- by (rule left_whisker, transport eq: ap_id) (reduce+, refl)
+ by (rule left_whisker, fact *, transport eq: ap_id) (reduce+, refl)
also have [simplified id_comp]: "H (f x) \<bullet> (id A)[H x] = f[H x] \<bullet> H x"
by (rule commute_homotopy)
- finally have *: "{}" by this
+ finally have *: "{}" using * by this
show "H (f x) = f [H x]"
by pathcomp_cancelr (fact, typechk+)
@@ -179,7 +179,7 @@ subsection \<open>Quasi-inverses\<close>
definition "is_qinv A B f \<equiv> \<Sum>g: B \<rightarrow> A.
homotopy A (fn _. A) (g \<circ>\<^bsub>A\<^esub> f) (id A) \<times> homotopy B (fn _. B) (f \<circ>\<^bsub>B\<^esub> g) (id B)"
-lemma is_qinv_type [type]:
+Lemma is_qinv_type [type]:
assumes "A: U i" "B: U i" "f: A \<rightarrow> B"
shows "is_qinv A B f: U i"
unfolding is_qinv_def
@@ -266,7 +266,7 @@ definition "is_biinv A B f \<equiv>
(\<Sum>g: B \<rightarrow> A. homotopy A (fn _. A) (g \<circ>\<^bsub>A\<^esub> f) (id A))
\<times> (\<Sum>g: B \<rightarrow> A. homotopy B (fn _. B) (f \<circ>\<^bsub>B\<^esub> g) (id B))"
-lemma is_biinv_type [type]:
+Lemma is_biinv_type [type]:
assumes "A: U i" "B: U i" "f: A \<rightarrow> B"
shows "is_biinv A B f: U i"
unfolding is_biinv_def by typechk
@@ -365,7 +365,7 @@ text \<open>
definition equivalence (infix "\<simeq>" 110)
where "A \<simeq> B \<equiv> \<Sum>f: A \<rightarrow> B. Equivalence.is_biinv A B f"
-lemma equivalence_type [type]:
+Lemma equivalence_type [type]:
assumes "A: U i" "B: U i"
shows "A \<simeq> B: U i"
unfolding equivalence_def by typechk
@@ -432,28 +432,24 @@ Lemma (def) equiv_if_equal:
\<^enum> vars A B
apply (rewrite at A in "A \<rightarrow> B" id_comp[symmetric])
using [[solve_side_conds=1]]
- apply (rewrite at B in "_ \<rightarrow> B" id_comp[symmetric], fact)
+ apply (rewrite at B in "_ \<rightarrow> B" id_comp[symmetric])
apply (rule transport, rule Ui_in_USi)
- apply (rule lift_universe_codomain, rule Ui_in_USi)
- apply (typechk, rule Ui_in_USi)
- by facts
+ by (rule lift_universe_codomain, rule Ui_in_USi)
\<^enum> vars A
using [[solve_side_conds=1]]
apply (subst transport_comp)
\<circ> by (rule Ui_in_USi)
\<circ> by reduce (rule in_USi_if_in_Ui)
- \<circ> by reduce (rule id_is_biinv, fact)
+ \<circ> by reduce (rule id_is_biinv)
done
done
\<^item> \<comment> \<open>Similar proof as in the first subgoal above\<close>
apply (rewrite at A in "A \<rightarrow> B" id_comp[symmetric])
using [[solve_side_conds=1]]
- apply (rewrite at B in "_ \<rightarrow> B" id_comp[symmetric], fact)
+ apply (rewrite at B in "_ \<rightarrow> B" id_comp[symmetric])
apply (rule transport, rule Ui_in_USi)
- apply (rule lift_universe_codomain, rule Ui_in_USi)
- apply (typechk, rule Ui_in_USi)
- by facts
+ by (rule lift_universe_codomain, rule Ui_in_USi)
done
(*Uncomment this to see all implicits from here on.
diff --git a/hott/Identity.thy b/hott/Identity.thy
index 4829b6f..247d6a4 100644
--- a/hott/Identity.thy
+++ b/hott/Identity.thy
@@ -82,7 +82,7 @@ Lemma (def) pathinv:
shows "y =\<^bsub>A\<^esub> x"
by (eq p) intro
-lemma pathinv_comp [comp]:
+Lemma pathinv_comp [comp]:
assumes "A: U i" "x: A"
shows "pathinv A x x (refl x) \<equiv> refl x"
unfolding pathinv_def by reduce
@@ -94,11 +94,11 @@ Lemma (def) pathcomp:
shows
"x =\<^bsub>A\<^esub> z"
proof (eq p)
- fix x q assume "x: A" and "q: x =\<^bsub>A\<^esub> z"
+ fix x q assuming "x: A" and "q: x =\<^bsub>A\<^esub> z"
show "x =\<^bsub>A\<^esub> z" by (eq q) refl
qed
-lemma pathcomp_comp [comp]:
+Lemma pathcomp_comp [comp]:
assumes "A: U i" "a: A"
shows "pathcomp A a a a (refl a) (refl a) \<equiv> refl a"
unfolding pathcomp_def by reduce
@@ -491,9 +491,9 @@ Lemma (def) right_whisker:
shows "p \<bullet> r = q \<bullet> r"
apply (eq r)
focus vars x s t proof -
- have "t \<bullet> refl x = t" by (rule pathcomp_refl)
- also have ".. = s" by fact
- also have ".. = s \<bullet> refl x" by (rule pathcomp_refl[symmetric])
+ have "s \<bullet> refl x = s" by (rule pathcomp_refl)
+ also have ".. = t" by fact
+ also have ".. = t \<bullet> refl x" by (rule pathcomp_refl[symmetric])
finally show "{}" by this
qed
done
@@ -505,9 +505,9 @@ Lemma (def) left_whisker:
shows "r \<bullet> p = r \<bullet> q"
apply (eq r)
focus vars x s t proof -
- have "refl x \<bullet> t = t" by (rule refl_pathcomp)
- also have ".. = s" by fact
- also have ".. = refl x \<bullet> s" by (rule refl_pathcomp[symmetric])
+ have "refl x \<bullet> s = s" by (rule refl_pathcomp)
+ also have ".. = t" by fact
+ also have ".. = refl x \<bullet> t" by (rule refl_pathcomp[symmetric])
finally show "{}" by this
qed
done
@@ -542,14 +542,13 @@ text \<open>Conditions under which horizontal path-composition is defined.\<clos
locale horiz_pathcomposable =
fixes
i A a b c p q r s
-assumes assums:
+assumes [type]:
"A: U i" "a: A" "b: A" "c: A"
"p: a =\<^bsub>A\<^esub> b" "q: a =\<^bsub>A\<^esub> b"
"r: b =\<^bsub>A\<^esub> c" "s: b =\<^bsub>A\<^esub> c"
begin
Lemma (def) horiz_pathcomp:
- notes assums
assumes "\<alpha>: p = q" "\<beta>: r = s"
shows "p \<bullet> r = q \<bullet> s"
proof (rule pathcomp)
@@ -560,7 +559,6 @@ qed typechk
text \<open>A second horizontal composition:\<close>
Lemma (def) horiz_pathcomp':
- notes assums
assumes "\<alpha>: p = q" "\<beta>: r = s"
shows "p \<bullet> r = q \<bullet> s"
proof (rule pathcomp)
@@ -572,13 +570,12 @@ notation horiz_pathcomp (infix "\<star>" 121)
notation horiz_pathcomp' (infix "\<star>''" 121)
Lemma (def) horiz_pathcomp_eq_horiz_pathcomp':
- notes assums
assumes "\<alpha>: p = q" "\<beta>: r = s"
shows "\<alpha> \<star> \<beta> = \<alpha> \<star>' \<beta>"
unfolding horiz_pathcomp_def horiz_pathcomp'_def
apply (eq \<alpha>, eq \<beta>)
focus vars p apply (eq p)
- focus vars a q by (eq q) (reduce, refl)
+ focus vars a _ _ _ r by (eq r) (reduce, refl)
done
done
@@ -597,7 +594,7 @@ Lemma \<Omega>2_alt_def:
section \<open>Eckmann-Hilton\<close>
-context fixes i A a assumes "A: U i" "a: A"
+context fixes i A a assumes [type]: "A: U i" "a: A"
begin
interpretation \<Omega>2:
@@ -619,14 +616,18 @@ Lemma (def) pathcomp_eq_horiz_pathcomp:
assumes "\<alpha>: \<Omega>2 A a" "\<beta>: \<Omega>2 A a"
shows "\<alpha> \<bullet> \<beta> = \<alpha> \<star> \<beta>"
unfolding \<Omega>2.horiz_pathcomp_def
- using assms[unfolded \<Omega>2_alt_def]
+ (*FIXME: Definitional unfolding + normalization; shouldn't need explicit
+ unfolding*)
+ using assms[unfolded \<Omega>2_alt_def, type]
proof (reduce, rule pathinv)
\<comment> \<open>Propositional equality rewriting needs to be improved\<close>
- have "{} = refl (refl a) \<bullet> \<alpha>" by (rule pathcomp_refl)
+ have "refl (refl a) \<bullet> \<alpha> \<bullet> refl (refl a) = refl (refl a) \<bullet> \<alpha>"
+ by (rule pathcomp_refl)
also have ".. = \<alpha>" by (rule refl_pathcomp)
finally have eq\<alpha>: "{} = \<alpha>" by this
- have "{} = refl (refl a) \<bullet> \<beta>" by (rule pathcomp_refl)
+ have "refl (refl a) \<bullet> \<beta> \<bullet> refl (refl a) = refl (refl a) \<bullet> \<beta>"
+ by (rule pathcomp_refl)
also have ".. = \<beta>" by (rule refl_pathcomp)
finally have eq\<beta>: "{} = \<beta>" by this
@@ -640,13 +641,15 @@ Lemma (def) pathcomp_eq_horiz_pathcomp':
assumes "\<alpha>: \<Omega>2 A a" "\<beta>: \<Omega>2 A a"
shows "\<alpha> \<star>' \<beta> = \<beta> \<bullet> \<alpha>"
unfolding \<Omega>2.horiz_pathcomp'_def
- using assms[unfolded \<Omega>2_alt_def]
+ using assms[unfolded \<Omega>2_alt_def, type]
proof reduce
- have "{} = refl (refl a) \<bullet> \<beta>" by (rule pathcomp_refl)
+ have "refl (refl a) \<bullet> \<beta> \<bullet> refl (refl a) = refl (refl a) \<bullet> \<beta>"
+ by (rule pathcomp_refl)
also have ".. = \<beta>" by (rule refl_pathcomp)
finally have eq\<beta>: "{} = \<beta>" by this
- have "{} = refl (refl a) \<bullet> \<alpha>" by (rule pathcomp_refl)
+ have "refl (refl a) \<bullet> \<alpha> \<bullet> refl (refl a) = refl (refl a) \<bullet> \<alpha>"
+ by (rule pathcomp_refl)
also have ".. = \<alpha>" by (rule refl_pathcomp)
finally have eq\<alpha>: "{} = \<alpha>" by this
@@ -659,7 +662,7 @@ Lemma (def) pathcomp_eq_horiz_pathcomp':
Lemma (def) eckmann_hilton:
assumes "\<alpha>: \<Omega>2 A a" "\<beta>: \<Omega>2 A a"
shows "\<alpha> \<bullet> \<beta> = \<beta> \<bullet> \<alpha>"
- using assms[unfolded \<Omega>2_alt_def]
+ using assms[unfolded \<Omega>2_alt_def, type]
proof -
have "\<alpha> \<bullet> \<beta> = \<alpha> \<star> \<beta>"
by (rule pathcomp_eq_horiz_pathcomp)
diff --git a/hott/Nat.thy b/hott/Nat.thy
index 716703a..f45387c 100644
--- a/hott/Nat.thy
+++ b/hott/Nat.thy
@@ -62,17 +62,17 @@ subsection \<open>Addition\<close>
definition add (infixl "+" 120) where "m + n \<equiv> NatRec Nat m (K suc) n"
-lemma add_type [type]:
+Lemma add_type [type]:
assumes "m: Nat" "n: Nat"
shows "m + n: Nat"
unfolding add_def by typechk
-lemma add_zero [comp]:
+Lemma add_zero [comp]:
assumes "m: Nat"
shows "m + 0 \<equiv> m"
unfolding add_def by reduce
-lemma add_suc [comp]:
+Lemma add_suc [comp]:
assumes "m: Nat" "n: Nat"
shows "m + suc n \<equiv> suc (m + n)"
unfolding add_def by reduce
@@ -123,22 +123,22 @@ subsection \<open>Multiplication\<close>
definition mul (infixl "*" 121) where "m * n \<equiv> NatRec Nat 0 (K $ add m) n"
-lemma mul_type [type]:
+Lemma mul_type [type]:
assumes "m: Nat" "n: Nat"
shows "m * n: Nat"
unfolding mul_def by typechk
-lemma mul_zero [comp]:
+Lemma mul_zero [comp]:
assumes "n: Nat"
shows "n * 0 \<equiv> 0"
unfolding mul_def by reduce
-lemma mul_one [comp]:
+Lemma mul_one [comp]:
assumes "n: Nat"
shows "n * 1 \<equiv> n"
unfolding mul_def by reduce
-lemma mul_suc [comp]:
+Lemma mul_suc [comp]:
assumes "m: Nat" "n: Nat"
shows "m * suc n \<equiv> m + m * n"
unfolding mul_def by reduce
diff --git a/spartan/core/Spartan.thy b/spartan/core/Spartan.thy
index 7f13036..412881b 100644
--- a/spartan/core/Spartan.thy
+++ b/spartan/core/Spartan.thy
@@ -189,7 +189,7 @@ ML_file \<open>context_tactical.ML\<close>
subsection \<open>Type-checking/inference\<close>
\<comment> \<open>Rule attributes for the type-checker\<close>
-named_theorems form and intr and comp and type
+named_theorems form and intr and comp
\<comment> \<open>Defines elimination automation and the `elim` attribute\<close>
ML_file \<open>elimination.ML\<close>
@@ -202,7 +202,7 @@ lemmas
[comp] = beta Sig_comp and
[cong] = Pi_cong lam_cong Sig_cong
-ML_file \<open>typechecking.ML\<close>
+ML_file \<open>types.ML\<close>
method_setup typechk =
\<open>Scan.succeed (K (CONTEXT_METHOD (
@@ -214,6 +214,7 @@ method_setup known =
subsection \<open>Statement commands\<close>
+ML_file \<open>context_facts.ML\<close>
ML_file \<open>focus.ML\<close>
ML_file \<open>elaboration.ML\<close>
ML_file \<open>elaborated_statement.ML\<close>
@@ -374,12 +375,12 @@ translations "f x" \<leftharpoondown> "f `x"
section \<open>Functions\<close>
-lemma eta_exp:
+Lemma eta_exp:
assumes "f: \<Prod>x: A. B x"
shows "f \<equiv> \<lambda>x: A. f x"
by (rule eta[symmetric])
-lemma refine_codomain:
+Lemma refine_codomain:
assumes
"A: U i"
"f: \<Prod>x: A. B x"
@@ -387,7 +388,7 @@ lemma refine_codomain:
shows "f: \<Prod>x: A. C x"
by (subst eta_exp)
-lemma lift_universe_codomain:
+Lemma lift_universe_codomain:
assumes "A: U i" "f: A \<rightarrow> U j"
shows "f: A \<rightarrow> U (S j)"
using in_USi_if_in_Ui[of "f {}"]
@@ -402,7 +403,7 @@ syntax
translations
"g \<circ>\<^bsub>A\<^esub> f" \<rightleftharpoons> "CONST funcomp A g f"
-lemma funcompI [type]:
+Lemma funcompI [type]:
assumes
"A: U i"
"B: U i"
@@ -413,7 +414,7 @@ lemma funcompI [type]:
"g \<circ>\<^bsub>A\<^esub> f: \<Prod>x: A. C (f x)"
unfolding funcomp_def by typechk
-lemma funcomp_assoc [comp]:
+Lemma funcomp_assoc [comp]:
assumes
"A: U i"
"f: A \<rightarrow> B"
@@ -423,7 +424,7 @@ lemma funcomp_assoc [comp]:
"(h \<circ>\<^bsub>B\<^esub> g) \<circ>\<^bsub>A\<^esub> f \<equiv> h \<circ>\<^bsub>A\<^esub> g \<circ>\<^bsub>A\<^esub> f"
unfolding funcomp_def by reduce
-lemma funcomp_lambda_comp [comp]:
+Lemma funcomp_lambda_comp [comp]:
assumes
"A: U i"
"\<And>x. x: A \<Longrightarrow> b x: B"
@@ -432,7 +433,7 @@ lemma funcomp_lambda_comp [comp]:
"(\<lambda>x: B. c x) \<circ>\<^bsub>A\<^esub> (\<lambda>x: A. b x) \<equiv> \<lambda>x: A. c (b x)"
unfolding funcomp_def by reduce
-lemma funcomp_apply_comp [comp]:
+Lemma funcomp_apply_comp [comp]:
assumes
"A: U i" "B: U i" "\<And>x y. x: B \<Longrightarrow> C x: U i"
"f: A \<rightarrow> B" "g: \<Prod>x: B. C x"
@@ -456,12 +457,12 @@ lemma
id_comp [comp]: "x: A \<Longrightarrow> (id A) x \<equiv> x" \<comment> \<open>for the occasional manual rewrite\<close>
by reduce+
-lemma id_left [comp]:
+Lemma id_left [comp]:
assumes "A: U i" "B: U i" "f: A \<rightarrow> B"
shows "(id B) \<circ>\<^bsub>A\<^esub> f \<equiv> f"
by (subst eta_exp[of f]) (reduce, rule eta)
-lemma id_right [comp]:
+Lemma id_right [comp]:
assumes "A: U i" "B: U i" "f: A \<rightarrow> B"
shows "f \<circ>\<^bsub>A\<^esub> (id A) \<equiv> f"
by (subst eta_exp[of f]) (reduce, rule eta)
@@ -476,23 +477,23 @@ section \<open>Pairs\<close>
definition "fst A B \<equiv> \<lambda>p: \<Sum>x: A. B x. SigInd A B (fn _. A) (fn x y. x) p"
definition "snd A B \<equiv> \<lambda>p: \<Sum>x: A. B x. SigInd A B (fn p. B (fst A B p)) (fn x y. y) p"
-lemma fst_type [type]:
+Lemma fst_type [type]:
assumes "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i"
shows "fst A B: (\<Sum>x: A. B x) \<rightarrow> A"
unfolding fst_def by typechk
-lemma fst_comp [comp]:
+Lemma fst_comp [comp]:
assumes
"A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i" "a: A" "b: B a"
shows "fst A B <a, b> \<equiv> a"
unfolding fst_def by reduce
-lemma snd_type [type]:
+Lemma snd_type [type]:
assumes "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i"
shows "snd A B: \<Prod>p: \<Sum>x: A. B x. B (fst A B p)"
unfolding snd_def by typechk reduce
-lemma snd_comp [comp]:
+Lemma snd_comp [comp]:
assumes "A: U i" "\<And>x. x: A \<Longrightarrow> B x: U i" "a: A" "b: B a"
shows "snd A B <a, b> \<equiv> b"
unfolding snd_def by reduce
diff --git a/spartan/core/context_facts.ML b/spartan/core/context_facts.ML
new file mode 100644
index 0000000..4b4bcd9
--- /dev/null
+++ b/spartan/core/context_facts.ML
@@ -0,0 +1,44 @@
+structure Context_Facts: sig
+
+val Eq: Proof.context -> thm Item_Net.T
+val eq: Proof.context -> thm list
+val eq_of: Proof.context -> term -> thm list
+val register_eq: thm -> Context.generic -> Context.generic
+val register_eqs: thm list -> Context.generic -> Context.generic
+val register_facts: thm list -> Proof.context -> Proof.context
+
+end = struct
+
+
+(* Equality statements *)
+
+structure Eq = Generic_Data (
+ type T = thm Item_Net.T
+ val empty = Item_Net.init Thm.eq_thm
+ (single o (#1 o Lib.dest_eq) o Thm.concl_of)
+ val extend = I
+ val merge = Item_Net.merge
+)
+
+val Eq = Eq.get o Context.Proof
+val eq = Item_Net.content o Eq
+fun eq_of ctxt tm = Item_Net.retrieve (Eq ctxt) tm
+
+fun register_eq rule =
+ if Lib.is_eq (Thm.concl_of rule) then Eq.map (Item_Net.update rule)
+ else error "Not a definitional equality judgment"
+
+fun register_eqs rules = foldr1 (op o) (map register_eq rules)
+
+
+(* Context assumptions *)
+
+fun register_facts ths =
+ let
+ val (facts, conds, eqs) = Lib.partition_judgments ths
+ val f = Types.register_knowns facts handle Empty => I
+ val c = Types.register_conds conds handle Empty => I
+ val e = register_eqs eqs handle Empty => I
+ in Context.proof_map (e o c o f) end
+
+end
diff --git a/spartan/core/elaborated_statement.ML b/spartan/core/elaborated_statement.ML
index 81f4a7d..33f88cf 100644
--- a/spartan/core/elaborated_statement.ML
+++ b/spartan/core/elaborated_statement.ML
@@ -416,6 +416,35 @@ val structured_statement =
Parse_Spec.statement -- Parse_Spec.if_statement' -- Parse.for_fixes
>> (fn ((shows, assumes), fixes) => (fixes, assumes, shows))
+fun these_factss more_facts (named_factss, state) =
+ (named_factss, state |> Proof.set_facts (maps snd named_factss @ more_facts))
+
+fun gen_assume prep_statement prep_att export raw_fixes raw_prems raw_concls state =
+ let
+ val ctxt = Proof.context_of state;
+
+ val bindings = map (apsnd (map (prep_att ctxt)) o fst) raw_concls;
+ val {fixes = params, assumes = prems_propss, shows = concl_propss, result_binds, text, ...} =
+ #1 (prep_statement raw_fixes raw_prems (map snd raw_concls) ctxt);
+ val propss = (map o map) (Logic.close_prop params (flat prems_propss)) concl_propss;
+ in
+ state
+ |> Proof.assert_forward
+ |> Proof.map_context_result (fn ctxt =>
+ ctxt
+ |> Proof_Context.augment text
+ |> fold Variable.maybe_bind_term result_binds
+ |> fold_burrow (Assumption.add_assms export o map (Thm.cterm_of ctxt)) propss
+ |-> (fn premss => fn ctxt =>
+ (premss, Context_Facts.register_facts (flat premss) ctxt))
+ |-> (fn premss =>
+ Proof_Context.note_thmss "" (bindings ~~ (map o map) (fn th => ([th], [])) premss)))
+ |> these_factss [] |> #2
+ end
+
+val assume =
+ gen_assume Proof_Context.cert_statement (K I) Assumption.assume_export
+
in
val _ = Outer_Syntax.command \<^command_keyword>\<open>assuming\<close> "elaborated assumption"
@@ -433,7 +462,7 @@ val _ = Outer_Syntax.command \<^command_keyword>\<open>assuming\<close> "elabora
val c' = c |> map (fn ((b, atts), ss) =>
((b, map (Attrib.attribute_cmd ctxt) atts), map read_terms ss))
val c'' = Elab.elaborate ctxt [] c'
- in Proof.assume a' b' c'' state end)))
+ in assume a' b' c'' state end)))
end
diff --git a/spartan/core/elimination.ML b/spartan/core/elimination.ML
index cd4e414..cf9d21e 100644
--- a/spartan/core/elimination.ML
+++ b/spartan/core/elimination.ML
@@ -41,7 +41,7 @@ val _ = Theory.setup (
(Thm.declaration_attribute o register_rule))
""
#> Global_Theory.add_thms_dynamic (\<^binding>\<open>elim\<close>,
- fn context => (map #1 (rules (Context.proof_of context))))
+ fn context => map #1 (rules (Context.proof_of context)))
)
diff --git a/spartan/core/focus.ML b/spartan/core/focus.ML
index 2ad4c8a..b963cfe 100644
--- a/spartan/core/focus.ML
+++ b/spartan/core/focus.ML
@@ -117,7 +117,8 @@ fun gen_schematic_subgoal prep_atts raw_result_binding param_specs state =
|> Proof.map_context (fn _ =>
#context subgoal_focus
|> Proof_Context.note_thmss "" [((Binding.name "prems", []), [(prems, [])])]
- |> #2)
+ |> snd
+ |> Context_Facts.register_facts prems)
|> Proof.internal_goal (K (K ())) (Proof_Context.get_mode ctxt) true "subgoal"
NONE after_qed [] [] [(Binding.empty_atts, [(Thm.term_of (#concl subgoal_focus), [])])]
|> #2
diff --git a/spartan/core/goals.ML b/spartan/core/goals.ML
index a04bd0e..7d52495 100644
--- a/spartan/core/goals.ML
+++ b/spartan/core/goals.ML
@@ -175,7 +175,8 @@ fun gen_schematic_theorem
in
goal_ctxt
|> not (null prems) ?
- (Proof_Context.note_thmss "" [((Binding.name prems_name, []), [(prems, [])])] #> snd)
+ (Proof_Context.note_thmss "" [((Binding.name prems_name, []), [(prems, [])])]
+ #> snd #> Context_Facts.register_facts prems)
|> Proof.theorem before_qed gen_and_after_qed (map snd stmt)
|> (case facts of NONE => I | SOME ths => Proof.refine_insert ths)
end
@@ -188,12 +189,12 @@ val schematic_theorem_cmd =
fun theorem spec descr =
Outer_Syntax.local_theory_to_proof' spec ("state " ^ descr)
- ( Scan.option (Args.parens (Args.$$$ "def"))
+ (Scan.option (Args.parens (Args.$$$ "def"))
-- (long_statement || short_statement) >>
(fn (opt_derive, (long, binding, includes, elems, concl)) =>
schematic_theorem_cmd
(case opt_derive of SOME "def" => true | _ => false)
- long descr NONE (K I) binding includes elems concl) )
+ long descr NONE (K I) binding includes elems concl))
fun definition spec descr =
Outer_Syntax.local_theory_to_proof' spec "definition via proof"
diff --git a/spartan/core/lib.ML b/spartan/core/lib.ML
index 392ae2e..e43ad98 100644
--- a/spartan/core/lib.ML
+++ b/spartan/core/lib.ML
@@ -27,6 +27,9 @@ val typing_of_term: term -> term
val decompose_goal: Proof.context -> term -> term list * term
val rigid_typing_concl: term -> bool
+(*Theorems*)
+val partition_judgments: thm list -> thm list * thm list * thm list
+
(*Subterms*)
val has_subterm: term list -> term -> bool
val subterm_count: term -> term -> int
@@ -121,6 +124,19 @@ fun rigid_typing_concl goal =
in is_typing concl andalso is_rigid (term_of_typing concl) end
+(** Theorems **)
+fun partition_judgments ths =
+ let
+ fun part [] facts conds eqs = (facts, conds, eqs)
+ | part (th::ths) facts conds eqs =
+ if is_typing (Thm.prop_of th) then
+ part ths (th::facts) conds eqs
+ else if is_typing (Thm.concl_of th) then
+ part ths facts (th::conds) eqs
+ else part ths facts conds (th::eqs)
+ in part ths [] [] [] end
+
+
(** Subterms **)
fun has_subterm tms =
diff --git a/spartan/core/tactics.ML b/spartan/core/tactics.ML
index 45fd119..0c46ef0 100644
--- a/spartan/core/tactics.ML
+++ b/spartan/core/tactics.ML
@@ -82,8 +82,8 @@ fun internalize_fact_tac t =
Drule.infer_instantiate' ctxt [NONE, NONE, SOME B, SOME a] @{thm PiE}
in
HEADGOAL (resolve_tac ctxt [resolvent])
- (*Infer the correct type T*)
- THEN SOMEGOAL (NO_CONTEXT_TACTIC ctxt o Types.check_infer [])
+ (*Unify with the correct type T*)
+ THEN SOMEGOAL (NO_CONTEXT_TACTIC ctxt o Types.known_ctac [])
end)
fun elim_core_tac tms types ctxt =
@@ -111,18 +111,16 @@ fun elim_ctac tms =
[] => CONTEXT_TACTIC' (fn ctxt => eresolve_tac ctxt (map #1 (Elim.rules ctxt)))
| major :: _ => CONTEXT_SUBGOAL (fn (goal, _) => fn cst as (ctxt, st) =>
let
- val facts = Proof_Context.facts_of ctxt
+ val facts = map Thm.prop_of (Types.known ctxt)
val prems = Logic.strip_assums_hyp goal
val template = Lib.typing_of_term major
- val types =
- map (Thm.prop_of o #1) (Facts.could_unify facts template)
- @ filter (fn prem => Term.could_unify (template, prem)) prems
+ val types = filter (fn th => Term.could_unify (template, th)) (facts @ prems)
|> map Lib.type_of_typing
in case types of
[] => no_ctac cst
| _ =>
let
- val inserts = map (Thm.prop_of o fst) (Facts.props facts) @ prems
+ val inserts = facts @ prems
|> filter Lib.is_typing
|> map Lib.dest_typing
|> filter_out (fn (t, _) =>
@@ -170,7 +168,7 @@ fun cases_ctac tm =
| [] => raise Option
| _ => raise error (Syntax.string_of_term ctxt tm ^ "not uniquely typed"))
handle Option =>
- error ("no case rule known for " ^ (Syntax.string_of_term ctxt tm))
+ error ("No case rule known for " ^ (Syntax.string_of_term ctxt tm))
in
resolve_tac ctxt [res] i
end)
diff --git a/spartan/core/typechecking.ML b/spartan/core/typechecking.ML
deleted file mode 100644
index ca89c8c..0000000
--- a/spartan/core/typechecking.ML
+++ /dev/null
@@ -1,96 +0,0 @@
-(* Title: typechecking.ML
- Author: Joshua Chen
-
-Type information and type-checking infrastructure.
-*)
-
-structure Types: sig
-
-val Data: Proof.context -> thm Item_Net.T
-val types: Proof.context -> term -> thm list
-val put_type: thm -> Proof.context -> Proof.context
-val put_types: thm list -> Proof.context -> Proof.context
-
-val debug_typechk: bool Config.T
-
-val known_ctac: thm list -> int -> context_tactic
-val check_infer: thm list -> int -> context_tactic
-
-end = struct
-
-
-(* Context data *)
-
-structure Data = Generic_Data (
- type T = thm Item_Net.T
- val empty = Item_Net.init Thm.eq_thm
- (single o Lib.term_of_typing o Thm.prop_of)
- val extend = I
- val merge = Item_Net.merge
-)
-
-val Data = Data.get o Context.Proof
-fun types ctxt tm = Item_Net.retrieve (Data ctxt) tm
-fun put_type typing = Context.proof_map (Data.map (Item_Net.update typing))
-fun put_types typings = foldr1 (op o) (map put_type typings)
-
-
-(* Context tactics for type-checking *)
-
-val debug_typechk = Attrib.setup_config_bool \<^binding>\<open>debug_typechk\<close> (K false)
-
-fun debug_tac ctxt s =
- if Config.get ctxt debug_typechk then print_tac ctxt s else all_tac
-
-(*Solves goals without metavariables and type inference problems by resolving
- with facts or assumption from inline premises.*)
-fun known_ctac facts = CONTEXT_SUBGOAL (fn (goal, i) => fn (ctxt, st) =>
- TACTIC_CONTEXT ctxt
- let val concl = Logic.strip_assums_concl goal in
- if Lib.no_vars concl orelse
- (Lib.is_typing concl andalso Lib.no_vars (Lib.term_of_typing concl))
- then
- let val ths = map (Simplifier.norm_hhf ctxt) facts
- in st |>
- (assume_tac ctxt ORELSE' resolve_tac ctxt ths THEN_ALL_NEW K no_tac) i
- end
- else Seq.empty
- end)
-
-(*Simple bidirectional typing tactic, with some nondeterminism from backtracking
- search over arbitrary `type` rules. The current implementation does not
- perform any normalization.*)
-fun check_infer_step facts i (ctxt, st) =
- let
- val tac = SUBGOAL (fn (goal, i) =>
- if Lib.rigid_typing_concl goal
- then
- let val net = Tactic.build_net (
- map (Simplifier.norm_hhf ctxt)
- (*FIXME: Shouldn't pull nameless facts directly from context. Note
- that we *do* need to be able to resolve with conditional
- statements expressing type family judgments*)
- (facts @ map fst (Facts.props (Proof_Context.facts_of ctxt)))
- (*MAYBE FIXME: Remove `type` from this list, and instead perform
- definitional unfolding to (w?)hnf.*)
- @(Named_Theorems.get ctxt \<^named_theorems>\<open>type\<close>)
- @(Named_Theorems.get ctxt \<^named_theorems>\<open>form\<close>)
- @(Named_Theorems.get ctxt \<^named_theorems>\<open>intr\<close>)
- @(map #1 (Elim.rules ctxt)))
- in (resolve_from_net_tac ctxt net) i end
- else no_tac)
-
- val ctxt' = ctxt (*TODO: Use this to store already-derived typing judgments*)
- in
- TACTIC_CONTEXT ctxt' (tac i st)
- end
-
-fun check_infer facts i (cst as (_, st)) =
- let val ctac = known_ctac facts CORELSE' check_infer_step facts
- in
- cst |> (ctac i CTHEN
- CREPEAT_IN_RANGE i (Thm.nprems_of st - i) (CTRY o CREPEAT_ALL_NEW_FWD ctac))
- end
-
-
-end
diff --git a/spartan/core/types.ML b/spartan/core/types.ML
new file mode 100644
index 0000000..a521f7c
--- /dev/null
+++ b/spartan/core/types.ML
@@ -0,0 +1,141 @@
+(* Title: typechecking.ML
+ Author: Joshua Chen
+
+Type information and type-checking infrastructure.
+*)
+
+structure Types: sig
+
+val Known: Proof.context -> thm Item_Net.T
+val known: Proof.context -> thm list
+val known_of: Proof.context -> term -> thm list
+val register_known: thm -> Context.generic -> Context.generic
+val register_knowns: thm list -> Context.generic -> Context.generic
+
+val Cond: Proof.context -> thm Item_Net.T
+val cond: Proof.context -> thm list
+val cond_of: Proof.context -> term -> thm list
+val register_cond: thm -> Context.generic -> Context.generic
+val register_conds: thm list -> Context.generic -> Context.generic
+
+val debug_typechk: bool Config.T
+
+val known_ctac: thm list -> int -> context_tactic
+val check_infer: thm list -> int -> context_tactic
+
+end = struct
+
+
+(** Context data **)
+
+(* Known types *)
+
+structure Known = Generic_Data (
+ type T = thm Item_Net.T
+ val empty = Item_Net.init Thm.eq_thm
+ (single o Lib.term_of_typing o Thm.prop_of)
+ val extend = I
+ val merge = Item_Net.merge
+)
+
+val Known = Known.get o Context.Proof
+val known = Item_Net.content o Known
+fun known_of ctxt tm = Item_Net.retrieve (Known ctxt) tm
+
+fun register_known typing =
+ if Lib.is_typing (Thm.prop_of typing) then Known.map (Item_Net.update typing)
+ else error "Not a type judgment"
+
+fun register_knowns typings = foldr1 (op o) (map register_known typings)
+
+(* Conditional type rules *)
+
+(*Two important cases: 1. general type inference rules and 2. type family
+ judgments*)
+
+structure Cond = Generic_Data (
+ type T = thm Item_Net.T
+ val empty = Item_Net.init Thm.eq_thm
+ (single o Lib.term_of_typing o Thm.concl_of)
+ val extend = I
+ val merge = Item_Net.merge
+)
+
+val Cond = Cond.get o Context.Proof
+val cond = Item_Net.content o Cond
+fun cond_of ctxt tm = Item_Net.retrieve (Cond ctxt) tm
+
+fun register_cond rule =
+ if Lib.is_typing (Thm.concl_of rule) then Cond.map (Item_Net.update rule)
+ else error "Not a conditional type judgment"
+
+fun register_conds rules = foldr1 (op o) (map register_cond rules)
+
+
+(** [type] attribute **)
+
+val _ = Theory.setup (
+ Attrib.setup \<^binding>\<open>type\<close>
+ (Scan.succeed (Thm.declaration_attribute (fn th =>
+ if Thm.no_prems th then register_known th else register_cond th)))
+ ""
+ #> Global_Theory.add_thms_dynamic (\<^binding>\<open>type\<close>,
+ fn context => let val ctxt = Context.proof_of context in
+ known ctxt @ cond ctxt end)
+)
+
+
+(** Context tactics for type-checking and elaboration **)
+
+val debug_typechk = Attrib.setup_config_bool \<^binding>\<open>debug_typechk\<close> (K false)
+
+fun debug_tac ctxt s =
+ if Config.get ctxt debug_typechk then print_tac ctxt s else all_tac
+
+(*Solves goals without metavariables and type inference problems by assumption
+ from inline premises or resolution with facts*)
+fun known_ctac facts = CONTEXT_SUBGOAL (fn (goal, i) => fn (ctxt, st) =>
+ TACTIC_CONTEXT ctxt
+ let val concl = Logic.strip_assums_concl goal in
+ if Lib.no_vars concl orelse
+ (Lib.is_typing concl andalso Lib.no_vars (Lib.term_of_typing concl))
+ then
+ let val ths = known ctxt @ map (Simplifier.norm_hhf ctxt) facts
+ in st |>
+ (assume_tac ctxt ORELSE' resolve_tac ctxt ths THEN_ALL_NEW K no_tac) i
+ end
+ else Seq.empty
+ end)
+
+(*Simple bidirectional typing tactic, with some nondeterminism from backtracking
+ search over input facts. The current implementation does not perform any
+ normalization.*)
+fun check_infer_step facts i (ctxt, st) =
+ let
+ val tac = SUBGOAL (fn (goal, i) =>
+ if Lib.rigid_typing_concl goal
+ then
+ let val net = Tactic.build_net (
+ map (Simplifier.norm_hhf ctxt) facts
+ @(cond ctxt)
+ @(Named_Theorems.get ctxt \<^named_theorems>\<open>form\<close>)
+ @(Named_Theorems.get ctxt \<^named_theorems>\<open>intr\<close>)
+ @(map #1 (Elim.rules ctxt)))
+ in (resolve_from_net_tac ctxt net) i end
+ else no_tac)
+
+ val ctxt' = ctxt (*TODO: Use this to store already-derived typing judgments*)
+ in
+ TACTIC_CONTEXT ctxt' (tac i st)
+ end
+
+fun check_infer facts i (cst as (_, st)) =
+ let
+ val ctac = known_ctac facts CORELSE' check_infer_step facts
+ in
+ cst |> (ctac i CTHEN
+ CREPEAT_IN_RANGE i (Thm.nprems_of st - i) (CTRY o CREPEAT_ALL_NEW_FWD ctac))
+ end
+
+
+end
diff --git a/spartan/lib/List.thy b/spartan/lib/List.thy
index dd51582..83e5149 100644
--- a/spartan/lib/List.thy
+++ b/spartan/lib/List.thy
@@ -149,7 +149,7 @@ Definition map:
proof (elim xs)
show "[]: List B" by intro
next fix x ys
- assume "x: A" "ys: List B"
+ assuming "x: A" "ys: List B"
show "f x # ys: List B" by typechk
qed
diff --git a/spartan/lib/Maybe.thy b/spartan/lib/Maybe.thy
index 0a7ec21..da22a4e 100644
--- a/spartan/lib/Maybe.thy
+++ b/spartan/lib/Maybe.thy
@@ -25,10 +25,10 @@ Definition MaybeInd:
"\<And>a. a: A \<Longrightarrow> f a: C (some A a)"
"m: Maybe A"
shows "C m"
- using assms[unfolded Maybe_def none_def some_def]
+ using assms[unfolded Maybe_def none_def some_def, type]
apply (elim m)
- apply (rule \<open>_ \<Longrightarrow> _: C (inl _ _ _)\<close>)
- apply (elim, rule \<open>_: C (inr _ _ _)\<close>)
+ apply fact
+ apply (elim, fact)
done
Lemma Maybe_comp_none:
diff --git a/spartan/lib/Prelude.thy b/spartan/lib/Prelude.thy
index 6adbce8..c0abf31 100644
--- a/spartan/lib/Prelude.thy
+++ b/spartan/lib/Prelude.thy
@@ -105,7 +105,8 @@ Definition ifelse [rotated 1]:
"a: C true"
"b: C false"
shows "C x"
- by (elim x) (elim, rule *)+
+ using assms[unfolded Bool_def true_def false_def, type]
+ by (elim x) (elim, fact)+
Lemma if_true:
assumes