From 3922e24270518be67192ad6928bb839132c74c07 Mon Sep 17 00:00:00 2001 From: Josh Chen Date: Wed, 23 Sep 2020 17:03:42 +0200 Subject: Basic experiments adding reduction to the type checker --- hott/Equivalence.thy | 48 ++++---------------------------- hott/Identity.thy | 47 ++++++++++++------------------- hott/List+.thy | 4 +-- spartan/core/Spartan.thy | 72 +++++++++++++++++++++++++++++------------------- spartan/core/eqsubst.ML | 4 +-- spartan/core/types.ML | 57 ++++++++++++++++++++++++++++---------- 6 files changed, 114 insertions(+), 118 deletions(-) diff --git a/hott/Equivalence.thy b/hott/Equivalence.thy index a57ed44..99300a0 100644 --- a/hott/Equivalence.thy +++ b/hott/Equivalence.thy @@ -144,8 +144,6 @@ Lemma (def) commute_homotopy: apply (transport eq: pathcomp_refl, transport eq: refl_pathcomp) by refl -\ \TODO: *Really* need normalization during type-checking and better equality - type rewriting to do the proof below properly\ Corollary (def) commute_homotopy': assumes "A: U i" @@ -154,21 +152,18 @@ Corollary (def) commute_homotopy': "H: f ~ (id A)" shows "H (f x) = f [H x]" proof - + (*FIXME: Bug; if the following type declaration isn't made then bad things + happen on the next lines.*) from \H: f ~ id A\ have [type]: "H: \x: A. f x = x" by (reduce add: homotopy_def) - have *: "(id A)[H x]: f x = x" - by (rewrite at "\ = _" id_comp[symmetric], - rewrite at "_ = \" id_comp[symmetric]) - have "H (f x) \ H x = H (f x) \ (id A)[H x]" - by (rule left_whisker, fact *, transport eq: ap_id) (reduce+, refl) + by (rule left_whisker, transport eq: ap_id, refl) also have [simplified id_comp]: "H (f x) \ (id A)[H x] = f[H x] \ H x" by (rule commute_homotopy) - finally have *: "{}" using * by this + finally have "{}" by this - show "H (f x) = f [H x]" - by pathcomp_cancelr (fact, typechk+) + thus "H (f x) = f [H x]" by pathcomp_cancelr (fact, typechk+) qed @@ -309,23 +304,11 @@ Lemma (def) is_qinv_if_is_biinv: assumes "A: U i" "B: U i" "f: A \ B" shows "is_biinv f \ is_qinv f" apply intro - - text \Split the hypothesis \<^term>\is_biinv f\ into its components and name them.\ unfolding is_biinv_def apply elims focus vars _ _ _ g H1 h H2 - text \Need to give the required function and homotopies.\ apply (rule is_qinvI) - text \We claim that \<^term>\g\ is a quasi-inverse to \<^term>\f\.\ \<^item> by (fact \g: _\) - - text \The first part \<^prop>\?H1: g \ f ~ id A\ is given by \<^term>\H1\.\ \<^item> by (fact \H1: _\) - - text \ - It remains to prove \<^prop>\?H2: f \ g ~ id B\. First show that \g ~ h\, - then the result follows from @{thm \H2: f \ h ~ id B\}. Here a proof - block is used for calculational reasoning. - \ \<^item> proof - have "g ~ g \ (id B)" by reduce refl also have ".. ~ g \ f \ h" by rhtpy (rule \H2:_\[symmetric]) @@ -437,7 +420,7 @@ Lemma (def) equiv_if_equal: by (rule lift_universe_codomain, rule Ui_in_USi) \<^enum> vars A using [[solve_side_conds=1]] - apply (subst transport_comp) + apply (rewrite transport_comp) \ by (rule Ui_in_USi) \ by reduce (rule in_USi_if_in_Ui) \ by reduce (rule id_is_biinv) @@ -452,24 +435,5 @@ Lemma (def) equiv_if_equal: by (rule lift_universe_codomain, rule Ui_in_USi) done -(*Uncomment this to see all implicits from here on. 
-no_translations - "f x" \ "f `x" - "x = y" \ "x =\<^bsub>A\<^esub> y" - "g \ f" \ "g \\<^bsub>A\<^esub> f" - "p\" \ "CONST pathinv A x y p" - "p \ q" \ "CONST pathcomp A x y z p q" - "fst" \ "CONST Spartan.fst A B" - "snd" \ "CONST Spartan.snd A B" - "f[p]" \ "CONST ap A B x y f p" - "trans P p" \ "CONST transport A P x y p" - "trans_const B p" \ "CONST transport_const A B x y p" - "lift P p u" \ "CONST pathlift A P x y p u" - "apd f p" \ "CONST Identity.apd A P f x y p" - "f ~ g" \ "CONST homotopy A B f g" - "is_qinv f" \ "CONST Equivalence.is_qinv A B f" - "is_biinv f" \ "CONST Equivalence.is_biinv A B f" -*) - end diff --git a/hott/Identity.thy b/hott/Identity.thy index 247d6a4..dc27ee8 100644 --- a/hott/Identity.thy +++ b/hott/Identity.thy @@ -242,7 +242,7 @@ Lemma (def) ap_funcomp: "x: A" "y: A" "f: A \ B" "g: B \ C" "p: x = y" - shows "(g \ f)[p] = g[f[p]]" thm ap + shows "(g \ f)[p] = g[f[p]]" apply (eq p) \<^item> by reduce \<^item> by reduce refl @@ -251,10 +251,7 @@ Lemma (def) ap_funcomp: Lemma (def) ap_id: assumes "A: U i" "x: A" "y: A" "p: x = y" shows "(id A)[p] = p" - apply (eq p) - \<^item> by reduce - \<^item> by reduce refl - done + by (eq p) (reduce, refl) section \Transport\ @@ -303,7 +300,7 @@ Lemma (def) pathcomp_cancel_left: by (transport eq: inv_pathcomp, transport eq: refl_pathcomp) refl also have ".. = p\ \ (p \ r)" by (transport eq: pathcomp_assoc[symmetric], transport eq: \\:_\) refl - also have ".. = r" thm inv_pathcomp + also have ".. = r" by (transport eq: pathcomp_assoc, transport eq: inv_pathcomp, transport eq: refl_pathcomp) refl @@ -339,7 +336,7 @@ Lemma (def) transport_left_inv: "x: A" "y: A" "p: x = y" shows "(trans P p\) \ (trans P p) = id (P x)" - by (eq p) (reduce; refl) + by (eq p) (reduce, refl) Lemma (def) transport_right_inv: assumes @@ -440,10 +437,7 @@ Lemma (def) pathlift_fst: "u: P x" "p: x = y" shows "fst[lift P p u] = p" - apply (eq p) - \<^item> by reduce - \<^item> by reduce refl - done + by (eq p) (reduce, refl) section \Dependent paths\ @@ -585,10 +579,10 @@ end section \Loop space\ definition \ where "\ A a \ a =\<^bsub>A\<^esub> a" -definition \2 where "\2 A a \ \ (\ A a) (refl a)" +definition \2 where "\2 A a \ refl a =\<^bsub>a =\<^bsub>A\<^esub> a\<^esub> refl a" -Lemma \2_alt_def: - "\2 A a \ refl a = refl a" +Lemma \2_\_of_\: + "\2 A a \ \ (\ A a) (refl a)" unfolding \2_def \_def . @@ -604,23 +598,20 @@ interpretation \2: notation \2.horiz_pathcomp (infix "\" 121) notation \2.horiz_pathcomp' (infix "\''" 121) -Lemma [type]: +Lemma assumes "\: \2 A a" and "\: \2 A a" - shows horiz_pathcomp_type: "\ \ \: \2 A a" - and horiz_pathcomp'_type: "\ \' \: \2 A a" + shows horiz_pathcomp_type [type]: "\ \ \: \2 A a" + and horiz_pathcomp'_type [type]: "\ \' \: \2 A a" using assms - unfolding \2.horiz_pathcomp_def \2.horiz_pathcomp'_def \2_alt_def + unfolding \2.horiz_pathcomp_def \2.horiz_pathcomp'_def \2_def by reduce+ Lemma (def) pathcomp_eq_horiz_pathcomp: assumes "\: \2 A a" "\: \2 A a" shows "\ \ \ = \ \ \" unfolding \2.horiz_pathcomp_def - (*FIXME: Definitional unfolding + normalization; shouldn't need explicit - unfolding*) - using assms[unfolded \2_alt_def, type] + using assms[unfolded \2_def, type] (*TODO: A `noting` keyword that puts the noted theorem into [type]*) proof (reduce, rule pathinv) - \ \Propositional equality rewriting needs to be improved\ have "refl (refl a) \ \ \ refl (refl a) = refl (refl a) \ \" by (rule pathcomp_refl) also have ".. 
= \" by (rule refl_pathcomp) @@ -641,7 +632,7 @@ Lemma (def) pathcomp_eq_horiz_pathcomp': assumes "\: \2 A a" "\: \2 A a" shows "\ \' \ = \ \ \" unfolding \2.horiz_pathcomp'_def - using assms[unfolded \2_alt_def, type] + using assms[unfolded \2_def, type] proof reduce have "refl (refl a) \ \ \ refl (refl a) = refl (refl a) \ \" by (rule pathcomp_refl) @@ -662,20 +653,18 @@ Lemma (def) pathcomp_eq_horiz_pathcomp': Lemma (def) eckmann_hilton: assumes "\: \2 A a" "\: \2 A a" shows "\ \ \ = \ \ \" - using assms[unfolded \2_alt_def, type] + using \2_def[comp] proof - have "\ \ \ = \ \ \" by (rule pathcomp_eq_horiz_pathcomp) also have [simplified comp]: ".. = \ \' \" - \ \Danger, Will Robinson! (Inferred implicit has an equivalent form but - needs to be manually simplified.)\ + \ \Danger! Inferred implicit has an equivalent form but needs to be + manually simplified.\ by (transport eq: \2.horiz_pathcomp_eq_horiz_pathcomp') refl also have ".. = \ \ \" by (rule pathcomp_eq_horiz_pathcomp') finally show "\ \ \ = \ \ \" - by (this; reduce add: \2_alt_def[symmetric]) - \ \TODO: The finishing call to `reduce` should be unnecessary with some - kind of definitional unfolding.\ + by this qed end diff --git a/hott/List+.thy b/hott/List+.thy index b73cc24..869d667 100644 --- a/hott/List+.thy +++ b/hott/List+.thy @@ -10,8 +10,8 @@ section \Length\ definition [implicit]: "len \ ListRec ? Nat 0 (fn _ _ rec. suc rec)" experiment begin - Lemma "len [] \ ?n" by (subst comp) - Lemma "len [0, suc 0, suc (suc 0)] \ ?n" by (subst comp) + Lemma "len [] \ ?n" by (subst comp; typechk?) + Lemma "len [0, suc 0, suc (suc 0)] \ ?n" by (subst comp; typechk?)+ end diff --git a/spartan/core/Spartan.thy b/spartan/core/Spartan.thy index 10caa30..6b2ed58 100644 --- a/spartan/core/Spartan.thy +++ b/spartan/core/Spartan.thy @@ -181,18 +181,16 @@ axiomatization where \ \ \x: A. B x \ \x: A. 
B' x" -section \Infrastructure\ +section \Type checking & inference\ ML_file \lib.ML\ ML_file \context_facts.ML\ ML_file \context_tactical.ML\ -subsection \Type-checking/inference\ - -\ \Rule attributes for the type-checker\ +\ \Rule attributes for the typechecker\ named_theorems form and intr and comp -\ \Defines elimination automation and the `elim` attribute\ +\ \Elimination/induction automation and the `elim` attribute\ ML_file \elimination.ML\ lemmas @@ -203,7 +201,20 @@ lemmas [comp] = beta Sig_comp and [cong] = Pi_cong lam_cong Sig_cong -\ \Type-checking\ +\ \Subsumption rule\ +lemma sub: + assumes "a: A" "A \ A'" + shows "a: A'" + using assms by simp + +\ \Basic substitution of definitional equalities\ +ML_file \~~/src/Tools/misc_legacy.ML\ +ML_file \~~/src/Tools/IsaPlanner/isand.ML\ +ML_file \~~/src/Tools/IsaPlanner/rw_inst.ML\ +ML_file \~~/src/Tools/IsaPlanner/zipper.ML\ +ML_file \~~/src/Tools/eqsubst.ML\ + +\ \Term normalization, type checking & inference\ ML_file \types.ML\ method_setup typechk = @@ -214,14 +225,26 @@ method_setup known = \Scan.succeed (K (CONTEXT_METHOD ( CHEADGOAL o Types.known_ctac)))\ -subsection \Statement commands\ +setup \ +let val typechk = fn ctxt => + NO_CONTEXT_TACTIC ctxt o Types.check_infer + (Simplifier.prems_of ctxt @ Context_Facts.known ctxt) +in + map_theory_simpset (fn ctxt => ctxt + addSolver (mk_solver "" typechk)) +end +\ + + +section \Statements and goals\ ML_file \focus.ML\ ML_file \elaboration.ML\ ML_file \elaborated_statement.ML\ ML_file \goals.ML\ -subsection \Proof methods\ + +section \Proof methods\ named_theorems intro \ \Logical introduction rules\ @@ -270,6 +293,7 @@ subsection \Reflexivity\ named_theorems refl method refl = (rule refl) + subsection \Trivial proofs (modulo automatic discharge of side conditions)\ method_setup this = @@ -278,16 +302,9 @@ method_setup this = (CONTEXT_TACTIC' (fn ctxt => resolve_tac ctxt facts)) facts))))\ -subsection \Rewriting\ -\ \\subst\ method\ -ML_file \~~/src/Tools/misc_legacy.ML\ -ML_file \~~/src/Tools/IsaPlanner/isand.ML\ -ML_file \~~/src/Tools/IsaPlanner/rw_inst.ML\ -ML_file \~~/src/Tools/IsaPlanner/zipper.ML\ -ML_file \eqsubst.ML\ +subsection \Rewriting\ -\ \\rewrite\ method\ consts rewrite_HOLE :: "'a::{}" ("\") lemma eta_expand: @@ -315,20 +332,18 @@ ML_file \~~/src/HOL/Library/cconv.ML\ ML_file \rewrite.ML\ \ \\reduce\ computes terms via judgmental equalities\ -setup \map_theory_simpset (fn ctxt => - ctxt addSolver (mk_solver "" (fn ctxt' => - NO_CONTEXT_TACTIC ctxt' o Types.check_infer (Simplifier.prems_of ctxt'))))\ - method reduce uses add = - changed \repeat_new \(simp add: comp add | sub comp); typechk?\\ + changed \repeat_new \(simp add: comp add | subst comp); typechk?\\ -subsection \Congruence automation\ + +subsection \Congruence relations\ consts "rhs" :: \'a\ ("..") ML_file \congruence.ML\ -subsection \Implicits\ + +section \Implicits\ text \ \?\ is used to mark implicit arguments in definitions, while \{}\ is expanded @@ -364,7 +379,8 @@ translations translations "\x. b" \ "\x: A. b" -subsection \Lambda coercion\ + +section \Lambda coercion\ \ \Coerce object lambdas to meta-lambdas\ abbreviation (input) lambda :: \o \ o \ o\ @@ -389,7 +405,7 @@ Lemma refine_codomain: "f: \x: A. B x" "\x. x: A \ f `x: C x" shows "f: \x: A. 
C x" - by (subst eta_exp) + by (rewrite eta_exp) Lemma lift_universe_codomain: assumes "A: U i" "f: A \ U j" @@ -463,12 +479,12 @@ lemma Lemma id_left [comp]: assumes "A: U i" "B: U i" "f: A \ B" shows "(id B) \\<^bsub>A\<^esub> f \ f" - by (subst eta_exp[of f]) (reduce, rule eta) + by (rewrite eta_exp[of f]) (reduce, rule eta) Lemma id_right [comp]: assumes "A: U i" "B: U i" "f: A \ B" shows "f \\<^bsub>A\<^esub> (id A) \ f" - by (subst eta_exp[of f]) (reduce, rule eta) + by (rewrite eta_exp[of f]) (reduce, rule eta) lemma id_U [type]: "id (U i): U i \ U i" @@ -494,7 +510,7 @@ Lemma fst_comp [comp]: Lemma snd_type [type]: assumes "A: U i" "\x. x: A \ B x: U i" shows "snd A B: \p: \x: A. B x. B (fst A B p)" - unfolding snd_def by typechk reduce + unfolding snd_def by typechk Lemma snd_comp [comp]: assumes "A: U i" "\x. x: A \ B x: U i" "a: A" "b: B a" diff --git a/spartan/core/eqsubst.ML b/spartan/core/eqsubst.ML index 31d5126..5ae8c73 100644 --- a/spartan/core/eqsubst.ML +++ b/spartan/core/eqsubst.ML @@ -430,12 +430,12 @@ val _ = Method.setup \<^binding>\sub\ (parser >> (fn ((asm, occs), inthms) => fn ctxt => SIMPLE_METHOD' ( (if asm then eqsubst_asm_tac else eqsubst_tac) ctxt occs inthms))) - "single-step substitution" #> + "single-step substitution" (* #> Method.setup \<^binding>\subst\ (parser >> (fn ((asm, occs), inthms) => K (CONTEXT_METHOD ( CHEADGOAL o SIDE_CONDS 0 ((if asm then eqsubst_asm_ctac else eqsubst_ctac) occs inthms))))) - "single-step substitution with automatic discharge of side conditions" + "single-step substitution with automatic discharge of side conditions" *) ) end diff --git a/spartan/core/types.ML b/spartan/core/types.ML index 70e5057..67918b9 100644 --- a/spartan/core/types.ML +++ b/spartan/core/types.ML @@ -43,42 +43,69 @@ fun known_ctac facts = CONTEXT_SUBGOAL (fn (goal, i) => fn (ctxt, st) => if Lib.no_vars concl orelse (Lib.is_typing concl andalso Lib.no_vars (Lib.term_of_typing concl)) then - let val ths = known ctxt @ map (Simplifier.norm_hhf ctxt) facts + let val ths = known ctxt @ facts in st |> (assume_tac ctxt ORELSE' resolve_tac ctxt ths THEN_ALL_NEW K no_tac) i end else Seq.empty end) -(*Simple bidirectional typing tactic, with some nondeterminism from backtracking - search over input facts. 
The current implementation does not perform any - normalization.*) +(*Simple bidirectional typing tactic with some backtracking search over input + facts.*) fun check_infer_step facts i (ctxt, st) = let - val tac = SUBGOAL (fn (goal, i) => + val refine_tac = SUBGOAL (fn (goal, i) => if Lib.rigid_typing_concl goal then - let val net = Tactic.build_net ( - map (Simplifier.norm_hhf ctxt) facts - @(cond ctxt) - @(Named_Theorems.get ctxt \<^named_theorems>\form\) - @(Named_Theorems.get ctxt \<^named_theorems>\intr\) - @(map #1 (Elim.rules ctxt))) - in (resolve_from_net_tac ctxt net) i end + let + val net = Tactic.build_net ( + map (Simplifier.norm_hhf ctxt) facts + @(cond ctxt) + @(Named_Theorems.get ctxt \<^named_theorems>\form\) + @(Named_Theorems.get ctxt \<^named_theorems>\intr\) + @(map #1 (Elim.rules ctxt))) + in resolve_from_net_tac ctxt net i end else no_tac) + val sub_tac = SUBGOAL (fn (goal, i) => + let val concl = Logic.strip_assums_concl goal in + if Lib.is_typing concl + andalso Lib.is_rigid (Lib.term_of_typing concl) + andalso Lib.no_vars (Lib.type_of_typing concl) + then + (resolve_tac ctxt @{thms sub} + THEN' SUBGOAL (fn (_, i) => + NO_CONTEXT_TACTIC ctxt (check_infer facts i)) + THEN' compute_tac ctxt facts + THEN_ALL_NEW K no_tac) i + else no_tac end) + val ctxt' = ctxt (*TODO: Use this to store already-derived typing judgments*) in - TACTIC_CONTEXT ctxt' (tac i st) + TACTIC_CONTEXT ctxt' ( + (NO_CONTEXT_TACTIC ctxt' o known_ctac facts + ORELSE' refine_tac + ORELSE' sub_tac) i st) end -fun check_infer facts i (cst as (_, st)) = +and check_infer facts i (cst as (_, st)) = let - val ctac = known_ctac facts CORELSE' check_infer_step facts + val ctac = check_infer_step facts in cst |> (ctac i CTHEN CREPEAT_IN_RANGE i (Thm.nprems_of st - i) (CTRY o CREPEAT_ALL_NEW_FWD ctac)) end +and compute_tac ctxt facts = + let + val comps = Named_Theorems.get ctxt \<^named_theorems>\comp\ + val ctxt' = ctxt addsimps comps + in + SUBGOAL (fn (_, i) => + ((CHANGED o asm_simp_tac ctxt' ORELSE' EqSubst.eqsubst_tac ctxt [0] comps) + THEN_ALL_NEW SUBGOAL (fn (_, i) => + NO_CONTEXT_TACTIC ctxt (check_infer facts i))) i) + end + end -- cgit v1.2.3
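
Editor's note (not part of the patch): the types.ML changes above add a subsumption step (sub_tac, built on the new `sub` rule) and a computation step (compute_tac, driven by the `comp` rules) to the bidirectional checker, so `typechk` should now accept a typing goal whose stated type matches the inferred one only up to reduction. A minimal usage sketch follows, assuming the Spartan surface syntax for lambda and application; the lemma name and statement are hypothetical and not taken from the patch.

  (* Hypothetical check: a is inferred to have type A, and the stated type
     (λx: A. A) `a reduces to A by the beta comp rule, so sub_tac followed by
     compute_tac should close the goal without an explicit call to reduce. *)
  Lemma sub_comp_example:
    assumes "A: U i" "a: A"
    shows "a: (λx: A. A) `a"
    by typechk

If this behaves as intended, it is the kind of normalization during type-checking that the TODO comment deleted from Equivalence.thy was asking for.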
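
Editor's note on the List+.thy experiment above: the second goal now reads `by (subst comp; typechk?)+` because each `subst comp` performs one ListRec computation step, so a three-element list needs several rounds before the schematic ?n is fully determined. Assuming the definition `len ≡ ListRec ? Nat 0 (fn _ _ rec. suc rec)` from that file (the implicit `?` is the element type, filled in by elaboration), the intended reduction trace is roughly:

  len [0, suc 0, suc (suc 0)]
    ≡ suc (len [suc 0, suc (suc 0)])
    ≡ suc (suc (len [suc (suc 0)]))
    ≡ suc (suc (suc (len [])))
    ≡ suc (suc (suc 0))

so the experiment should instantiate ?n with suc (suc (suc 0)).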