From bd2efacaf67ae84c41377e7af38dacc5aa64f405 Mon Sep 17 00:00:00 2001 From: Josh Chen Date: Fri, 14 Aug 2020 11:07:17 +0200 Subject: (FEAT) Context data slots for known types and conditional type rules, as well as a separate one for judgmental equality rules. (REF) Goal statement assumptions are now put into the new context data slots. (FEAT) `assuming` Isar keyword—like `assume` but puts assumptions into context data. (REF) Typechecking and all other tactics refactored to use type information from the context data, as opposed to looking at all facts visible in context. MINOR INCOMPATIBILITY: facts that were implicitly used in proofs now have to be annotated with [type] to make them visible throughout the context, else explicitly passed to methods via `using`, or declared with `assuming`. (REF) Fixed incompatibilities in theories. --- hott/Equivalence.thy | 34 ++++----- hott/Identity.thy | 47 ++++++------ hott/Nat.thy | 14 ++-- spartan/core/Spartan.thy | 31 ++++---- spartan/core/context_facts.ML | 44 +++++++++++ spartan/core/elaborated_statement.ML | 31 +++++++- spartan/core/elimination.ML | 2 +- spartan/core/focus.ML | 3 +- spartan/core/goals.ML | 7 +- spartan/core/lib.ML | 16 ++++ spartan/core/tactics.ML | 14 ++-- spartan/core/typechecking.ML | 96 ------------------------ spartan/core/types.ML | 141 +++++++++++++++++++++++++++++++++++ spartan/lib/List.thy | 2 +- spartan/lib/Maybe.thy | 6 +- spartan/lib/Prelude.thy | 3 +- 16 files changed, 313 insertions(+), 178 deletions(-) create mode 100644 spartan/core/context_facts.ML delete mode 100644 spartan/core/typechecking.ML create mode 100644 spartan/core/types.ML diff --git a/hott/Equivalence.thy b/hott/Equivalence.thy index 7d1f2b1..a57ed44 100644 --- a/hott/Equivalence.thy +++ b/hott/Equivalence.thy @@ -52,7 +52,7 @@ Lemma (def) hsym: shows "g ~ f" unfolding homotopy_def proof intro - fix x assume "x: A" then have "f x = g x" + fix x assuming "x: A" then have "f x = g x" by (htpy H) thus "g x = f x" by (rule pathinv) fact @@ -70,7 +70,7 @@ Lemma (def) htrans: shows "f ~ h" unfolding homotopy_def proof intro - fix x assume "x: A" + fix x assuming "x: A" have *: "f x = g x" "g x = h x" by (htpy H1, htpy H2) show "f x = h x" @@ -119,7 +119,7 @@ Lemma funcomp_right_htpy: shows "(g \ f) ~ (g \ f')" unfolding homotopy_def proof (intro, reduce) - fix x assume "x: A" + fix x assuming "x: A" have *: "f x = f' x" by (htpy H) show "g (f x) = g (f' x)" @@ -154,18 +154,18 @@ Corollary (def) commute_homotopy': "H: f ~ (id A)" shows "H (f x) = f [H x]" proof - - from \H: f ~ id A\ have "H: \x: A. f x = x" + from \H: f ~ id A\ have [type]: "H: \x: A. f x = x" by (reduce add: homotopy_def) - have "(id A)[H x]: f x = x" + have *: "(id A)[H x]: f x = x" by (rewrite at "\ = _" id_comp[symmetric], rewrite at "_ = \" id_comp[symmetric]) have "H (f x) \ H x = H (f x) \ (id A)[H x]" - by (rule left_whisker, transport eq: ap_id) (reduce+, refl) + by (rule left_whisker, fact *, transport eq: ap_id) (reduce+, refl) also have [simplified id_comp]: "H (f x) \ (id A)[H x] = f[H x] \ H x" by (rule commute_homotopy) - finally have *: "{}" by this + finally have *: "{}" using * by this show "H (f x) = f [H x]" by pathcomp_cancelr (fact, typechk+) @@ -179,7 +179,7 @@ subsection \Quasi-inverses\ definition "is_qinv A B f \ \g: B \ A. homotopy A (fn _. A) (g \\<^bsub>A\<^esub> f) (id A) \ homotopy B (fn _. 
B) (f \\<^bsub>B\<^esub> g) (id B)" -lemma is_qinv_type [type]: +Lemma is_qinv_type [type]: assumes "A: U i" "B: U i" "f: A \ B" shows "is_qinv A B f: U i" unfolding is_qinv_def @@ -266,7 +266,7 @@ definition "is_biinv A B f \ (\g: B \ A. homotopy A (fn _. A) (g \\<^bsub>A\<^esub> f) (id A)) \ (\g: B \ A. homotopy B (fn _. B) (f \\<^bsub>B\<^esub> g) (id B))" -lemma is_biinv_type [type]: +Lemma is_biinv_type [type]: assumes "A: U i" "B: U i" "f: A \ B" shows "is_biinv A B f: U i" unfolding is_biinv_def by typechk @@ -365,7 +365,7 @@ text \ definition equivalence (infix "\" 110) where "A \ B \ \f: A \ B. Equivalence.is_biinv A B f" -lemma equivalence_type [type]: +Lemma equivalence_type [type]: assumes "A: U i" "B: U i" shows "A \ B: U i" unfolding equivalence_def by typechk @@ -432,28 +432,24 @@ Lemma (def) equiv_if_equal: \<^enum> vars A B apply (rewrite at A in "A \ B" id_comp[symmetric]) using [[solve_side_conds=1]] - apply (rewrite at B in "_ \ B" id_comp[symmetric], fact) + apply (rewrite at B in "_ \ B" id_comp[symmetric]) apply (rule transport, rule Ui_in_USi) - apply (rule lift_universe_codomain, rule Ui_in_USi) - apply (typechk, rule Ui_in_USi) - by facts + by (rule lift_universe_codomain, rule Ui_in_USi) \<^enum> vars A using [[solve_side_conds=1]] apply (subst transport_comp) \ by (rule Ui_in_USi) \ by reduce (rule in_USi_if_in_Ui) - \ by reduce (rule id_is_biinv, fact) + \ by reduce (rule id_is_biinv) done done \<^item> \ \Similar proof as in the first subgoal above\ apply (rewrite at A in "A \ B" id_comp[symmetric]) using [[solve_side_conds=1]] - apply (rewrite at B in "_ \ B" id_comp[symmetric], fact) + apply (rewrite at B in "_ \ B" id_comp[symmetric]) apply (rule transport, rule Ui_in_USi) - apply (rule lift_universe_codomain, rule Ui_in_USi) - apply (typechk, rule Ui_in_USi) - by facts + by (rule lift_universe_codomain, rule Ui_in_USi) done (*Uncomment this to see all implicits from here on. diff --git a/hott/Identity.thy b/hott/Identity.thy index 4829b6f..247d6a4 100644 --- a/hott/Identity.thy +++ b/hott/Identity.thy @@ -82,7 +82,7 @@ Lemma (def) pathinv: shows "y =\<^bsub>A\<^esub> x" by (eq p) intro -lemma pathinv_comp [comp]: +Lemma pathinv_comp [comp]: assumes "A: U i" "x: A" shows "pathinv A x x (refl x) \ refl x" unfolding pathinv_def by reduce @@ -94,11 +94,11 @@ Lemma (def) pathcomp: shows "x =\<^bsub>A\<^esub> z" proof (eq p) - fix x q assume "x: A" and "q: x =\<^bsub>A\<^esub> z" + fix x q assuming "x: A" and "q: x =\<^bsub>A\<^esub> z" show "x =\<^bsub>A\<^esub> z" by (eq q) refl qed -lemma pathcomp_comp [comp]: +Lemma pathcomp_comp [comp]: assumes "A: U i" "a: A" shows "pathcomp A a a a (refl a) (refl a) \ refl a" unfolding pathcomp_def by reduce @@ -491,9 +491,9 @@ Lemma (def) right_whisker: shows "p \ r = q \ r" apply (eq r) focus vars x s t proof - - have "t \ refl x = t" by (rule pathcomp_refl) - also have ".. = s" by fact - also have ".. = s \ refl x" by (rule pathcomp_refl[symmetric]) + have "s \ refl x = s" by (rule pathcomp_refl) + also have ".. = t" by fact + also have ".. = t \ refl x" by (rule pathcomp_refl[symmetric]) finally show "{}" by this qed done @@ -505,9 +505,9 @@ Lemma (def) left_whisker: shows "r \ p = r \ q" apply (eq r) focus vars x s t proof - - have "refl x \ t = t" by (rule refl_pathcomp) - also have ".. = s" by fact - also have ".. = refl x \ s" by (rule refl_pathcomp[symmetric]) + have "refl x \ s = s" by (rule refl_pathcomp) + also have ".. = t" by fact + also have ".. 
= refl x \ t" by (rule refl_pathcomp[symmetric]) finally show "{}" by this qed done @@ -542,14 +542,13 @@ text \Conditions under which horizontal path-composition is defined.\A\<^esub> b" "q: a =\<^bsub>A\<^esub> b" "r: b =\<^bsub>A\<^esub> c" "s: b =\<^bsub>A\<^esub> c" begin Lemma (def) horiz_pathcomp: - notes assums assumes "\: p = q" "\: r = s" shows "p \ r = q \ s" proof (rule pathcomp) @@ -560,7 +559,6 @@ qed typechk text \A second horizontal composition:\ Lemma (def) horiz_pathcomp': - notes assums assumes "\: p = q" "\: r = s" shows "p \ r = q \ s" proof (rule pathcomp) @@ -572,13 +570,12 @@ notation horiz_pathcomp (infix "\" 121) notation horiz_pathcomp' (infix "\''" 121) Lemma (def) horiz_pathcomp_eq_horiz_pathcomp': - notes assums assumes "\: p = q" "\: r = s" shows "\ \ \ = \ \' \" unfolding horiz_pathcomp_def horiz_pathcomp'_def apply (eq \, eq \) focus vars p apply (eq p) - focus vars a q by (eq q) (reduce, refl) + focus vars a _ _ _ r by (eq r) (reduce, refl) done done @@ -597,7 +594,7 @@ Lemma \2_alt_def: section \Eckmann-Hilton\ -context fixes i A a assumes "A: U i" "a: A" +context fixes i A a assumes [type]: "A: U i" "a: A" begin interpretation \2: @@ -619,14 +616,18 @@ Lemma (def) pathcomp_eq_horiz_pathcomp: assumes "\: \2 A a" "\: \2 A a" shows "\ \ \ = \ \ \" unfolding \2.horiz_pathcomp_def - using assms[unfolded \2_alt_def] + (*FIXME: Definitional unfolding + normalization; shouldn't need explicit + unfolding*) + using assms[unfolded \2_alt_def, type] proof (reduce, rule pathinv) \ \Propositional equality rewriting needs to be improved\ - have "{} = refl (refl a) \ \" by (rule pathcomp_refl) + have "refl (refl a) \ \ \ refl (refl a) = refl (refl a) \ \" + by (rule pathcomp_refl) also have ".. = \" by (rule refl_pathcomp) finally have eq\: "{} = \" by this - have "{} = refl (refl a) \ \" by (rule pathcomp_refl) + have "refl (refl a) \ \ \ refl (refl a) = refl (refl a) \ \" + by (rule pathcomp_refl) also have ".. = \" by (rule refl_pathcomp) finally have eq\: "{} = \" by this @@ -640,13 +641,15 @@ Lemma (def) pathcomp_eq_horiz_pathcomp': assumes "\: \2 A a" "\: \2 A a" shows "\ \' \ = \ \ \" unfolding \2.horiz_pathcomp'_def - using assms[unfolded \2_alt_def] + using assms[unfolded \2_alt_def, type] proof reduce - have "{} = refl (refl a) \ \" by (rule pathcomp_refl) + have "refl (refl a) \ \ \ refl (refl a) = refl (refl a) \ \" + by (rule pathcomp_refl) also have ".. = \" by (rule refl_pathcomp) finally have eq\: "{} = \" by this - have "{} = refl (refl a) \ \" by (rule pathcomp_refl) + have "refl (refl a) \ \ \ refl (refl a) = refl (refl a) \ \" + by (rule pathcomp_refl) also have ".. 
= \" by (rule refl_pathcomp) finally have eq\: "{} = \" by this @@ -659,7 +662,7 @@ Lemma (def) pathcomp_eq_horiz_pathcomp': Lemma (def) eckmann_hilton: assumes "\: \2 A a" "\: \2 A a" shows "\ \ \ = \ \ \" - using assms[unfolded \2_alt_def] + using assms[unfolded \2_alt_def, type] proof - have "\ \ \ = \ \ \" by (rule pathcomp_eq_horiz_pathcomp) diff --git a/hott/Nat.thy b/hott/Nat.thy index 716703a..f45387c 100644 --- a/hott/Nat.thy +++ b/hott/Nat.thy @@ -62,17 +62,17 @@ subsection \Addition\ definition add (infixl "+" 120) where "m + n \ NatRec Nat m (K suc) n" -lemma add_type [type]: +Lemma add_type [type]: assumes "m: Nat" "n: Nat" shows "m + n: Nat" unfolding add_def by typechk -lemma add_zero [comp]: +Lemma add_zero [comp]: assumes "m: Nat" shows "m + 0 \ m" unfolding add_def by reduce -lemma add_suc [comp]: +Lemma add_suc [comp]: assumes "m: Nat" "n: Nat" shows "m + suc n \ suc (m + n)" unfolding add_def by reduce @@ -123,22 +123,22 @@ subsection \Multiplication\ definition mul (infixl "*" 121) where "m * n \ NatRec Nat 0 (K $ add m) n" -lemma mul_type [type]: +Lemma mul_type [type]: assumes "m: Nat" "n: Nat" shows "m * n: Nat" unfolding mul_def by typechk -lemma mul_zero [comp]: +Lemma mul_zero [comp]: assumes "n: Nat" shows "n * 0 \ 0" unfolding mul_def by reduce -lemma mul_one [comp]: +Lemma mul_one [comp]: assumes "n: Nat" shows "n * 1 \ n" unfolding mul_def by reduce -lemma mul_suc [comp]: +Lemma mul_suc [comp]: assumes "m: Nat" "n: Nat" shows "m * suc n \ m + m * n" unfolding mul_def by reduce diff --git a/spartan/core/Spartan.thy b/spartan/core/Spartan.thy index 7f13036..412881b 100644 --- a/spartan/core/Spartan.thy +++ b/spartan/core/Spartan.thy @@ -189,7 +189,7 @@ ML_file \context_tactical.ML\ subsection \Type-checking/inference\ \ \Rule attributes for the type-checker\ -named_theorems form and intr and comp and type +named_theorems form and intr and comp \ \Defines elimination automation and the `elim` attribute\ ML_file \elimination.ML\ @@ -202,7 +202,7 @@ lemmas [comp] = beta Sig_comp and [cong] = Pi_cong lam_cong Sig_cong -ML_file \typechecking.ML\ +ML_file \types.ML\ method_setup typechk = \Scan.succeed (K (CONTEXT_METHOD ( @@ -214,6 +214,7 @@ method_setup known = subsection \Statement commands\ +ML_file \context_facts.ML\ ML_file \focus.ML\ ML_file \elaboration.ML\ ML_file \elaborated_statement.ML\ @@ -374,12 +375,12 @@ translations "f x" \ "f `x" section \Functions\ -lemma eta_exp: +Lemma eta_exp: assumes "f: \x: A. B x" shows "f \ \x: A. f x" by (rule eta[symmetric]) -lemma refine_codomain: +Lemma refine_codomain: assumes "A: U i" "f: \x: A. B x" @@ -387,7 +388,7 @@ lemma refine_codomain: shows "f: \x: A. C x" by (subst eta_exp) -lemma lift_universe_codomain: +Lemma lift_universe_codomain: assumes "A: U i" "f: A \ U j" shows "f: A \ U (S j)" using in_USi_if_in_Ui[of "f {}"] @@ -402,7 +403,7 @@ syntax translations "g \\<^bsub>A\<^esub> f" \ "CONST funcomp A g f" -lemma funcompI [type]: +Lemma funcompI [type]: assumes "A: U i" "B: U i" @@ -413,7 +414,7 @@ lemma funcompI [type]: "g \\<^bsub>A\<^esub> f: \x: A. C (f x)" unfolding funcomp_def by typechk -lemma funcomp_assoc [comp]: +Lemma funcomp_assoc [comp]: assumes "A: U i" "f: A \ B" @@ -423,7 +424,7 @@ lemma funcomp_assoc [comp]: "(h \\<^bsub>B\<^esub> g) \\<^bsub>A\<^esub> f \ h \\<^bsub>A\<^esub> g \\<^bsub>A\<^esub> f" unfolding funcomp_def by reduce -lemma funcomp_lambda_comp [comp]: +Lemma funcomp_lambda_comp [comp]: assumes "A: U i" "\x. 
x: A \ b x: B" @@ -432,7 +433,7 @@ lemma funcomp_lambda_comp [comp]: "(\x: B. c x) \\<^bsub>A\<^esub> (\x: A. b x) \ \x: A. c (b x)" unfolding funcomp_def by reduce -lemma funcomp_apply_comp [comp]: +Lemma funcomp_apply_comp [comp]: assumes "A: U i" "B: U i" "\x y. x: B \ C x: U i" "f: A \ B" "g: \x: B. C x" @@ -456,12 +457,12 @@ lemma id_comp [comp]: "x: A \ (id A) x \ x" \ \for the occasional manual rewrite\ by reduce+ -lemma id_left [comp]: +Lemma id_left [comp]: assumes "A: U i" "B: U i" "f: A \ B" shows "(id B) \\<^bsub>A\<^esub> f \ f" by (subst eta_exp[of f]) (reduce, rule eta) -lemma id_right [comp]: +Lemma id_right [comp]: assumes "A: U i" "B: U i" "f: A \ B" shows "f \\<^bsub>A\<^esub> (id A) \ f" by (subst eta_exp[of f]) (reduce, rule eta) @@ -476,23 +477,23 @@ section \Pairs\ definition "fst A B \ \p: \x: A. B x. SigInd A B (fn _. A) (fn x y. x) p" definition "snd A B \ \p: \x: A. B x. SigInd A B (fn p. B (fst A B p)) (fn x y. y) p" -lemma fst_type [type]: +Lemma fst_type [type]: assumes "A: U i" "\x. x: A \ B x: U i" shows "fst A B: (\x: A. B x) \ A" unfolding fst_def by typechk -lemma fst_comp [comp]: +Lemma fst_comp [comp]: assumes "A: U i" "\x. x: A \ B x: U i" "a: A" "b: B a" shows "fst A B \ a" unfolding fst_def by reduce -lemma snd_type [type]: +Lemma snd_type [type]: assumes "A: U i" "\x. x: A \ B x: U i" shows "snd A B: \p: \x: A. B x. B (fst A B p)" unfolding snd_def by typechk reduce -lemma snd_comp [comp]: +Lemma snd_comp [comp]: assumes "A: U i" "\x. x: A \ B x: U i" "a: A" "b: B a" shows "snd A B \ b" unfolding snd_def by reduce diff --git a/spartan/core/context_facts.ML b/spartan/core/context_facts.ML new file mode 100644 index 0000000..4b4bcd9 --- /dev/null +++ b/spartan/core/context_facts.ML @@ -0,0 +1,44 @@ +structure Context_Facts: sig + +val Eq: Proof.context -> thm Item_Net.T +val eq: Proof.context -> thm list +val eq_of: Proof.context -> term -> thm list +val register_eq: thm -> Context.generic -> Context.generic +val register_eqs: thm list -> Context.generic -> Context.generic +val register_facts: thm list -> Proof.context -> Proof.context + +end = struct + + +(* Equality statements *) + +structure Eq = Generic_Data ( + type T = thm Item_Net.T + val empty = Item_Net.init Thm.eq_thm + (single o (#1 o Lib.dest_eq) o Thm.concl_of) + val extend = I + val merge = Item_Net.merge +) + +val Eq = Eq.get o Context.Proof +val eq = Item_Net.content o Eq +fun eq_of ctxt tm = Item_Net.retrieve (Eq ctxt) tm + +fun register_eq rule = + if Lib.is_eq (Thm.concl_of rule) then Eq.map (Item_Net.update rule) + else error "Not a definitional equality judgment" + +fun register_eqs rules = foldr1 (op o) (map register_eq rules) + + +(* Context assumptions *) + +fun register_facts ths = + let + val (facts, conds, eqs) = Lib.partition_judgments ths + val f = Types.register_knowns facts handle Empty => I + val c = Types.register_conds conds handle Empty => I + val e = register_eqs eqs handle Empty => I + in Context.proof_map (e o c o f) end + +end diff --git a/spartan/core/elaborated_statement.ML b/spartan/core/elaborated_statement.ML index 81f4a7d..33f88cf 100644 --- a/spartan/core/elaborated_statement.ML +++ b/spartan/core/elaborated_statement.ML @@ -416,6 +416,35 @@ val structured_statement = Parse_Spec.statement -- Parse_Spec.if_statement' -- Parse.for_fixes >> (fn ((shows, assumes), fixes) => (fixes, assumes, shows)) +fun these_factss more_facts (named_factss, state) = + (named_factss, state |> Proof.set_facts (maps snd named_factss @ more_facts)) + +fun gen_assume 
prep_statement prep_att export raw_fixes raw_prems raw_concls state = + let + val ctxt = Proof.context_of state; + + val bindings = map (apsnd (map (prep_att ctxt)) o fst) raw_concls; + val {fixes = params, assumes = prems_propss, shows = concl_propss, result_binds, text, ...} = + #1 (prep_statement raw_fixes raw_prems (map snd raw_concls) ctxt); + val propss = (map o map) (Logic.close_prop params (flat prems_propss)) concl_propss; + in + state + |> Proof.assert_forward + |> Proof.map_context_result (fn ctxt => + ctxt + |> Proof_Context.augment text + |> fold Variable.maybe_bind_term result_binds + |> fold_burrow (Assumption.add_assms export o map (Thm.cterm_of ctxt)) propss + |-> (fn premss => fn ctxt => + (premss, Context_Facts.register_facts (flat premss) ctxt)) + |-> (fn premss => + Proof_Context.note_thmss "" (bindings ~~ (map o map) (fn th => ([th], [])) premss))) + |> these_factss [] |> #2 + end + +val assume = + gen_assume Proof_Context.cert_statement (K I) Assumption.assume_export + in val _ = Outer_Syntax.command \<^command_keyword>\assuming\ "elaborated assumption" @@ -433,7 +462,7 @@ val _ = Outer_Syntax.command \<^command_keyword>\assuming\ "elabora val c' = c |> map (fn ((b, atts), ss) => ((b, map (Attrib.attribute_cmd ctxt) atts), map read_terms ss)) val c'' = Elab.elaborate ctxt [] c' - in Proof.assume a' b' c'' state end))) + in assume a' b' c'' state end))) end diff --git a/spartan/core/elimination.ML b/spartan/core/elimination.ML index cd4e414..cf9d21e 100644 --- a/spartan/core/elimination.ML +++ b/spartan/core/elimination.ML @@ -41,7 +41,7 @@ val _ = Theory.setup ( (Thm.declaration_attribute o register_rule)) "" #> Global_Theory.add_thms_dynamic (\<^binding>\elim\, - fn context => (map #1 (rules (Context.proof_of context)))) + fn context => map #1 (rules (Context.proof_of context))) ) diff --git a/spartan/core/focus.ML b/spartan/core/focus.ML index 2ad4c8a..b963cfe 100644 --- a/spartan/core/focus.ML +++ b/spartan/core/focus.ML @@ -117,7 +117,8 @@ fun gen_schematic_subgoal prep_atts raw_result_binding param_specs state = |> Proof.map_context (fn _ => #context subgoal_focus |> Proof_Context.note_thmss "" [((Binding.name "prems", []), [(prems, [])])] - |> #2) + |> snd + |> Context_Facts.register_facts prems) |> Proof.internal_goal (K (K ())) (Proof_Context.get_mode ctxt) true "subgoal" NONE after_qed [] [] [(Binding.empty_atts, [(Thm.term_of (#concl subgoal_focus), [])])] |> #2 diff --git a/spartan/core/goals.ML b/spartan/core/goals.ML index a04bd0e..7d52495 100644 --- a/spartan/core/goals.ML +++ b/spartan/core/goals.ML @@ -175,7 +175,8 @@ fun gen_schematic_theorem in goal_ctxt |> not (null prems) ? 
- (Proof_Context.note_thmss "" [((Binding.name prems_name, []), [(prems, [])])] #> snd) + (Proof_Context.note_thmss "" [((Binding.name prems_name, []), [(prems, [])])] + #> snd #> Context_Facts.register_facts prems) |> Proof.theorem before_qed gen_and_after_qed (map snd stmt) |> (case facts of NONE => I | SOME ths => Proof.refine_insert ths) end @@ -188,12 +189,12 @@ val schematic_theorem_cmd = fun theorem spec descr = Outer_Syntax.local_theory_to_proof' spec ("state " ^ descr) - ( Scan.option (Args.parens (Args.$$$ "def")) + (Scan.option (Args.parens (Args.$$$ "def")) -- (long_statement || short_statement) >> (fn (opt_derive, (long, binding, includes, elems, concl)) => schematic_theorem_cmd (case opt_derive of SOME "def" => true | _ => false) - long descr NONE (K I) binding includes elems concl) ) + long descr NONE (K I) binding includes elems concl)) fun definition spec descr = Outer_Syntax.local_theory_to_proof' spec "definition via proof" diff --git a/spartan/core/lib.ML b/spartan/core/lib.ML index 392ae2e..e43ad98 100644 --- a/spartan/core/lib.ML +++ b/spartan/core/lib.ML @@ -27,6 +27,9 @@ val typing_of_term: term -> term val decompose_goal: Proof.context -> term -> term list * term val rigid_typing_concl: term -> bool +(*Theorems*) +val partition_judgments: thm list -> thm list * thm list * thm list + (*Subterms*) val has_subterm: term list -> term -> bool val subterm_count: term -> term -> int @@ -121,6 +124,19 @@ fun rigid_typing_concl goal = in is_typing concl andalso is_rigid (term_of_typing concl) end +(** Theorems **) +fun partition_judgments ths = + let + fun part [] facts conds eqs = (facts, conds, eqs) + | part (th::ths) facts conds eqs = + if is_typing (Thm.prop_of th) then + part ths (th::facts) conds eqs + else if is_typing (Thm.concl_of th) then + part ths facts (th::conds) eqs + else part ths facts conds (th::eqs) + in part ths [] [] [] end + + (** Subterms **) fun has_subterm tms = diff --git a/spartan/core/tactics.ML b/spartan/core/tactics.ML index 45fd119..0c46ef0 100644 --- a/spartan/core/tactics.ML +++ b/spartan/core/tactics.ML @@ -82,8 +82,8 @@ fun internalize_fact_tac t = Drule.infer_instantiate' ctxt [NONE, NONE, SOME B, SOME a] @{thm PiE} in HEADGOAL (resolve_tac ctxt [resolvent]) - (*Infer the correct type T*) - THEN SOMEGOAL (NO_CONTEXT_TACTIC ctxt o Types.check_infer []) + (*Unify with the correct type T*) + THEN SOMEGOAL (NO_CONTEXT_TACTIC ctxt o Types.known_ctac []) end) fun elim_core_tac tms types ctxt = @@ -111,18 +111,16 @@ fun elim_ctac tms = [] => CONTEXT_TACTIC' (fn ctxt => eresolve_tac ctxt (map #1 (Elim.rules ctxt))) | major :: _ => CONTEXT_SUBGOAL (fn (goal, _) => fn cst as (ctxt, st) => let - val facts = Proof_Context.facts_of ctxt + val facts = map Thm.prop_of (Types.known ctxt) val prems = Logic.strip_assums_hyp goal val template = Lib.typing_of_term major - val types = - map (Thm.prop_of o #1) (Facts.could_unify facts template) - @ filter (fn prem => Term.could_unify (template, prem)) prems + val types = filter (fn th => Term.could_unify (template, th)) (facts @ prems) |> map Lib.type_of_typing in case types of [] => no_ctac cst | _ => let - val inserts = map (Thm.prop_of o fst) (Facts.props facts) @ prems + val inserts = facts @ prems |> filter Lib.is_typing |> map Lib.dest_typing |> filter_out (fn (t, _) => @@ -170,7 +168,7 @@ fun cases_ctac tm = | [] => raise Option | _ => raise error (Syntax.string_of_term ctxt tm ^ "not uniquely typed")) handle Option => - error ("no case rule known for " ^ (Syntax.string_of_term ctxt tm)) + error ("No 
case rule known for " ^ (Syntax.string_of_term ctxt tm)) in resolve_tac ctxt [res] i end) diff --git a/spartan/core/typechecking.ML b/spartan/core/typechecking.ML deleted file mode 100644 index ca89c8c..0000000 --- a/spartan/core/typechecking.ML +++ /dev/null @@ -1,96 +0,0 @@ -(* Title: typechecking.ML - Author: Joshua Chen - -Type information and type-checking infrastructure. -*) - -structure Types: sig - -val Data: Proof.context -> thm Item_Net.T -val types: Proof.context -> term -> thm list -val put_type: thm -> Proof.context -> Proof.context -val put_types: thm list -> Proof.context -> Proof.context - -val debug_typechk: bool Config.T - -val known_ctac: thm list -> int -> context_tactic -val check_infer: thm list -> int -> context_tactic - -end = struct - - -(* Context data *) - -structure Data = Generic_Data ( - type T = thm Item_Net.T - val empty = Item_Net.init Thm.eq_thm - (single o Lib.term_of_typing o Thm.prop_of) - val extend = I - val merge = Item_Net.merge -) - -val Data = Data.get o Context.Proof -fun types ctxt tm = Item_Net.retrieve (Data ctxt) tm -fun put_type typing = Context.proof_map (Data.map (Item_Net.update typing)) -fun put_types typings = foldr1 (op o) (map put_type typings) - - -(* Context tactics for type-checking *) - -val debug_typechk = Attrib.setup_config_bool \<^binding>\debug_typechk\ (K false) - -fun debug_tac ctxt s = - if Config.get ctxt debug_typechk then print_tac ctxt s else all_tac - -(*Solves goals without metavariables and type inference problems by resolving - with facts or assumption from inline premises.*) -fun known_ctac facts = CONTEXT_SUBGOAL (fn (goal, i) => fn (ctxt, st) => - TACTIC_CONTEXT ctxt - let val concl = Logic.strip_assums_concl goal in - if Lib.no_vars concl orelse - (Lib.is_typing concl andalso Lib.no_vars (Lib.term_of_typing concl)) - then - let val ths = map (Simplifier.norm_hhf ctxt) facts - in st |> - (assume_tac ctxt ORELSE' resolve_tac ctxt ths THEN_ALL_NEW K no_tac) i - end - else Seq.empty - end) - -(*Simple bidirectional typing tactic, with some nondeterminism from backtracking - search over arbitrary `type` rules. The current implementation does not - perform any normalization.*) -fun check_infer_step facts i (ctxt, st) = - let - val tac = SUBGOAL (fn (goal, i) => - if Lib.rigid_typing_concl goal - then - let val net = Tactic.build_net ( - map (Simplifier.norm_hhf ctxt) - (*FIXME: Shouldn't pull nameless facts directly from context. 
Note - that we *do* need to be able to resolve with conditional - statements expressing type family judgments*) - (facts @ map fst (Facts.props (Proof_Context.facts_of ctxt))) - (*MAYBE FIXME: Remove `type` from this list, and instead perform - definitional unfolding to (w?)hnf.*) - @(Named_Theorems.get ctxt \<^named_theorems>\type\) - @(Named_Theorems.get ctxt \<^named_theorems>\form\) - @(Named_Theorems.get ctxt \<^named_theorems>\intr\) - @(map #1 (Elim.rules ctxt))) - in (resolve_from_net_tac ctxt net) i end - else no_tac) - - val ctxt' = ctxt (*TODO: Use this to store already-derived typing judgments*) - in - TACTIC_CONTEXT ctxt' (tac i st) - end - -fun check_infer facts i (cst as (_, st)) = - let val ctac = known_ctac facts CORELSE' check_infer_step facts - in - cst |> (ctac i CTHEN - CREPEAT_IN_RANGE i (Thm.nprems_of st - i) (CTRY o CREPEAT_ALL_NEW_FWD ctac)) - end - - -end diff --git a/spartan/core/types.ML b/spartan/core/types.ML new file mode 100644 index 0000000..a521f7c --- /dev/null +++ b/spartan/core/types.ML @@ -0,0 +1,141 @@ +(* Title: typechecking.ML + Author: Joshua Chen + +Type information and type-checking infrastructure. +*) + +structure Types: sig + +val Known: Proof.context -> thm Item_Net.T +val known: Proof.context -> thm list +val known_of: Proof.context -> term -> thm list +val register_known: thm -> Context.generic -> Context.generic +val register_knowns: thm list -> Context.generic -> Context.generic + +val Cond: Proof.context -> thm Item_Net.T +val cond: Proof.context -> thm list +val cond_of: Proof.context -> term -> thm list +val register_cond: thm -> Context.generic -> Context.generic +val register_conds: thm list -> Context.generic -> Context.generic + +val debug_typechk: bool Config.T + +val known_ctac: thm list -> int -> context_tactic +val check_infer: thm list -> int -> context_tactic + +end = struct + + +(** Context data **) + +(* Known types *) + +structure Known = Generic_Data ( + type T = thm Item_Net.T + val empty = Item_Net.init Thm.eq_thm + (single o Lib.term_of_typing o Thm.prop_of) + val extend = I + val merge = Item_Net.merge +) + +val Known = Known.get o Context.Proof +val known = Item_Net.content o Known +fun known_of ctxt tm = Item_Net.retrieve (Known ctxt) tm + +fun register_known typing = + if Lib.is_typing (Thm.prop_of typing) then Known.map (Item_Net.update typing) + else error "Not a type judgment" + +fun register_knowns typings = foldr1 (op o) (map register_known typings) + +(* Conditional type rules *) + +(*Two important cases: 1. general type inference rules and 2. 
type family + judgments*) + +structure Cond = Generic_Data ( + type T = thm Item_Net.T + val empty = Item_Net.init Thm.eq_thm + (single o Lib.term_of_typing o Thm.concl_of) + val extend = I + val merge = Item_Net.merge +) + +val Cond = Cond.get o Context.Proof +val cond = Item_Net.content o Cond +fun cond_of ctxt tm = Item_Net.retrieve (Cond ctxt) tm + +fun register_cond rule = + if Lib.is_typing (Thm.concl_of rule) then Cond.map (Item_Net.update rule) + else error "Not a conditional type judgment" + +fun register_conds rules = foldr1 (op o) (map register_cond rules) + + +(** [type] attribute **) + +val _ = Theory.setup ( + Attrib.setup \<^binding>\type\ + (Scan.succeed (Thm.declaration_attribute (fn th => + if Thm.no_prems th then register_known th else register_cond th))) + "" + #> Global_Theory.add_thms_dynamic (\<^binding>\type\, + fn context => let val ctxt = Context.proof_of context in + known ctxt @ cond ctxt end) +) + + +(** Context tactics for type-checking and elaboration **) + +val debug_typechk = Attrib.setup_config_bool \<^binding>\debug_typechk\ (K false) + +fun debug_tac ctxt s = + if Config.get ctxt debug_typechk then print_tac ctxt s else all_tac + +(*Solves goals without metavariables and type inference problems by assumption + from inline premises or resolution with facts*) +fun known_ctac facts = CONTEXT_SUBGOAL (fn (goal, i) => fn (ctxt, st) => + TACTIC_CONTEXT ctxt + let val concl = Logic.strip_assums_concl goal in + if Lib.no_vars concl orelse + (Lib.is_typing concl andalso Lib.no_vars (Lib.term_of_typing concl)) + then + let val ths = known ctxt @ map (Simplifier.norm_hhf ctxt) facts + in st |> + (assume_tac ctxt ORELSE' resolve_tac ctxt ths THEN_ALL_NEW K no_tac) i + end + else Seq.empty + end) + +(*Simple bidirectional typing tactic, with some nondeterminism from backtracking + search over input facts. The current implementation does not perform any + normalization.*) +fun check_infer_step facts i (ctxt, st) = + let + val tac = SUBGOAL (fn (goal, i) => + if Lib.rigid_typing_concl goal + then + let val net = Tactic.build_net ( + map (Simplifier.norm_hhf ctxt) facts + @(cond ctxt) + @(Named_Theorems.get ctxt \<^named_theorems>\form\) + @(Named_Theorems.get ctxt \<^named_theorems>\intr\) + @(map #1 (Elim.rules ctxt))) + in (resolve_from_net_tac ctxt net) i end + else no_tac) + + val ctxt' = ctxt (*TODO: Use this to store already-derived typing judgments*) + in + TACTIC_CONTEXT ctxt' (tac i st) + end + +fun check_infer facts i (cst as (_, st)) = + let + val ctac = known_ctac facts CORELSE' check_infer_step facts + in + cst |> (ctac i CTHEN + CREPEAT_IN_RANGE i (Thm.nprems_of st - i) (CTRY o CREPEAT_ALL_NEW_FWD ctac)) + end + + +end diff --git a/spartan/lib/List.thy b/spartan/lib/List.thy index dd51582..83e5149 100644 --- a/spartan/lib/List.thy +++ b/spartan/lib/List.thy @@ -149,7 +149,7 @@ Definition map: proof (elim xs) show "[]: List B" by intro next fix x ys - assume "x: A" "ys: List B" + assuming "x: A" "ys: List B" show "f x # ys: List B" by typechk qed diff --git a/spartan/lib/Maybe.thy b/spartan/lib/Maybe.thy index 0a7ec21..da22a4e 100644 --- a/spartan/lib/Maybe.thy +++ b/spartan/lib/Maybe.thy @@ -25,10 +25,10 @@ Definition MaybeInd: "\a. 
a: A \ f a: C (some A a)" "m: Maybe A" shows "C m" - using assms[unfolded Maybe_def none_def some_def] + using assms[unfolded Maybe_def none_def some_def, type] apply (elim m) - apply (rule \_ \ _: C (inl _ _ _)\) - apply (elim, rule \_: C (inr _ _ _)\) + apply fact + apply (elim, fact) done Lemma Maybe_comp_none: diff --git a/spartan/lib/Prelude.thy b/spartan/lib/Prelude.thy index 6adbce8..c0abf31 100644 --- a/spartan/lib/Prelude.thy +++ b/spartan/lib/Prelude.thy @@ -105,7 +105,8 @@ Definition ifelse [rotated 1]: "a: C true" "b: C false" shows "C x" - by (elim x) (elim, rule *)+ + using assms[unfolded Bool_def true_def false_def, type] + by (elim x) (elim, fact)+ Lemma if_true: assumes -- cgit v1.2.3
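
Addendum (illustrative only, not part of the patch): the fragment below sketches how a proof reads after the MINOR INCOMPATIBILITY noted in the commit message, using the `pathcomp` hunk from hott/Identity.thy as the example. The `assumes` list is not visible in the hunk and is reproduced here from the repository, so treat it as an assumption of this sketch; the point illustrated is that `assuming` replaces `assume` so the hypotheses land in the new type-checker context data, the alternatives being a `[type]` annotation or passing the facts explicitly with `using`.

    Lemma (def) pathcomp:
      assumes
        "A: U i" "x: A" "y: A" "z: A"
        "p: x =\<^bsub>A\<^esub> y" "q: y =\<^bsub>A\<^esub> z"
      shows "x =\<^bsub>A\<^esub> z"
    proof (eq p)
      (*`assuming` registers the typing hypotheses in the new context data
        slots, so the subsequent `eq q` step can look up the type of q; with
        a plain `assume` those facts would now be invisible to the tactics
        unless passed via `using` or declared with [type]*)
      fix x q assuming "x: A" and "q: x =\<^bsub>A\<^esub> z"
      show "x =\<^bsub>A\<^esub> z" by (eq q) refl
    qed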