# HG changeset patch # User wenzelm # Date 1429285759 -7200 # Node ID 54bea620e54f698966275176987e56d7fec238c3 # Parent 5d90d301ad661ea08355a645718fc5c4c811307a added Eisbach, using version 3752768caa17 of its Bitbucket repository; diff -r 5d90d301ad66 -r 54bea620e54f ANNOUNCE --- a/ANNOUNCE Fri Apr 17 16:54:25 2015 +0200 +++ b/ANNOUNCE Fri Apr 17 17:49:19 2015 +0200 @@ -24,6 +24,8 @@ * New proof method "rewrite" for single-step rewriting with subterm selection based on patterns. +* New Eisbach proof method language and "match" method. + * Updated manuals: datatypes, implementation, isar-ref, jedit, sledgehammer, system. diff -r 5d90d301ad66 -r 54bea620e54f CONTRIBUTORS --- a/CONTRIBUTORS Fri Apr 17 16:54:25 2015 +0200 +++ b/CONTRIBUTORS Fri Apr 17 17:49:19 2015 +0200 @@ -6,6 +6,9 @@ Contributions to Isabelle2015 ----------------------------- +* 2014/2015: Daniel Matichuk, Toby Murray, NICTA and Makarius Wenzel + The Eisbach proof method language and "match" method. + * March 2015: Jasmin Blanchette, Inria & LORIA & MPII, Mathias Fleury, MPII, and Dmitriy Traytel, TUM More multiset theorems, syntax, and operations. diff -r 5d90d301ad66 -r 54bea620e54f NEWS --- a/NEWS Fri Apr 17 16:54:25 2015 +0200 +++ b/NEWS Fri Apr 17 17:49:19 2015 +0200 @@ -59,6 +59,13 @@ * Structural composition of proof methods (meth1; meth2) in Isar corresponds to (tac1 THEN_ALL_NEW tac2) in ML. +* The Eisbach proof method language allows to define new proof methods +by combining existing ones with their usual syntax. The "match" proof +method provides basic fact/term matching in addition to +premise/conclusion matching through Subgoal.focus, and binds fact names +from matches as well as term patterns within matches. See also +~~/src/HOL/Eisbach/Eisbach.thy and the included examples. + *** Prover IDE -- Isabelle/Scala/jEdit *** diff -r 5d90d301ad66 -r 54bea620e54f src/HOL/Eisbach/Eisbach.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Eisbach/Eisbach.thy Fri Apr 17 17:49:19 2015 +0200 @@ -0,0 +1,43 @@ +(* Title: Eisbach.thy + Author: Daniel Matichuk, NICTA/UNSW + +Main entry point for Eisbach proof method language. +*) + +theory Eisbach +imports Pure +keywords + "method" :: thy_decl and + "concl" + "prems" (* FIXME conflict with "prems" in Isar, which is presently dormant *) + "declares" + "methods" + "\" "\" + "uses" +begin + +ML_file "parse_tools.ML" +ML_file "eisbach_rule_insts.ML" +ML_file "method_closure.ML" +ML_file "match_method.ML" +ML_file "eisbach_antiquotations.ML" + +(* FIXME reform Isabelle/Pure attributes to make this work by default *) +attribute_setup THEN = + \Scan.lift (Scan.optional (Args.bracks Parse.nat) 1) -- Attrib.thm >> (fn (i, B) => + Method_Closure.free_aware_rule_attribute [B] (fn _ => fn A => A RSN (i, B)))\ + "resolution with rule" + +attribute_setup OF = + \Attrib.thms >> (fn Bs => + Method_Closure.free_aware_rule_attribute Bs (fn _ => fn A => A OF Bs))\ + "rule resolved with facts" + +attribute_setup rotated = + \Scan.lift (Scan.optional Parse.int 1 >> (fn n => + Method_Closure.free_aware_rule_attribute [] (fn _ => rotate_prems n)))\ + "rotated theorem premises" + +method solves methods m = \m; fail\ + +end diff -r 5d90d301ad66 -r 54bea620e54f src/HOL/Eisbach/Eisbach_Tools.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Eisbach/Eisbach_Tools.thy Fri Apr 17 17:49:19 2015 +0200 @@ -0,0 +1,43 @@ +(* Title: Eisbach_Tools.thy + Author: Daniel Matichuk, NICTA/UNSW + +Usability tools for Eisbach. 
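+
+A typical use of the tracing methods defined below (print_fact, print_term)
+inside a match body -- an illustrative sketch, assuming a HOL context such as
+the Examples theory of this session:
+
+  lemma "A \<and> B \<Longrightarrow> A"
+    apply (match prems in H: "A \<and> B" \<Rightarrow>
+      \<open>print_fact H, rule conjunct1[OF H]\<close>)
+    done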
+*) + +theory Eisbach_Tools +imports Eisbach +begin + +ML \ +local + +fun trace_method parser print = + Scan.lift (Args.mode "dummy") -- parser >> + (fn (dummy, x) => fn ctxt => SIMPLE_METHOD (fn st => + (if dummy orelse not (Method_Closure.is_dummy st) then tracing (print ctxt x) else (); + all_tac st))); + +fun setup_trace_method binding parser print = + Method.setup binding + (trace_method parser (fn ctxt => fn x => Binding.name_of binding ^ ": " ^ print ctxt x)) + ""; + +in + +val _ = + Theory.setup + (setup_trace_method @{binding print_fact} + (Scan.lift (Scan.ahead Parse.not_eof) -- Attrib.thms) + (fn ctxt => fn (tok, thms) => + (* FIXME proper formatting!? *) + Token.unparse tok ^ ": " ^ commas (map (Display.string_of_thm ctxt) thms)) #> + setup_trace_method @{binding print_term} + (Scan.lift (Scan.ahead Parse.not_eof) -- Args.term) + (fn ctxt => fn (tok, t) => + (* FIXME proper formatting!? *) + Token.unparse tok ^ ": " ^ Syntax.string_of_term ctxt t)); + +end +\ + +end diff -r 5d90d301ad66 -r 54bea620e54f src/HOL/Eisbach/Examples.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Eisbach/Examples.thy Fri Apr 17 17:49:19 2015 +0200 @@ -0,0 +1,220 @@ +(* Title: Examples.thy + Author: Daniel Matichuk, NICTA/UNSW +*) + +section \Basic Eisbach examples\ + +theory Examples +imports Main Eisbach_Tools +begin + + +subsection \Basic methods\ + +method my_intros = \rule conjI | rule impI\ + +lemma "P \ Q \ Z \ X" + apply my_intros+ + oops + +method my_intros' uses intros = \rule conjI | rule impI | rule intros\ + +lemma "P \ Q \ Z \ X" + apply (my_intros' intros: disjI1)+ + oops + +method my_spec for x :: 'a = \drule spec[where x="x"]\ + +lemma "\x. P x \ P x" + apply (my_spec x) + apply assumption + done + + +subsection \Focusing and matching\ + +method match_test = + \match prems in U: "P x \ Q x" for P Q x \ + \print_term P, + print_term Q, + print_term x, + print_fact U\\ + +lemma "\x. P x \ Q x \ A x \ B x \ R x y \ True" + apply match_test -- \Valid match, but not quite what we were expecting..\ + back -- \Can backtrack over matches, exploring all bindings\ + back + back + back + back + back -- \Found the other conjunction\ + back + back + back + back + back + oops + +text \Use matching to avoid "improper" methods\ + +lemma focus_test: + shows "\x. \x. P x \ P x" + apply (my_spec "x :: 'a", assumption)? -- \Wrong x\ + apply (match concl in "P x" for x \ \my_spec x, assumption\) + done + + +text \Matches are exclusive. Backtracking will not occur past a match\ + +method match_test' = + \match concl in + "P \ Q" for P Q \ + \print_term P, print_term Q, rule conjI[where P="P" and Q="Q"]; assumption\ + \ "H" for H \ \print_term H\ + \ + +text \Solves goal\ +lemma "P \ Q \ P \ Q" + apply match_test' + done + +text \Fall-through case never taken\ +lemma "P \ Q" + apply match_test'? + oops + +lemma "P" + apply match_test' + oops + + +method my_spec_guess = + \match concl in "P (x :: 'a)" for P x \ + \drule spec[where x=x], + print_term P, + print_term x\\ + +lemma "\x. P (x :: nat) \ Q (x :: nat)" + apply my_spec_guess + oops + +method my_spec_guess2 = + \match prems in U[thin]:"\x. P x \ Q x" and U':"P x" for P Q x \ + \insert spec[where x=x, OF U], + print_term P, + print_term Q\\ + +lemma "\x. P x \ Q x \ Q x \ Q x" + apply my_spec_guess2? -- \Fails. Note that both "P"s must match\ + oops + +lemma "\x. 
P x \ Q x \ P x \ Q x" + apply my_spec_guess2 + apply (erule mp) + apply assumption + done + + +subsection \Higher-order methods\ + +method higher_order_example for x methods meth = + \cases x, meth, meth\ + +lemma + assumes A: "x = Some a" + shows "the x = a" + by (higher_order_example x \simp add: A\) + + +subsection \Recursion\ + +method recursion_example for x :: bool = + \print_term x, + match (x) in "A \ B" for A B \ + \(print_term A, + print_term B, + recursion_example A, + recursion_example B) | -\\ + +lemma "P" + apply (recursion_example "(A \ D) \ (B \ C)") + oops + + +subsection \Solves combinator\ + +lemma "A \ B \ A \ B" + apply (solves \rule conjI\)? -- \Doesn't solve the goal!\ + apply (solves \rule conjI, assumption, assumption\) + done + + +subsection \Demo\ + +method solve methods m = \m;fail\ + +named_theorems intros and elims and subst + +method prop_solver declares intros elims subst = + \(assumption | + rule intros | erule elims | + subst subst | subst (asm) subst | + (erule notE; solve \prop_solver\))+\ + +lemmas [intros] = + conjI + impI + disjCI + iffI + notI +lemmas [elims] = + impCE + conjE + disjE + +lemma "((A \ B) \ (A \ C) \ (B \ C)) \ C" + apply prop_solver + done + +method guess_all = + \match prems in U[thin]:"\x. P (x :: 'a)" for P \ + \match prems in "?H (y :: 'a)" for y \ + \rule allE[where P = P and x = y, OF U]\ + | match concl in "?H (y :: 'a)" for y \ + \rule allE[where P = P and x = y, OF U]\\\ + +lemma "(\x. P x \ Q x) \ P y \ Q y" + apply guess_all + apply prop_solver + done + +lemma "(\x. P x \ Q x) \ P z \ P y \ Q y" + apply (solve \guess_all, prop_solver\) -- \Try it without solve\ + done + +method guess_ex = + \match concl in + "\x. P (x :: 'a)" for P \ + \match prems in "?H (x :: 'a)" for x \ + \rule exI[where x=x]\\\ + +lemma "P x \ \x. P x" + apply guess_ex + apply prop_solver + done + +method fol_solver = + \(guess_ex | guess_all | prop_solver) ; solve \fol_solver\\ + +declare + allI [intros] + exE [elims] + ex_simps [subst] + all_simps [subst] + +lemma "(\x. P x) \ (\x. Q x) \ (\x. P x \ Q x)" + and "\x. P x \ (\x. P x)" + and "(\x. \y. R x y) \ (\y. \x. R x y)" + by fol_solver+ + +end diff -r 5d90d301ad66 -r 54bea620e54f src/HOL/Eisbach/Tests.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Eisbach/Tests.thy Fri Apr 17 17:49:19 2015 +0200 @@ -0,0 +1,272 @@ +(* Title: Tests.thy + Author: Daniel Matichuk, NICTA/UNSW +*) + +section \Miscellaneous Eisbach tests\ + +theory Tests +imports Main Eisbach_Tools +begin + +section \Named Theorems Tests\ + +named_theorems foo + +method foo declares foo = + \rule foo\ + +lemma + assumes A [foo]: A + shows A + apply foo + done + + +section \Match Tests\ + +notepad +begin + have dup: "\A. A \ A \ A" by simp + + fix A y + have "(\x. A x) \ A y" + apply (rule dup, match prems in Y: "\B. P B" for P \ \match (P) in A \ \print_fact Y, rule Y\\) + apply (rule dup, match prems in Y: "\B :: 'a. P B" for P \ \match (P) in A \ \print_fact Y, rule Y\\) + apply (rule dup, match prems in Y: "\B :: 'a. P B" for P \ \match concl in "P y" for y \ \print_fact Y, print_term y, rule Y[where B=y]\\) + apply (rule dup, match prems in Y: "\B :: 'a. P B" for P \ \match concl in "P z" for z \ \print_fact Y, print_term y, rule Y[where B=z]\\) + apply (rule dup, match concl in "P y" for P \ \match prems in Y: "\z. P z" \ \print_fact Y, rule Y[where z=y]\\) + apply (match prems in Y: "\z :: 'a. P z" for P \ \match concl in "P y" \ \print_fact Y, rule Y[where z=y]\\) + done + + assume X: "\x. 
A x" "A y" + have "A y" + apply (match X in Y:"\B. A B" and Y':"B ?x" for B \ \print_fact Y[where B=y], print_term B\) + apply (match X in Y:"B ?x" and Y':"B ?x" for B \ \print_fact Y, print_term B\) + apply (match X in Y:"B x" and Y':"B x" for B x \ \print_fact Y, print_term B, print_term x\) + apply (insert X) + apply (match prems in Y:"\B. A B" and Y':"B y" for B and y :: 'a \ \print_fact Y[where B=y], print_term B\) + apply (match prems in Y:"B ?x" and Y':"B ?x" for B \ \print_fact Y, print_term B\) + apply (match prems in Y:"B x" and Y':"B x" for B x \ \print_fact Y, print_term B\) + apply (match concl in "P x" and "P y" for P x \ \print_term P, print_term x\) + apply assumption + done + + (* match focusing retains prems *) + fix B x + have "(\x. A x) \ (\z. B z) \ A y \ B x" + apply (match prems in Y: "\z :: 'a. A z" \ \match prems in Y': "\z :: 'b. B z" \ \print_fact Y, print_fact Y', rule Y'[where z=x]\\) + done + + (*Attributes *) + fix C + have "(\x :: 'a. A x) \ (\z. B z) \ A y \ B x \ B x \ A y" + apply (intro conjI) + apply (match prems in Y: "\z :: 'a. A z" and Y'[intro]:"\z :: 'b. B z" \ \fastforce\) + apply (match prems in Y: "\z :: 'a. A z" \ \match prems in Y'[intro]:"\z :: 'b. B z" \ \fastforce\\) + apply (match prems in Y[thin]: "\z :: 'a. A z" \ \(match prems in Y':"\z :: 'a. A z" \ \fail\ \ Y': "?H" \ \-\)\) + apply assumption + done + + fix A B C D + have "\uu'' uu''' uu uu'. (\x :: 'a. A uu' x) \ D uu y \ (\z. B uu z) \ C uu y \ (\z y. C uu z) \ B uu x \ B uu x \ C uu y" + apply (match prems in Y[thin]: "\z :: 'a. A ?zz' z" and + Y'[thin]: "\rr :: 'b. B ?zz rr" \ + \print_fact Y, print_fact Y', intro conjI, rule Y', insert Y', insert Y'[where rr=x]\) + apply (match prems in Y:"B ?u ?x" \ \rule Y\) + apply (insert TrueI) + apply (match prems in Y'[thin]: "\ff. B uu ff" for uu \ \insert Y', drule meta_spec[where x=x]\) + apply assumption + done + + + (* Multi-matches. As many facts as match are bound. *) + fix A B C x + have "(\x :: 'a. A x) \ (\y :: 'a. B y) \ C y \ (A x \ B y \ C y)" + apply (match prems in Y[thin]: "\z :: 'a. ?A z" (multi) \ \intro conjI, (rule Y)+\) + apply (match prems in Y[thin]: "\z :: 'a. ?A z" (multi) \ \fail\ \ "C y" \ \-\) (* multi-match must bind something *) + apply (match prems in Y: "C y" \ \rule Y\) + done + + fix A B C x + have "(\x :: 'a. A x) \ (\y :: 'a. B y) \ C y \ (A x \ B y \ C y)" + apply (match prems in Y[thin]: "\z. ?A z" (multi) \ \intro conjI, (rule Y)+\) + apply (match prems in Y[thin]: "\z. ?A z" (multi) \ \fail\ \ "C y" \ \-\) (* multi-match must bind something *) + apply (match prems in Y: "C y" \ \rule Y\) + done + + + (*We may use for-fixes in multi-matches too. All bound facts must agree on the fixed term *) + fix A B C x + have "(\y :: 'a. B y \ C y) \ (\x :: 'a. A x \ B x) \ (\y :: 'a. A y \ C y) \ C y \ (A x \ B y \ C y)" + apply (match prems in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \intro conjI Y[THEN conjunct1]\) + apply (match prems in Y: "\z :: 'a. ?A z \ False" (multi) \ \print_fact Y, fail\ \ "C y" \ \print_term C\) (* multi-match must bind something *) + apply (match prems in Y: "\x. B x \ C x" \ \intro conjI Y[THEN conjunct1]\) + apply (match prems in Y: "C ?x" \ \rule Y\) + done + + (* Attributes *) + fix A B C x + have "(\x :: 'a. A x \ B x) \ (\y :: 'a. A y \ C y) \ (\y :: 'a. B y \ C y) \ C y \ (A x \ B y \ C y)" + apply (match prems in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \match Y[THEN conjunct1] in Y':"?H" (multi) \ \intro conjI,rule Y'\\) + apply (match prems in Y: "\x :: 'a. 
P x \ ?U x" (multi) for P \ \match Y[THEN conjunct2] in Y':"?H" (multi) \ \rule Y'\\) + apply assumption + done + +(* Removed feature for now *) +(* + fix A B C x + have "(\x :: 'a. A x \ B x) \ (\y :: 'a. A y \ C y) \ (\y :: 'a. B y \ C y) \ C y \ (A x \ B y \ C y)" + apply (match prems in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \match \K @{thms Y TrueI}\ in Y':"?H" (multi) \ \rule conjI; (rule Y')?\\) + apply (match prems in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \match \K [@{thm Y}]\ in Y':"?H" (multi) \ \rule Y'\\) + done +*) + (* Testing THEN_ALL_NEW within match *) + fix A B C x + have "(\x :: 'a. A x \ B x) \ (\y :: 'a. A y \ C y) \ (\y :: 'a. B y \ C y) \ C y \ (A x \ B y \ C y)" + apply (match prems in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \intro conjI ; ((rule Y[THEN conjunct1])?); rule Y[THEN conjunct2] \) + done + + (* Cut tests *) + fix A B C + + have "D \ C \ A \ B \ A \ C \ D \ True \ C" + by (((match prems in I: "P \ Q" (cut) + and I': "P \ ?U" for P Q \ \rule mp [OF I' I[THEN conjunct1]]\)?), simp) + + have "A \ B \ A \ C \ C" + by (((match prems in I: "P \ Q" (cut) + and I': "P \ ?U" for P Q \ \rule mp [OF I' I[THEN conjunct1]]\)?, simp) | simp) + +end + +(* Testing inner focusing. This fails if we don't smash flex-flex pairs produced + by retrofitting. This needs to be done more carefully to avoid smashing + legitimate pairs.*) + +schematic_lemma "?A x \ A x" + apply (match concl in "H" for H \ \match concl in Y for Y \ \print_term Y\\) + apply assumption + done + +(* Ensure short-circuit after first match failure *) +lemma + assumes A: "P \ Q" + shows "P" + by ((match A in "P \ Q" \ \fail\ \ "?H" \ \-\) | simp add: A) + +lemma + assumes A: "D \ C" "A \ B" "A \ B" + shows "A" + apply ((match A in U: "P \ Q" (cut) and "P' \ Q'" for P Q P' Q' \ + \simp add: U\ \ "?H" \ \-\) | -) + apply (simp add: A) + done + + +subsection \Uses Tests\ + +ML \ + fun test_internal_fact ctxt factnm = + (case try (Proof_Context.get_thms ctxt) factnm of + NONE => () + | SOME _ => error "Found internal fact")\ + +method uses_test\<^sub>1 uses uses_test\<^sub>1_uses = \rule uses_test\<^sub>1_uses\ + +lemma assumes A shows A by (uses_test\<^sub>1 uses_test\<^sub>1_uses: assms) + +ML \test_internal_fact @{context} "uses_test\<^sub>1_uses"\ + +ML \test_internal_fact @{context} "Tests.uses_test\<^sub>1_uses"\ +ML \test_internal_fact @{context} "Tests.uses_test\<^sub>1.uses_test\<^sub>1_uses"\ + + +(* Testing term and fact passing in recursion *) + +method recursion_example for x :: bool uses facts = + \match (x) in + "A \ B" for A B \ \(recursion_example A facts: facts, recursion_example B facts: facts)\ + \ "?H" \ \match facts in U: "x" \ \insert U\\\ + +lemma + assumes asms: "A" "B" "C" "D" + shows "(A \ B) \ (C \ D)" + apply (recursion_example "(A \ B) \ (C \ D)" facts: asms) + apply simp + done + +(*Method.sections in existing method*) +method my_simp\<^sub>1 uses my_simp\<^sub>1_facts = \simp add: my_simp\<^sub>1_facts\ +lemma assumes A shows A by (my_simp\<^sub>1 my_simp\<^sub>1_facts: assms) + +(*Method.sections via Eisbach argument parser*) +method uses_test\<^sub>2 uses uses_test\<^sub>2_uses = \uses_test\<^sub>1 uses_test\<^sub>1_uses: uses_test\<^sub>2_uses\ +lemma assumes A shows A by (uses_test\<^sub>2 uses_test\<^sub>2_uses: assms) + + +subsection \Declaration Tests\ + +named_theorems declare_facts\<^sub>1 + +method declares_test\<^sub>1 declares declare_facts\<^sub>1 = \rule declare_facts\<^sub>1\ + +lemma assumes A shows A by (declares_test\<^sub>1 declare_facts\<^sub>1: 
assms) + +lemma assumes A[declare_facts\<^sub>1]: A shows A by declares_test\<^sub>1 + + +subsection \Rule Instantiation Tests\ + +method my_allE\<^sub>1 for x :: 'a and P :: "'a \ bool" = + \erule allE [where x = x and P = P]\ + +lemma "\x. Q x \ Q x" by (my_allE\<^sub>1 x Q) + +method my_allE\<^sub>2 for x :: 'a and P :: "'a \ bool" = + \erule allE [of P x]\ + +lemma "\x. Q x \ Q x" by (my_allE\<^sub>2 x Q) + +method my_allE\<^sub>3 for x :: 'a and P :: "'a \ bool" = + \match allE [where 'a = 'a] in X: "\(x :: 'a) P R. \x. P x \ (P x \ R) \ R" \ + \erule X [where x = x and P = P]\\ + +lemma "\x. Q x \ Q x" by (my_allE\<^sub>3 x Q) + +method my_allE\<^sub>4 for x :: 'a and P :: "'a \ bool" = + \match allE [where 'a = 'a] in X: "\(x :: 'a) P R. \x. P x \ (P x \ R) \ R" \ + \erule X [of x P]\\ + +lemma "\x. Q x \ Q x" by (my_allE\<^sub>4 x Q) + + +ML {* +structure Data = Generic_Data +( + type T = thm list; + val empty: T = []; + val extend = I; + fun merge data : T = Thm.merge_thms data; +); +*} + +local_setup \Local_Theory.add_thms_dynamic (@{binding test_dyn}, Data.get)\ + +setup \Context.theory_map (Data.put @{thms TrueI})\ + +method dynamic_thms_test = \rule test_dyn\ + +locale foo = + fixes A + assumes A : "A" +begin + +local_setup + \Local_Theory.declaration {pervasive = false, syntax = false} + (fn phi => Data.put (Morphism.fact phi @{thms A}))\ + +lemma A by dynamic_thms_test + +end + +end diff -r 5d90d301ad66 -r 54bea620e54f src/HOL/Eisbach/eisbach_antiquotations.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Eisbach/eisbach_antiquotations.ML Fri Apr 17 17:49:19 2015 +0200 @@ -0,0 +1,54 @@ +(* Title: eisbach_antiquotations.ML + Author: Daniel Matichuk, NICTA/UNSW + +ML antiquotations for Eisbach. +*) + +structure Eisbach_Antiquotations: sig end = +struct + +(** evaluate Eisbach method from ML **) + +val _ = + Theory.setup + (ML_Antiquotation.inline @{binding method} (* FIXME ML_Antiquotation.value (!?) *) + (Args.context -- Scan.lift (Args.mode "drop_cases") -- Scan.lift (Parse.position Parse.name) + >> (fn ((ctxt, drop_cases), nameref) => + let + val ((targs, (factargs, margs)), _) = Method_Closure.get_inner_method ctxt nameref; + + val has_factargs = not (null factargs); + + val (targnms, ctxt') = + fold_map (fn (Var ((x, _), _)) => ML_Context.variant x) targs ctxt; + val (margnms, ctxt'') = fold_map ML_Context.variant margs ctxt'; + val (factsnm, _) = ML_Context.variant "facts" ctxt''; + + val fn_header = + margnms + |> has_factargs ? 
append [factsnm] + |> append targnms + |> map (fn nm => "fn " ^ nm ^ " => ") + |> implode; + + val ML_context = ML_Context.struct_name ctxt ^ ".ML_context"; + val ml_inner = + ML_Syntax.atomic + ("Method_Closure.get_inner_method " ^ ML_context ^ + ML_Syntax.print_pair ML_Syntax.print_string ML_Syntax.print_position nameref ^ + "|> (fn ((targs, (_, margs)), text) => Method_Closure.eval_inner_method " ^ + ML_context ^ " ((targs, margs), text))"); + + val drop_cases_suffix = + if drop_cases then "#> (fn f => (fn ctxt => fn thms => f ctxt thms |> Seq.map snd))" + else ""; + + val factsnm = if has_factargs then factsnm else "[]"; + in + ML_Syntax.atomic + (fn_header ^ ml_inner ^ " " ^ ML_Syntax.print_list I targnms ^ " " ^ + factsnm ^ " " ^ + ML_Syntax.print_list I margnms ^ drop_cases_suffix) + end))); + +end; diff -r 5d90d301ad66 -r 54bea620e54f src/HOL/Eisbach/eisbach_rule_insts.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Eisbach/eisbach_rule_insts.ML Fri Apr 17 17:49:19 2015 +0200 @@ -0,0 +1,189 @@ +(* Title: eisbach_rule_insts.ML + Author: Daniel Matichuk, NICTA/UNSW + +Eisbach-aware variants of the "where" and "of" attributes. + +Alternate syntax for rule_insts.ML participates in token closures by +examining the behaviour of Rule_Insts.where_rule and instantiating token +values accordingly. Instantiations in re-interpretation are done with +Drule.cterm_instantiate. +*) + +structure Eisbach_Rule_Insts : sig end = +struct + +fun restore_tags thm = Thm.map_tags (K (Thm.get_tags thm)); + +fun add_thm_insts thm = + let + val thy = Thm.theory_of_thm thm; + val tyvars = Thm.fold_terms Term.add_tvars thm []; + val tyvars' = tyvars |> map (Logic.mk_term o Logic.mk_type o TVar); + + val tvars = Thm.fold_terms Term.add_vars thm []; + val tvars' = tvars |> map (Logic.mk_term o Var); + + val conj = + Logic.mk_conjunction_list (tyvars' @ tvars') |> Thm.global_cterm_of thy |> Drule.mk_term; + in + ((tyvars, tvars), Conjunction.intr thm conj) + end; + +fun get_thm_insts thm = + let + val (thm', insts) = Conjunction.elim thm; + + val insts' = insts + |> Drule.dest_term + |> Thm.term_of + |> Logic.dest_conjunction_list + |> map Logic.dest_term + |> (fn f => fold (fn t => fn (tys, ts) => + (case try Logic.dest_type t of + SOME T => (T :: tys, ts) + | NONE => (tys, t :: ts))) f ([], [])) + ||> rev + |>> rev; + in + (thm', insts') + end; + +fun instantiate_xis insts thm = + let + val tyvars = Thm.fold_terms Term.add_tvars thm []; + val tvars = Thm.fold_terms Term.add_vars thm []; + val cert = Thm.global_cterm_of (Thm.theory_of_thm thm); + val certT = Thm.global_ctyp_of (Thm.theory_of_thm thm); + + fun add_inst (xi, t) (Ts, ts) = + (case AList.lookup (op =) tyvars xi of + SOME S => ((certT (TVar (xi, S)), certT (Logic.dest_type t)) :: Ts, ts) + | NONE => + (case AList.lookup (op =) tvars xi of + SOME T => (Ts, (cert (Var (xi, T)), cert t) :: ts) + | NONE => error "indexname not found in thm")); + + val (cTinsts, cinsts) = fold add_inst insts ([], []); + in + (Thm.instantiate (cTinsts, []) thm + |> Drule.cterm_instantiate cinsts + COMP_INCR asm_rl) + |> Thm.adjust_maxidx_thm ~1 + |> restore_tags thm + end; + + +datatype rule_inst = + Named_Insts of ((indexname * string) * (term -> unit)) list +| Term_Insts of (indexname * term) list; + +fun embed_indexname ((xi,s),f) = + let + fun wrap_xi xi t = Logic.mk_conjunction (Logic.mk_term (Var (xi,fastype_of t)),Logic.mk_term t); + in ((xi,s),f o wrap_xi xi) end; + +fun unembed_indexname t = + let + val (t, t') = apply2 Logic.dest_term 
(Logic.dest_conjunction t); + val (xi, _) = Term.dest_Var t; + in (xi, t') end; + +fun read_where_insts toks = + let + val parser = + Parse.!!! + (Parse.and_list1 (Args.var -- (Args.$$$ "=" |-- Parse_Tools.name_term)) -- Parse.for_fixes) + --| Scan.ahead Parse.eof; + val (insts, fixes) = the (Scan.read Token.stopper parser toks); + + val insts' = + if forall (fn (_, v) => Parse_Tools.is_real_val v) insts + then Term_Insts (map (fn (_,t) => unembed_indexname (Parse_Tools.the_real_val t)) insts) + else Named_Insts (map (fn (xi, p) => embed_indexname + ((xi,Parse_Tools.the_parse_val p),Parse_Tools.the_parse_fun p)) insts); + in + (insts', fixes) + end; + +fun of_rule thm (args, concl_args) = + let + fun zip_vars _ [] = [] + | zip_vars (_ :: xs) (NONE :: rest) = zip_vars xs rest + | zip_vars ((x, _) :: xs) (SOME t :: rest) = (x, t) :: zip_vars xs rest + | zip_vars [] _ = error "More instantiations than variables in theorem"; + val insts = + zip_vars (rev (Term.add_vars (Thm.full_prop_of thm) [])) args @ + zip_vars (rev (Term.add_vars (Thm.concl_of thm) [])) concl_args; + in insts end; + +val inst = Args.maybe Parse_Tools.name_term; +val concl = Args.$$$ "concl" -- Args.colon; + +fun read_of_insts toks thm = + let + val parser = + Parse.!!! + ((Scan.repeat (Scan.unless concl inst) -- Scan.optional (concl |-- Scan.repeat inst) []) + -- Parse.for_fixes) --| Scan.ahead Parse.eof; + val ((insts, concl_insts), fixes) = + the (Scan.read Token.stopper parser toks); + + val insts' = + if forall (fn SOME t => Parse_Tools.is_real_val t | NONE => true) (insts @ concl_insts) + then + Term_Insts + (map_filter (Option.map (Parse_Tools.the_real_val #> unembed_indexname)) (insts @ concl_insts)) + + else + Named_Insts + (apply2 (map (Option.map (fn p => (Parse_Tools.the_parse_val p,Parse_Tools.the_parse_fun p)))) + (insts, concl_insts) + |> of_rule thm |> map ((fn (xi, (nm, tok)) => embed_indexname ((xi, nm), tok)))); + in + (insts', fixes) + end; + +fun read_instantiate_closed ctxt ((Named_Insts insts), fixes) thm = + let + val insts' = map (fn ((v, t), _) => ((v, Position.none), t)) insts; + + val (thm_insts, thm') = add_thm_insts thm + val (thm'', thm_insts') = + Rule_Insts.where_rule ctxt insts' fixes thm' + |> get_thm_insts; + + val tyinst = + ListPair.zip (fst thm_insts, fst thm_insts') |> map (fn ((xi, _), typ) => (xi, typ)); + val tinst = + ListPair.zip (snd thm_insts, snd thm_insts') |> map (fn ((xi, _), t) => (xi, t)); + + val _ = + map (fn ((xi, _), f) => + (case AList.lookup (op =) tyinst xi of + SOME typ => f (Logic.mk_type typ) + | NONE => + (case AList.lookup (op =) tinst xi of + SOME t => f t + | NONE => error "Lost indexname in instantiated theorem"))) insts; + in + (thm'' |> restore_tags thm) + end + | read_instantiate_closed _ ((Term_Insts insts), _) thm = instantiate_xis insts thm; + +val parse_all : Token.T list context_parser = Scan.lift (Scan.many Token.not_eof); + +val _ = + Theory.setup + (Attrib.setup @{binding "where"} (parse_all >> + (fn toks => Thm.rule_attribute (fn context => + read_instantiate_closed (Context.proof_of context) (read_where_insts toks)))) + "named instantiation of theorem"); + +val _ = + Theory.setup + (Attrib.setup @{binding "of"} (parse_all >> + (fn toks => Thm.rule_attribute (fn context => fn thm => + read_instantiate_closed (Context.proof_of context) (read_of_insts toks thm) thm))) + "positional instantiation of theorem"); + +end; diff -r 5d90d301ad66 -r 54bea620e54f src/HOL/Eisbach/match_method.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ 
b/src/HOL/Eisbach/match_method.ML Fri Apr 17 17:49:19 2015 +0200 @@ -0,0 +1,619 @@ +(* Title: match_method.ML + Author: Daniel Matichuk, NICTA/UNSW + +Setup for "match" proof method. It provides basic fact/term matching in +addition to premise/conclusion matching through Subgoal.focus, and binds +fact names from matches as well as term patterns within matches. +*) + +signature MATCH_METHOD = +sig + val focus_schematics: Proof.context -> Envir.tenv + val focus_params: Proof.context -> term list + (* FIXME proper ML interface for the main thing *) +end + +structure Match_Method : MATCH_METHOD = +struct + +(*Variant of filter_prems_tac with auxiliary configuration; + recovers premise order afterwards.*) +fun filter_prems_tac' ctxt prep pred a = + let + fun Then NONE tac = SOME tac + | Then (SOME tac) tac' = SOME (tac THEN' tac'); + fun thins H (tac, n, a, i) = + (case pred a H of + NONE => (tac, n + 1, a, i) + | SOME a' => (Then tac (rotate_tac n THEN' eresolve_tac ctxt [thin_rl]), 0, a', i + n)); + in + SUBGOAL (fn (goal, i) => + let val Hs = Logic.strip_assums_hyp (prep goal) in + (case fold thins Hs (NONE, 0, a, 0) of + (NONE, _, _, _) => no_tac + | (SOME tac, _, _, n) => tac i THEN rotate_tac (~ n) i) + end) + end; + + +datatype match_kind = + Match_Term of term Item_Net.T + | Match_Fact of thm Item_Net.T + | Match_Concl + | Match_Prems; + + +val aconv_net = Item_Net.init (op aconv) single; + +val parse_match_kind = + Scan.lift @{keyword "concl"} >> K Match_Concl || + Scan.lift @{keyword "prems"} >> K Match_Prems || + Scan.lift (@{keyword "("}) |-- Args.term --| Scan.lift (@{keyword ")"}) >> + (fn t => Match_Term (Item_Net.update t aconv_net)) || + Attrib.thms >> (fn thms => Match_Fact (fold Item_Net.update thms Thm.full_rules)); + + +fun nameable_match m = (case m of Match_Fact _ => true | Match_Prems => true | _ => false); +fun prop_match m = (case m of Match_Term _ => false | _ => true); + +val bound_term : (term, binding) Parse_Tools.parse_val parser = + Parse_Tools.parse_term_val Parse.binding; + +val fixes = + Parse.and_list1 (Scan.repeat1 bound_term -- + Scan.option (@{keyword "::"} |-- Parse.!!! Parse.typ) >> (fn (xs, T) => map (rpair T) xs)) + >> flat; + +val for_fixes = Scan.optional (@{keyword "for"} |-- fixes) []; + +fun pos_of dyn = + (case dyn of + Parse_Tools.Parse_Val (b, _) => Binding.pos_of b + | _ => raise Fail "Not a parse value"); + + +(*FIXME: Dynamic facts modify the background theory, so we have to resort + to token replacement for matched facts. 
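Roughly: a fact name bound by a match, e.g. U in
  match prems in U: "P \<and> Q" for P Q \<Rightarrow> \<open>rule U[THEN conjunct1]\<close>
is noted as an internal placeholder theorem while the body is read as a
closure; do_inst below then replaces the placeholder by the actually matched
premise(s) when the body is evaluated (illustrative sketch, cf. Tests.thy).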
*) +fun dynamic_fact ctxt = + bound_term -- Args.opt_attribs (Attrib.check_name ctxt); + +type match_args = {unify : bool, multi : bool, cut : bool}; + +val parse_match_args = + Scan.optional (Args.parens (Parse.enum1 "," + (Args.$$$ "unify" || Args.$$$ "multi" || Args.$$$ "cut"))) [] >> + (fn ss => + fold (fn s => fn {unify, multi, cut} => + (case s of + "unify" => {unify = true, multi = multi, cut = cut} + | "multi" => {unify = unify, multi = true, cut = cut} + | "cut" => {unify = unify, multi = multi, cut = true})) + ss {unify = false, multi = false, cut = false}); + +(*TODO: Shape holes in thms *) +fun parse_named_pats match_kind = + Args.context :|-- (fn ctxt => + Scan.lift (Parse.and_list1 (Scan.option (dynamic_fact ctxt --| Args.colon) :-- + (fn opt_dyn => + if is_none opt_dyn orelse nameable_match match_kind + then Parse_Tools.name_term -- parse_match_args + else + let val b = #1 (the opt_dyn) + in error ("Cannot bind fact name in term match" ^ Position.here (pos_of b)) end)) + -- for_fixes -- (@{keyword "\"} |-- Parse.token Parse.cartouche)) + >> (fn ((ts, fixes), cartouche) => + (case Token.get_value cartouche of + SOME (Token.Source src) => + let + val text = Method_Closure.read_inner_method ctxt src + (*TODO: Correct parse context for attributes?*) + val ts' = + map + (fn (b, (Parse_Tools.Real_Val v, match_args)) => + ((Option.map (fn (b, att) => + (Parse_Tools.the_real_val b, + map (Attrib.attribute ctxt) att)) b, match_args), v) + | _ => raise Fail "Expected closed term") ts + val fixes' = map (fn (p, _) => Parse_Tools.the_real_val p) fixes + in (ts', fixes', text) end + | SOME _ => error "Unexpected token value in match cartouche" + | NONE => + let + val fixes' = map (fn (pb, otyp) => (Parse_Tools.the_parse_val pb, otyp, NoSyn)) fixes; + val (fixes'', ctxt1) = Proof_Context.read_vars fixes' ctxt; + val (fix_nms, ctxt2) = Proof_Context.add_fixes fixes'' ctxt1; + + val ctxt3 = Proof_Context.set_mode Proof_Context.mode_schematic ctxt2; + + fun parse_term term = + if prop_match match_kind + then Syntax.parse_prop ctxt3 term + else Syntax.parse_term ctxt3 term; + + val pats = + map (fn (_, (term, _)) => parse_term (Parse_Tools.the_parse_val term)) ts + |> Syntax.check_terms ctxt3; + + val ctxt4 = fold Variable.declare_term pats ctxt3; + + val (Ts, ctxt5) = ctxt4 |> fold_map Proof_Context.inferred_param fix_nms; + + val real_fixes = map Free (fix_nms ~~ Ts); + + fun reject_extra_free (Free (x, _)) () = + if Variable.is_fixed ctxt5 x then () + else error ("Illegal use of free (unfixed) variable " ^ quote x) + | reject_extra_free _ () = (); + val _ = (fold o fold_aterms) reject_extra_free pats (); + + (*fun test_multi_bind {multi = multi, ...} pat = multi andalso + not (null (inter (op =) (map Free (Term.add_frees pat [])) real_fixes)) andalso + error "Cannot fix terms in multi-match. Use a schematic instead." + + val _ = map2 (fn pat => fn (_, (_, match_args)) => test_multi_bind match_args pat) pats ts*) + + val binds = + map (fn (b, _) => Option.map (fn (b, att) => (Parse_Tools.the_parse_val b, att)) b) ts; + + fun upd_ctxt (SOME (b, att)) pat (tms, ctxt) = + let + val ([nm], ctxt') = + Variable.variant_fixes [Name.internal (Binding.name_of b)] ctxt; + val abs_nms = Term.strip_all_vars pat; + + val param_thm = map (Drule.mk_term o Thm.cterm_of ctxt' o Free) abs_nms + |> Conjunction.intr_balanced + |> Drule.generalize ([], map fst abs_nms); + + val thm = + Thm.cterm_of ctxt' (Free (nm, propT)) + |> Drule.mk_term + |> not (null abs_nms) ? 
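(*if the pattern has !!-bound parameters, bundle them with the placeholder
      as a conjunction -- dest_internal_fact (below) takes this apart again*)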
Conjunction.intr param_thm + |> Drule.zero_var_indexes + |> Method_Closure.tag_free_thm; + + (*TODO: Preprocess attributes here?*) + + val (_, ctxt'') = Proof_Context.note_thmss "" [((b, []), [([thm], [])])] ctxt'; + in + (SOME (Thm.prop_of thm, map (Attrib.attribute ctxt) att) :: tms, ctxt'') + end + | upd_ctxt NONE _ (tms, ctxt) = (NONE :: tms, ctxt); + + val (binds, ctxt6) = ctxt5 + |> (fn ctxt => fold2 upd_ctxt binds pats ([], ctxt) |> apfst rev) + ||> Proof_Context.restore_mode ctxt; + + val (src, text) = Method_Closure.read_text_closure ctxt6 (Token.input_of cartouche); + + val morphism = + Variable.export_morphism ctxt6 + (ctxt + |> Token.declare_maxidx_src src + |> Variable.declare_maxidx (Variable.maxidx_of ctxt6)); + + val pats' = map (Term.map_types Type_Infer.paramify_vars #> Morphism.term morphism) pats; + val _ = ListPair.app (fn ((_, (Parse_Tools.Parse_Val (_, f), _)), t) => f t) (ts, pats'); + + val binds' = map (Option.map (fn (t, atts) => (Morphism.term morphism t, atts))) binds; + + val _ = + ListPair.app + (fn ((SOME ((Parse_Tools.Parse_Val (_, f), _)), _), SOME (t, _)) => f t + | ((NONE, _), NONE) => () + | _ => error "Mismatch between real and parsed bound variables") + (ts, binds'); + + val real_fixes' = map (Morphism.term morphism) real_fixes; + val _ = + ListPair.app (fn ((Parse_Tools.Parse_Val (_, f), _), t) => f t) (fixes, real_fixes'); + + val match_args = map (fn (_, (_, match_args)) => match_args) ts; + val binds'' = (binds' ~~ match_args) ~~ pats'; + + val src' = Token.transform_src morphism src; + val _ = Token.assign (SOME (Token.Source src')) cartouche; + in + (binds'', real_fixes', text) + end))); + + +fun parse_match_bodies match_kind = + Parse.enum1' "\" (parse_named_pats match_kind); + + +fun dest_internal_fact t = + (case try Logic.dest_conjunction t of + SOME (params, head) => + (params |> Logic.dest_conjunctions |> map Logic.dest_term, + head |> Logic.dest_term) + | NONE => ([], t |> Logic.dest_term)); + + +fun inst_thm ctxt env ts params thm = + let + val ts' = map (Envir.norm_term env) ts; + val insts = map (Thm.cterm_of ctxt) ts' ~~ map (Thm.cterm_of ctxt) params; + val tags = Thm.get_tags thm; + + (* + val Tinsts = Type.raw_matches ((map (fastype_of) params), (map (fastype_of) ts')) Vartab.empty + |> Vartab.dest + |> map (fn (xi, (S, typ)) => (certT (TVar (xi, S)), certT typ)) + *) + + val thm' = Drule.cterm_instantiate insts thm + (*|> Thm.instantiate (Tinsts, [])*) + |> Thm.map_tags (K tags); + in + thm' + end; + +fun do_inst fact_insts' env text ctxt = + let + val fact_insts = + map_filter + (fn ((((SOME ((_, head), att), _), _), _), thms) => SOME (head, (thms, att)) + | _ => NONE) fact_insts'; + + fun apply_attribute thm att ctxt = + let + val (opt_context', thm') = att (Context.Proof ctxt, thm) + in + (case thm' of + SOME _ => error "Rule attributes cannot be applied here" + | _ => the_default ctxt (Option.map Context.proof_of opt_context')) + end; + + fun apply_attributes atts thm = fold (apply_attribute thm) atts; + + (*TODO: What to do about attributes that raise errors?*) + val (fact_insts, ctxt') = + fold_map (fn (head, (thms, atts : attribute list)) => fn ctxt => + ((head, thms), fold (apply_attributes atts) thms ctxt)) fact_insts ctxt; + + fun try_dest_term thm = try (Thm.prop_of #> dest_internal_fact #> snd) thm; + + fun expand_fact thm = + the_default [thm] + (case try_dest_term thm of + SOME t_ident => AList.lookup (op aconv) fact_insts t_ident + | NONE => NONE); + + val morphism = + Morphism.term_morphism "do_inst.term" 
(Envir.norm_term env) $> + Morphism.fact_morphism "do_inst.fact" (maps expand_fact); + + val text' = Method.map_source (Token.transform_src morphism) text; + in + (text', ctxt') + end; + +fun DROP_CASES (tac: cases_tactic) : tactic = + tac #> Seq.map (fn (_, st) => st); + +fun prep_fact_pat ((x, args), pat) ctxt = + let + val ((params, pat'), ctxt') = Variable.focus pat ctxt; + val params' = map (Free o snd) params; + + val morphism = + Variable.export_morphism ctxt' + (ctxt |> Variable.declare_maxidx (Variable.maxidx_of ctxt')); + val pat'' :: params'' = map (Morphism.term morphism) (pat' :: params'); + + fun prep_head (t, att) = (dest_internal_fact t, att); + in + ((((Option.map prep_head x, args), params''), pat''), ctxt') + end; + +fun match_filter_env ctxt fixes (ts, params) thm env = + let + val param_vars = map Term.dest_Var params; + val params' = map (Envir.lookup env) param_vars; + + val fixes_vars = map Term.dest_Var fixes; + + val tenv = Envir.term_env env; + val all_vars = Vartab.keys tenv; + + val extra_vars = subtract (fn ((xi, _), xi') => xi = xi') fixes_vars all_vars; + + val tenv' = Envir.term_env env + |> fold (Vartab.delete_safe) extra_vars; + + val env' = + Envir.Envir {maxidx = Envir.maxidx_of env, tenv = tenv', tyenv = Envir.type_env env}; + + val all_params_bound = forall (fn SOME (Var _) => true | _ => false) params'; + in + if all_params_bound + then SOME (case ts of SOME ts => inst_thm ctxt env params ts thm | _ => thm, env') + else NONE + end; + + +(* Slightly hacky way of uniquely identifying focus premises *) +val prem_idN = "premise_id"; + +fun prem_id_eq ((id, _ : thm), (id', _ : thm)) = id = id'; + +val prem_rules : (int * thm) Item_Net.T = + Item_Net.init prem_id_eq (single o Thm.full_prop_of o snd); + +fun raw_thm_to_id thm = + (case Properties.get (Thm.get_tags thm) prem_idN of NONE => NONE | SOME id => Int.fromString id) + |> the_default ~1; + +structure Focus_Data = Proof_Data +( + type T = + (int * (int * thm) Item_Net.T) * (*prems*) + Envir.tenv * (*schematics*) + term list (*params*) + fun init _ : T = ((0, prem_rules), Vartab.empty, []) +); + + +(* focus prems *) + +val focus_prems = #1 o Focus_Data.get; + +fun add_focus_prem prem = + (Focus_Data.map o @{apply 3(1)}) (fn (next, net) => + (next + 1, Item_Net.update (next, Thm.tag_rule (prem_idN, string_of_int next) prem) net)); + +fun remove_focus_prem thm = + (Focus_Data.map o @{apply 3(1)} o apsnd) + (Item_Net.remove (raw_thm_to_id thm, thm)); + +(*TODO: Preliminary analysis to see if we're trying to clear in a non-focus match?*) +val _ = + Theory.setup + (Attrib.setup @{binding "thin"} + (Scan.succeed + (Thm.declaration_attribute (fn th => Context.mapping I (remove_focus_prem th)))) + "clear premise inside match method"); + + +(* focus schematics *) + +val focus_schematics = #2 o Focus_Data.get; + +fun add_focus_schematics cterms = + (Focus_Data.map o @{apply 3(2)}) + (fold (fn (Var (xi, T), t) => Vartab.update_new (xi, (T, t))) + (map (apply2 Thm.term_of) cterms)); + + +(* focus params *) + +val focus_params = #3 o Focus_Data.get; + +fun add_focus_params params = + (Focus_Data.map o @{apply 3(3)}) + (append (map (fn (_, ct) => Thm.term_of ct) params)); + + +(* Add focus elements as proof data *) +fun augment_focus + ({context, params, prems, asms, concl, schematics} : Subgoal.focus) : Subgoal.focus = + let + val context' = context + |> add_focus_params params + |> add_focus_schematics (snd schematics) + |> fold add_focus_prem (rev prems); + in + {context = context', + params = params, + prems = 
prems, + concl = concl, + schematics = schematics, + asms = asms} + end; + + +(* Fix schematics in the goal *) +fun focus_concl ctxt i goal = + let + val ({context, concl, params, prems, asms, schematics}, goal') = + Subgoal.focus_params ctxt i goal; + + val ((_, schematic_terms), context') = + Variable.import_inst true [Thm.term_of concl] context + |>> Thm.certify_inst (Thm.theory_of_thm goal'); + + val goal'' = Thm.instantiate ([], schematic_terms) goal'; + val concl' = Thm.instantiate_cterm ([], schematic_terms) concl; + val (schematic_types, schematic_terms') = schematics; + val schematics' = (schematic_types, schematic_terms @ schematic_terms'); + in + ({context = context', concl = concl', params = params, prems = prems, + schematics = schematics', asms = asms} : Subgoal.focus, goal'') + end; + +exception MATCH_CUT; + +val raise_match : (thm * Envir.env) Seq.seq = Seq.make (fn () => raise MATCH_CUT); + +fun match_facts ctxt fixes prop_pats get = + let + fun is_multi (((_, x : match_args), _), _) = #multi x; + fun is_unify (_, x : match_args) = #unify x; + fun is_cut (_, x : match_args) = #cut x; + + fun match_thm (((x, params), pat), thm) env = + let + fun try_dest_term term = the_default term (try Logic.dest_term term); + + val pat' = pat |> Envir.norm_term env |> try_dest_term; + + val item' = Thm.prop_of thm |> try_dest_term; + val ts = Option.map (fst o fst) (fst x); + (*FIXME: Do we need to move one of these patterns above the other?*) + + val matches = + (if is_unify x + then Unify.smash_unifiers (Context.Proof ctxt) [(pat', item') ] env + else Unify.matchers (Context.Proof ctxt) [(pat', item')]) + |> Seq.map_filter (fn env' => + match_filter_env ctxt fixes (ts, params) thm (Envir.merge (env, env'))) + |> is_cut x ? (fn t => Seq.make (fn () => + Option.map (fn (x, _) => (x, raise_match)) (Seq.pull t))); + in + matches + end; + + val all_matches = + map (fn pat => (pat, get (snd pat))) prop_pats + |> map (fn (pat, matches) => (pat, map (fn thm => match_thm (pat, thm)) matches)); + + fun proc_multi_match (pat, thmenvs) (pats, env) = + if is_multi pat then + let + val empty = ([], Envir.empty ~1); + + val thmenvs' = + Seq.EVERY (map (fn e => fn (thms, env) => + Seq.append (Seq.map (fn (thm, env') => (thm :: thms, env')) (e env)) + (Seq.single (thms, env))) thmenvs) empty; + in + Seq.map_filter (fn (fact, env') => + if not (null fact) then SOME ((pat, fact) :: pats, env') else NONE) thmenvs' + end + else + fold (fn e => Seq.append (Seq.map (fn (thm, env') => + ((pat, [thm]) :: pats, env')) (e env))) thmenvs Seq.empty; + + val all_matches = + Seq.EVERY (map proc_multi_match all_matches) ([], Envir.empty ~1) + |> Seq.filter (fn (_, e) => forall (is_some o Envir.lookup e o Term.dest_Var) fixes); + + fun map_handle seq = Seq.make (fn () => + (case (Seq.pull seq handle MATCH_CUT => NONE) of + SOME (x, seq') => SOME (x, map_handle seq') + | NONE => NONE)); + in + map_handle all_matches + end; + +fun real_match using ctxt fixes m text pats goal = + let + fun make_fact_matches ctxt get = + let + val (pats', ctxt') = fold_map prep_fact_pat pats ctxt; + in + match_facts ctxt' fixes pats' get + |> Seq.map (fn (fact_insts, env) => do_inst fact_insts env text ctxt') + end; + + (*TODO: Slightly hacky re-use of fact match implementation in plain term matching *) + fun make_term_matches ctxt get = + let + val pats' = + map + (fn ((SOME _, _), _) => error "Cannot name term match" + | ((_, x), t) => (((NONE, x), []), Logic.mk_term t)) pats; + + val thm_of = Drule.mk_term o Thm.cterm_of ctxt; + fun 
get' t = get (Logic.dest_term t) |> map thm_of; + in + match_facts ctxt fixes pats' get' + |> Seq.map (fn (fact_insts, env) => do_inst fact_insts env text ctxt) + end; + in + (case m of + Match_Fact net => + Seq.map (fn (text, ctxt') => Method_Closure.method_evaluate text ctxt' using goal) + (make_fact_matches ctxt (Item_Net.retrieve net)) + | Match_Term net => + Seq.map (fn (text, ctxt') => Method_Closure.method_evaluate text ctxt' using goal) + (make_term_matches ctxt (Item_Net.retrieve net)) + | match_kind => + if Thm.no_prems goal then Seq.empty + else + let + fun focus_cases f g = + (case match_kind of + Match_Prems => f + | Match_Concl => g + | _ => raise Fail "Match kind fell through"); + + val ({context = focus_ctxt, params, asms, concl, ...}, focused_goal) = + focus_cases (Subgoal.focus_prems) (focus_concl) ctxt 1 goal + |>> augment_focus; + + val texts = + focus_cases + (fn _ => + make_fact_matches focus_ctxt + (Item_Net.retrieve (focus_prems focus_ctxt |> snd) #> + order_list)) + (fn _ => + make_term_matches focus_ctxt (fn _ => [Logic.strip_imp_concl (Thm.term_of concl)])) + (); + + (*TODO: How to handle cases? *) + + fun do_retrofit inner_ctxt goal' = + let + val cleared_prems = + subtract (eq_fst (op =)) + (focus_prems inner_ctxt |> snd |> Item_Net.content) + (focus_prems focus_ctxt |> snd |> Item_Net.content) + |> map (fn (_, thm) => + Thm.hyps_of thm + |> (fn [hyp] => hyp | _ => error "Prem should have only one hyp")); + + val n_subgoals = Thm.nprems_of goal'; + fun prep_filter t = + Term.subst_bounds (map (Thm.term_of o snd) params |> rev, Term.strip_all_body t); + fun filter_test prems t = + if member (op =) prems t then SOME (remove1 (op aconv) t prems) else NONE; + in + Subgoal.retrofit inner_ctxt ctxt params asms 1 goal' goal |> + (if n_subgoals = 0 orelse null cleared_prems then I + else + Seq.map (Goal.restrict 1 n_subgoals) + #> Seq.maps (ALLGOALS (fn i => + DETERM (filter_prems_tac' ctxt prep_filter filter_test cleared_prems i))) + #> Seq.map (Goal.unrestrict 1)) + end; + + fun apply_text (text, ctxt') = + let + val goal' = + DROP_CASES (Method_Closure.method_evaluate text ctxt' using) focused_goal + |> Seq.maps (DETERM (do_retrofit ctxt')) + |> Seq.map (fn goal => ([]: cases, goal)) + in goal' end; + in + Seq.map apply_text texts + end) + end; + +val match_parser = + parse_match_kind :-- (fn kind => Scan.lift @{keyword "in"} |-- parse_match_bodies kind) >> + (fn (matches, bodies) => fn ctxt => fn using => fn goal => + if Method_Closure.is_dummy goal then Seq.empty + else + let + fun exec (pats, fixes, text) goal = + let + val ctxt' = fold Variable.declare_term fixes ctxt + |> fold (fn (_, t) => Variable.declare_term t) pats; (*Is this a good idea? We really only care about the maxidx*) + in + real_match using ctxt' fixes matches text pats goal + end; + in + Seq.FIRST (map exec bodies) goal + |> Seq.flat + end); + +val _ = + Theory.setup + (Method.setup @{binding match} + (match_parser >> (fn m => fn ctxt => METHOD_CASES (m ctxt))) + "structural analysis/matching on goals"); + +end; diff -r 5d90d301ad66 -r 54bea620e54f src/HOL/Eisbach/method_closure.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Eisbach/method_closure.ML Fri Apr 17 17:49:19 2015 +0200 @@ -0,0 +1,344 @@ +(* Title: method_closure.ML + Author: Daniel Matichuk, NICTA/UNSW + +Facilities for treating method syntax as a closure, with abstraction +over terms, facts and other methods. 
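+
+For example (an illustrative sketch, assuming a HOL context such as the
+Examples theory of this session), a definition like
+
+  method elim_spec for x :: 'a uses facts methods fallback =
+    \<open>drule spec[where x = x] | rule facts | fallback\<close>
+
+abstracts over a term ("for"), a list of facts ("uses") and another method
+("methods") at once; the body is closed over its syntactic context at
+definition time and re-interpreted with the concrete arguments at each
+application.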
+ +The 'method' command allows to define new proof methods by combining +existing ones with their usual syntax. +*) + +signature METHOD_CLOSURE = +sig + val is_dummy: thm -> bool + val tag_free_thm: thm -> thm + val free_aware_rule_attribute: thm list -> (Context.generic -> thm -> thm) -> Thm.attribute + val read_inner_method: Proof.context -> Token.src -> Method.text + val read_text_closure: Proof.context -> Input.source -> Token.src * Method.text + val method_evaluate: Method.text -> Proof.context -> Method.method + val get_inner_method: Proof.context -> string * Position.T -> + (term list * (string list * string list)) * Method.text + val eval_inner_method: Proof.context -> (term list * string list) * Method.text -> + term list -> (string * thm list) list -> Method.method list -> + Proof.context -> Method.method + val method_definition: binding -> (binding * typ option * mixfix) list -> + binding list -> binding list -> binding list -> Input.source -> local_theory -> local_theory + val method_definition_cmd: binding -> (binding * string option * mixfix) list -> + binding list -> binding list -> binding list -> Input.source -> local_theory -> local_theory +end; + +structure Method_Closure: METHOD_CLOSURE = +struct + +(* context data *) + +structure Data = Generic_Data +( + type T = + ((term list * (string list * string list)) * Method.text) Symtab.table; + val empty: T = Symtab.empty; + val extend = I; + fun merge (methods1,methods2) : T = + (Symtab.merge (K true) (methods1, methods2)); +); + +val get_methods = Data.get o Context.Proof; +val map_methods = Data.map; + + +structure Local_Data = Proof_Data +( + type T = + Method.method Symtab.table * (*dynamic methods*) + (term list -> Proof.context -> Method.method) (*recursive method*); + fun init _ : T = (Symtab.empty, fn _ => fn _ => Method.fail); +); + +fun lookup_dynamic_method full_name ctxt = + (case Symtab.lookup (#1 (Local_Data.get ctxt)) full_name of + SOME m => m + | NONE => error ("Illegal use of internal Eisbach method: " ^ quote full_name)); + +val update_dynamic_method = Local_Data.map o apfst o Symtab.update; + +val get_recursive_method = #2 o Local_Data.get; +val put_recursive_method = Local_Data.map o apsnd o K; + + +(* free thm *) + +fun is_dummy thm = + (case try Logic.dest_term (Thm.concl_of (perhaps (try Goal.conclude) thm)) of + NONE => false + | SOME t => Term.is_dummy_pattern t); + +val free_thmN = "Method_Closure.free_thm"; +fun tag_free_thm thm = Thm.tag_rule (free_thmN, "") thm; + +val dummy_free_thm = tag_free_thm Drule.dummy_thm; + +fun is_free_thm thm = Properties.defined (Thm.get_tags thm) free_thmN; + +fun is_free_fact [thm] = is_free_thm thm + | is_free_fact _ = false; + +fun free_aware_rule_attribute args f = + Thm.rule_attribute (fn context => fn thm => + if exists is_free_thm (thm :: args) then dummy_free_thm + else f context thm); + + +(* thm semantics for combined methods with internal parser. Simulates outer syntax parsing. *) +(* Creates closures for each combined method while parsing, based on the parse context *) + +fun read_inner_method ctxt src = + let + val toks = Token.args_of_src src; + val parser = Parse.!!! 
(Method.parser' ctxt 0 --| Scan.ahead Parse.eof); + in + (case Scan.read Token.stopper parser toks of + SOME (method_text, _) => method_text + | NONE => error ("Failed to parse method" ^ Position.here (#2 (Token.name_of_src src)))) + end; + +fun read_text_closure ctxt input = + let + (*tokens*) + val keywords = Thy_Header.get_keywords' ctxt; + val toks = + Input.source_explode input + |> Token.read_no_commands keywords (Scan.one Token.not_eof); + val _ = + toks |> List.app (fn tok => + if Token.keyword_with Symbol.is_ascii_identifier tok then + Context_Position.report ctxt (Token.pos_of tok) Markup.keyword1 + else ()); + + (*source closure*) + val src = + Token.src ("", Input.pos_of input) toks + |> Token.init_assignable_src; + val method_text = read_inner_method ctxt src; + val method_text' = Method.map_source (Method.method_closure ctxt) method_text; + val src' = Token.closure_src src; + in (src', method_text') end; + +val parse_method = + Args.context -- Scan.lift (Parse.token Parse.cartouche) >> (fn (ctxt, tok) => + (case Token.get_value tok of + NONE => + let + val (src, text) = read_text_closure ctxt (Token.input_of tok); + val _ = Token.assign (SOME (Token.Source src)) tok; + in text end + | SOME (Token.Source src) => read_inner_method ctxt src + | SOME _ => + error ("Unexpected inner token value for method cartouche" ^ + Position.here (Token.pos_of tok)))); + +fun method_evaluate text ctxt : Method.method = fn facts => fn st => + if is_dummy st then Seq.empty + else Method.evaluate text (Config.put Method.closure false ctxt) facts st; + + +fun parse_term_args args = + Args.context :|-- (fn ctxt => + let + fun parse T = + (if T = propT then Syntax.parse_prop ctxt else Syntax.parse_term ctxt) + #> Type.constraint (Type_Infer.paramify_vars T); + + fun do_parse' T = + Parse_Tools.name_term >> + (fn Parse_Tools.Parse_Val (s, f) => (parse T s, f) + | Parse_Tools.Real_Val t' => (t', K ())); + + fun do_parse (Var (_, T)) = do_parse' T + | do_parse (Free (_, T)) = do_parse' T + | do_parse t = error ("Unexpected method parameter: " ^ Syntax.string_of_term ctxt t); + + fun rep [] x = Scan.succeed [] x + | rep (t :: ts) x = (do_parse t -- rep ts >> op ::) x; + + fun check ts = + let + val (ts, fs) = split_list ts; + val ts' = Syntax.check_terms ctxt ts |> Variable.polymorphic ctxt; + val _ = ListPair.app (fn (f, t) => f t) (fs, ts'); + in ts' end; + in Scan.lift (rep args) >> check end); + +fun match_term_args ctxt args ts = + let + val match_types = Sign.typ_match (Proof_Context.theory_of ctxt) o apply2 Term.fastype_of; + val tyenv = fold match_types (args ~~ ts) Vartab.empty; + val tenv = + fold (fn ((xi, T), t) => Vartab.update_new (xi, (T, t))) + (map Term.dest_Var args ~~ ts) Vartab.empty; + in Envir.Envir {maxidx = ~1, tenv = tenv, tyenv = tyenv} end; + +fun check_attrib ctxt attrib = + let + val name = Binding.name_of attrib; + val pos = Binding.pos_of attrib; + val named_thm = Named_Theorems.check ctxt (name, pos); + val parser: Method.modifier parser = + Args.$$$ name -- Args.colon >> + K {init = I, attribute = Named_Theorems.add named_thm, pos = pos}; + in (named_thm, parser) end; + + +fun instantiate_text env text = + let val morphism = Morphism.term_morphism "instantiate_text" (Envir.norm_term env); + in Method.map_source (Token.transform_src morphism) text end; + +fun evaluate_dynamic_thm ctxt name = + (case (try (Named_Theorems.get ctxt) name) of + SOME thms => thms + | NONE => Proof_Context.get_thms ctxt name); + + +fun evaluate_named_theorems ctxt = + (Method.map_source o 
Token.map_values) + (fn Token.Fact (SOME name, _) => + Token.Fact (SOME name, evaluate_dynamic_thm ctxt name) + | x => x); + +fun evaluate_method_def fix_env raw_text ctxt = + let + val text = raw_text + |> instantiate_text fix_env + |> evaluate_named_theorems ctxt; + in method_evaluate text ctxt end; + +fun setup_local_method binding lthy = + let + val full_name = Local_Theory.full_name lthy binding; + in + lthy + |> update_dynamic_method (full_name, Method.fail) + |> Method.local_setup binding (Scan.succeed (lookup_dynamic_method full_name)) "(internal)" + end; + +fun setup_local_fact binding = Named_Theorems.declare binding ""; + +fun parse_method_args method_names = + let + fun bind_method (name, text) ctxt = + update_dynamic_method (name, method_evaluate text ctxt) ctxt; + + fun do_parse t = parse_method >> pair t; + fun rep [] x = Scan.succeed [] x + | rep (t :: ts) x = (do_parse t -- rep ts >> op ::) x; + in rep method_names >> fold bind_method end; + + +(* FIXME redundant!? -- base name of binding is not changed by usual morphisms*) +fun Morphism_name phi name = + Morphism.binding phi (Binding.make (name, Position.none)) |> Binding.name_of; + +fun add_method binding ((fixes, declares, methods), text) lthy = + lthy |> + Local_Theory.declaration {syntax = false, pervasive = true} (fn phi => + map_methods + (Symtab.update (Local_Theory.full_name lthy binding, + (((map (Morphism.term phi) fixes), + (map (Morphism_name phi) declares, map (Morphism_name phi) methods)), + Method.map_source (Token.transform_src phi) text)))); + +fun get_inner_method ctxt (xname, pos) = + let + val name = Method.check_name ctxt (xname, pos); + in + (case Symtab.lookup (get_methods ctxt) name of + SOME entry => entry + | NONE => error ("Unknown Eisbach method: " ^ quote name ^ Position.here pos)) + end; + +fun eval_inner_method ctxt0 header fixes attribs methods = + let + val ((term_args, hmethods), text) = header; + + fun match fixes = (* FIXME proper context!? 
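-- roughly, the formal term parameters of the method (term_args) are matched
   against the concrete arguments supplied at the call site, e.g. for a method
   declared with "for x :: 'a" an application (m y) yields an environment
   sending x to y, which instantiate_text applies to the stored body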
*) + (case Seq.pull (Unify.matchers (Context.Proof ctxt0) (term_args ~~ fixes)) of + SOME (env, _) => env + | NONE => error "Couldn't match term arguments"); + + fun add_thms (name, thms) = + fold (Context.proof_map o Named_Theorems.add_thm name) thms; + + val setup_ctxt = fold add_thms attribs + #> fold update_dynamic_method (hmethods ~~ methods) + #> put_recursive_method (fn fixes => evaluate_method_def (match fixes) text); + in + fn ctxt => evaluate_method_def (match fixes) text (setup_ctxt ctxt) + end; + +fun gen_method_definition prep_vars name vars uses attribs methods body lthy = + let + val (uses_nms, lthy1) = lthy + |> Proof_Context.concealed + |> Local_Theory.open_target |-> Proof_Context.private_scope + |> Local_Theory.map_background_naming (Name_Space.add_path (Binding.name_of name)) + |> Config.put Method.old_section_parser true + |> fold setup_local_method methods + |> fold_map setup_local_fact uses; + + val ((xs, Ts), lthy2) = lthy1 + |> prep_vars vars |-> Proof_Context.add_fixes + |-> (fn xs => fold_map Proof_Context.inferred_param xs #>> pair xs); + + val term_args = map Free (xs ~~ Ts); + val (named_thms, modifiers) = map (check_attrib lthy2) (attribs @ uses) |> split_list; + val self_name :: method_names = map (Local_Theory.full_name lthy2) (name :: methods); + + fun parser args eval = + apfst (Config.put_generic Method.old_section_parser true) #> + (parse_term_args args --| + Method.sections modifiers -- + (*Scan.depend (fn context => Scan.succeed () >> (K (fold XNamed_Theorems.empty uses_nms context, ()))) --*) (* FIXME *) + parse_method_args method_names >> eval); + + val lthy3 = lthy2 + |> Method.local_setup (Binding.make (Binding.name_of name, Position.none)) + (parser term_args + (fn (fixes, decl) => fn ctxt => get_recursive_method ctxt fixes (decl ctxt))) "(internal)"; + + val (src, text) = read_text_closure lthy3 body; + + val morphism = + Variable.export_morphism lthy3 + (lthy + |> Proof_Context.transfer (Proof_Context.theory_of lthy3) + |> Token.declare_maxidx_src src + |> Variable.declare_maxidx (Variable.maxidx_of lthy3)); + + val text' = Method.map_source (Token.transform_src morphism) text; + val term_args' = map (Morphism.term morphism) term_args; + + fun real_exec ts ctxt = + evaluate_method_def (match_term_args ctxt term_args' ts) text' ctxt; + val real_parser = + parser term_args' (fn (fixes, decl) => fn ctxt => + real_exec fixes (put_recursive_method real_exec (decl ctxt))); + in + lthy3 + |> Local_Theory.close_target + |> Proof_Context.restore_naming lthy + |> Method.local_setup name real_parser "(defined in Eisbach)" + |> add_method name ((term_args', named_thms, method_names), text') + end; + +val method_definition = gen_method_definition Proof_Context.cert_vars; +val method_definition_cmd = gen_method_definition Proof_Context.read_vars; + +val _ = + Outer_Syntax.local_theory @{command_keyword method} "Eisbach method definition" + (Parse.binding -- Parse.for_fixes -- + ((Scan.optional (@{keyword "uses"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) []) -- + (Scan.optional (@{keyword "declares"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) [])) -- + (Scan.optional (@{keyword "methods"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) []) -- + Parse.!!! 
(@{keyword "="} |-- Parse.token Parse.cartouche) + >> (fn ((((name, vars), (uses, attribs)), methods), cartouche) => + method_definition_cmd name vars uses attribs methods (Token.input_of cartouche))); +end; diff -r 5d90d301ad66 -r 54bea620e54f src/HOL/Eisbach/parse_tools.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Eisbach/parse_tools.ML Fri Apr 17 17:49:19 2015 +0200 @@ -0,0 +1,49 @@ +(* Title: parse_tools.ML + Author: Daniel Matichuk, NICTA/UNSW + +Simple tools for deferred stateful token values. +*) + +signature PARSE_TOOLS = +sig + datatype ('a, 'b) parse_val = + Real_Val of 'a + | Parse_Val of 'b * ('a -> unit) + + val parse_term_val : 'b parser -> (term, 'b) parse_val parser + + val name_term : (term, string) parse_val parser + val is_real_val : ('a, 'b) parse_val -> bool + + val the_real_val : ('a, 'b) parse_val -> 'a + val the_parse_val : ('a, 'b) parse_val -> 'b + val the_parse_fun : ('a, 'b) parse_val -> ('a -> unit) +end; + +structure Parse_Tools: PARSE_TOOLS = +struct + +datatype ('a, 'b) parse_val = + Real_Val of 'a +| Parse_Val of 'b * ('a -> unit); + +fun parse_term_val scan = + Scan.ahead Parse.not_eof -- Scan.ahead (Scan.option Args.internal_term) -- scan >> + (fn ((_, SOME t), _) => Real_Val t + | ((tok, NONE), v) => Parse_Val (v, fn t => Token.assign (SOME (Token.Term t)) tok)); + +val name_term = parse_term_val Args.name_inner_syntax; + +fun is_real_val (Real_Val _) = true + | is_real_val _ = false; + +fun the_real_val (Real_Val t) = t + | the_real_val _ = raise Fail "Expected close parsed value"; + +fun the_parse_val (Parse_Val (b, _)) = b + | the_parse_val _ = raise Fail "Expected open parsed value"; + +fun the_parse_fun (Parse_Val (_, f)) = f + | the_parse_fun _ = raise Fail "Expected open parsed value"; + +end; diff -r 5d90d301ad66 -r 54bea620e54f src/HOL/ROOT --- a/src/HOL/ROOT Fri Apr 17 16:54:25 2015 +0200 +++ b/src/HOL/ROOT Fri Apr 17 17:49:19 2015 +0200 @@ -631,6 +631,15 @@ "root.tex" "style.tex" +session "HOL-Eisbach" in Eisbach = HOL + + description {* + The Eisbach proof method language and "match" method. + *} + theories + Eisbach + Tests + Examples + session "HOL-SET_Protocol" in SET_Protocol = HOL + description {* Verification of the SET Protocol.