--- a/src/FOL/FOL.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/FOL/FOL.thy Wed Oct 29 19:23:32 2014 +0100
@@ -12,8 +12,6 @@
ML_file "~~/src/Provers/classical.ML"
ML_file "~~/src/Provers/blast.ML"
ML_file "~~/src/Provers/clasimp.ML"
-ML_file "~~/src/Tools/induct.ML"
-ML_file "~~/src/Tools/case_product.ML"
subsection {* The classical axiom *}
@@ -181,8 +179,6 @@
open Basic_Classical;
*}
-setup Cla.setup
-
(*Propositional rules*)
lemmas [intro!] = refl TrueI conjI disjCI impI notI iffI
and [elim!] = conjE disjE impCE FalseE iffCE
@@ -209,8 +205,6 @@
val blast_tac = Blast.blast_tac;
*}
-setup Blast.setup
-
lemma ex1_functional: "[| EX! z. P(a,z); P(a,b); P(a,c) |] ==> b = c"
by blast
@@ -344,14 +338,12 @@
|> simpset_of;
*}
-setup {* map_theory_simpset (put_simpset FOL_ss) *}
-
-setup "Simplifier.method_setup Splitter.split_modifiers"
-setup Splitter.setup
-setup clasimp_setup
+setup {*
+ map_theory_simpset (put_simpset FOL_ss) #>
+ Simplifier.method_setup Splitter.split_modifiers
+*}
ML_file "~~/src/Tools/eqsubst.ML"
-setup EqSubst.setup
subsection {* Other simple lemmas *}
@@ -418,6 +410,7 @@
text {* Method setup. *}
+ML_file "~~/src/Tools/induct.ML"
ML {*
structure Induct = Induct
(
@@ -431,10 +424,9 @@
);
*}
-setup Induct.setup
declare case_split [cases type: o]
-setup Case_Product.setup
+ML_file "~~/src/Tools/case_product.ML"
hide_const (open) eq
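The FOL hunks above show the pattern applied throughout this patch: a tool's ML file no longer exports a value setup : theory -> theory for theories to invoke with an explicit "setup Tool.setup" command; instead it registers itself via Theory.setup as a side effect of being loaded, and the ML_file command is moved to the point where the registration is needed, so a theory can neither forget the call nor run it twice. A minimal sketch of the idiom, with My_Tool, my_attrib, my_add and my_del as illustrative placeholders rather than names from this patch:

  (* old style, inside my_tool.ML *)
  val setup =
    Attrib.setup @{binding my_attrib} (Attrib.add_del my_add my_del)
      "declaration of my_attrib rules";
  (* old style, in the theory:  ML_file "my_tool.ML"  ...  setup My_Tool.setup *)

  (* new style, inside my_tool.ML: runs when the file is loaded *)
  val _ =
    Theory.setup
      (Attrib.setup @{binding my_attrib} (Attrib.add_del my_add my_del)
        "declaration of my_attrib rules");
  (* new style, in the theory:  ML_file "my_tool.ML"  with no separate setup command *)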
--- a/src/FOL/IFOL.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/FOL/IFOL.thy Wed Oct 29 19:23:32 2014 +0100
@@ -591,7 +591,6 @@
open Hypsubst;
*}
-setup hypsubst_setup
ML_file "intprover.ML"
--- a/src/HOL/Algebra/Ring.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Algebra/Ring.thy Wed Oct 29 19:23:32 2014 +0100
@@ -391,10 +391,14 @@
ML_file "ringsimp.ML"
-setup Algebra.attrib_setup
+attribute_setup algebra = {*
+ Scan.lift ((Args.add >> K true || Args.del >> K false) --| Args.colon || Scan.succeed true)
+ -- Scan.lift Args.name -- Scan.repeat Args.term
+ >> (fn ((b, n), ts) => if b then Ringsimp.add_struct (n, ts) else Ringsimp.del_struct (n, ts))
+*} "theorems controlling algebra method"
method_setup algebra = {*
- Scan.succeed (SIMPLE_METHOD' o Algebra.algebra_tac)
+ Scan.succeed (SIMPLE_METHOD' o Ringsimp.algebra_tac)
*} "normalisation of algebraic structure"
lemmas (in ring) ring_simprules
--- a/src/HOL/Algebra/ringsimp.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Algebra/ringsimp.ML Wed Oct 29 19:23:32 2014 +0100
@@ -3,17 +3,17 @@
Normalisation method for locales ring and cring.
*)
-signature ALGEBRA =
+signature RINGSIMP =
sig
val print_structures: Proof.context -> unit
- val attrib_setup: theory -> theory
+ val add_struct: string * term list -> attribute
+ val del_struct: string * term list -> attribute
val algebra_tac: Proof.context -> int -> tactic
end;
-structure Algebra: ALGEBRA =
+structure Ringsimp: RINGSIMP =
struct
-
(** Theory and context data **)
fun struct_eq ((s1: string, ts1), (s2, ts2)) =
@@ -56,7 +56,7 @@
(** Attribute **)
-fun add_struct_thm s =
+fun add_struct s =
Thm.declaration_attribute
(fn thm => Data.map (AList.map_default struct_eq (s, []) (insert Thm.eq_thm_prop thm)));
@@ -64,11 +64,4 @@
Thm.declaration_attribute
(fn _ => Data.map (AList.delete struct_eq s));
-val attrib_setup =
- Attrib.setup @{binding algebra}
- (Scan.lift ((Args.add >> K true || Args.del >> K false) --| Args.colon || Scan.succeed true)
- -- Scan.lift Args.name -- Scan.repeat Args.term
- >> (fn ((b, n), ts) => if b then add_struct_thm (n, ts) else del_struct (n, ts)))
- "theorems controlling algebra method";
-
end;
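Where the removed ML setup consisted of a single attribute declaration, the patch instead exports the underlying attribute functions (here Ringsimp.add_struct and Ringsimp.del_struct) and declares the attribute with the Isar command attribute_setup directly in the theory, as in Ring.thy above; Fun_Def_Base.thy and Orderings.thy below follow the same scheme. A sketch of the translation, with foo and foo_parser as illustrative placeholders:

  (* old, inside the tool's ML file *)
  val attrib_setup =
    Attrib.setup @{binding foo} foo_parser "description of foo";
  (* old, in the theory:  setup Tool.attrib_setup *)

  (* new, in the theory: the parser text moves into the theory source *)
  attribute_setup foo = {* foo_parser *} "description of foo"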
--- a/src/HOL/BNF_Greatest_Fixpoint.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/BNF_Greatest_Fixpoint.thy Wed Oct 29 19:23:32 2014 +0100
@@ -17,9 +17,7 @@
"primcorec" :: thy_decl
begin
-setup {*
-Sign.const_alias @{binding proj} @{const_name Equiv_Relations.proj}
-*}
+setup {* Sign.const_alias @{binding proj} @{const_name Equiv_Relations.proj} *}
lemma one_pointE: "\<lbrakk>\<And>x. s = x \<Longrightarrow> P\<rbrakk> \<Longrightarrow> P"
by simp
--- a/src/HOL/Coinduction.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Coinduction.thy Wed Oct 29 19:23:32 2014 +0100
@@ -14,6 +14,4 @@
ML_file "Tools/coinduction.ML"
-setup Coinduction.setup
-
end
--- a/src/HOL/Fields.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Fields.thy Wed Oct 29 19:23:32 2014 +0100
@@ -23,8 +23,7 @@
fixes inverse :: "'a \<Rightarrow> 'a"
and divide :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixl "'/" 70)
-setup {* Sign.add_const_constraint
- (@{const_name "divide"}, SOME @{typ "'a \<Rightarrow> 'a \<Rightarrow> 'a"}) *}
+setup {* Sign.add_const_constraint (@{const_name "divide"}, SOME @{typ "'a \<Rightarrow> 'a \<Rightarrow> 'a"}) *}
context semiring
@@ -47,8 +46,7 @@
end
-setup {* Sign.add_const_constraint
- (@{const_name "divide"}, SOME @{typ "'a::inverse \<Rightarrow> 'a \<Rightarrow> 'a"}) *}
+setup {* Sign.add_const_constraint (@{const_name "divide"}, SOME @{typ "'a::inverse \<Rightarrow> 'a \<Rightarrow> 'a"}) *}
text{* Lemmas @{text divide_simps} move division to the outside and eliminate them on (in)equalities. *}
--- a/src/HOL/Fun_Def.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Fun_Def.thy Wed Oct 29 19:23:32 2014 +0100
@@ -106,11 +106,6 @@
Scan.succeed (NO_CASES oo Induction_Schema.induction_schema_tac)
*} "prove an induction principle"
-setup {*
- Function.setup
- #> Function_Fun.setup
-*}
-
subsection {* Measure functions *}
@@ -135,8 +130,6 @@
(K (SIMPLE_METHOD o Lexicographic_Order.lexicographic_order_tac false))
*} "termination prover for lexicographic orderings"
-setup Lexicographic_Order.setup
-
subsection {* Congruence rules *}
@@ -308,8 +301,6 @@
ML_file "Tools/Function/scnp_reconstruct.ML"
ML_file "Tools/Function/fun_cases.ML"
-setup ScnpReconstruct.setup
-
ML_val -- "setup inactive"
{*
Context.theory_map (Function_Common.set_termination_prover
--- a/src/HOL/Fun_Def_Base.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Fun_Def_Base.thy Wed Oct 29 19:23:32 2014 +0100
@@ -11,8 +11,12 @@
ML_file "Tools/Function/function_lib.ML"
named_theorems termination_simp "simplification rules for termination proofs"
ML_file "Tools/Function/function_common.ML"
-ML_file "Tools/Function/context_tree.ML"
-setup Function_Ctx_Tree.setup
+ML_file "Tools/Function/function_context_tree.ML"
+
+attribute_setup fundef_cong =
+ \<open>Attrib.add_del Function_Context_Tree.cong_add Function_Context_Tree.cong_del\<close>
+ "declaration of congruence rule for function definitions"
+
ML_file "Tools/Function/sum_tree.ML"
end
--- a/src/HOL/HOL.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/HOL.thy Wed Oct 29 19:23:32 2014 +0100
@@ -27,18 +27,12 @@
ML_file "~~/src/Tools/eqsubst.ML"
ML_file "~~/src/Provers/quantifier1.ML"
ML_file "~~/src/Tools/atomize_elim.ML"
-ML_file "~~/src/Tools/induct.ML"
ML_file "~~/src/Tools/cong_tac.ML"
-ML_file "~~/src/Tools/intuitionistic.ML"
+ML_file "~~/src/Tools/intuitionistic.ML" setup \<open>Intuitionistic.method_setup @{binding iprover}\<close>
ML_file "~~/src/Tools/project_rule.ML"
ML_file "~~/src/Tools/subtyping.ML"
ML_file "~~/src/Tools/case_product.ML"
-setup {*
- Intuitionistic.method_setup @{binding iprover}
- #> Subtyping.setup
- #> Case_Product.setup
-*}
ML \<open>Plugin_Name.declare_setup @{binding extraction}\<close>
@@ -231,7 +225,7 @@
lemma trans_sym [Pure.elim?]: "r = s ==> t = s ==> r = t"
by (rule trans [OF _ sym])
-lemma meta_eq_to_obj_eq:
+lemma meta_eq_to_obj_eq:
assumes meq: "A == B"
shows "A = B"
by (unfold meq) (rule refl)
@@ -847,26 +841,23 @@
val hyp_subst_tacs = [Hypsubst.hyp_subst_tac]
);
-structure Basic_Classical: BASIC_CLASSICAL = Classical;
+structure Basic_Classical: BASIC_CLASSICAL = Classical;
open Basic_Classical;
*}
-setup Classical.setup
-
setup {*
-let
- fun non_bool_eq (@{const_name HOL.eq}, Type (_, [T, _])) = T <> @{typ bool}
- | non_bool_eq _ = false;
- fun hyp_subst_tac' ctxt =
- SUBGOAL (fn (goal, i) =>
- if Term.exists_Const non_bool_eq goal
- then Hypsubst.hyp_subst_tac ctxt i
- else no_tac);
-in
- Hypsubst.hypsubst_setup
(*prevent substitution on bool*)
- #> Context_Rules.addSWrapper (fn ctxt => fn tac => hyp_subst_tac' ctxt ORELSE' tac)
-end
+ let
+ fun non_bool_eq (@{const_name HOL.eq}, Type (_, [T, _])) = T <> @{typ bool}
+ | non_bool_eq _ = false;
+ fun hyp_subst_tac' ctxt =
+ SUBGOAL (fn (goal, i) =>
+ if Term.exists_Const non_bool_eq goal
+ then Hypsubst.hyp_subst_tac ctxt i
+ else no_tac);
+ in
+ Context_Rules.addSWrapper (fn ctxt => fn tac => hyp_subst_tac' ctxt ORELSE' tac)
+ end
*}
declare iffI [intro!]
@@ -932,8 +923,6 @@
val blast_tac = Blast.blast_tac;
*}
-setup Blast.setup
-
subsubsection {* Simplifier *}
@@ -1197,18 +1186,14 @@
ML_file "Tools/simpdata.ML"
ML {* open Simpdata *}
-setup {* map_theory_simpset (put_simpset HOL_basic_ss) *}
+setup {*
+ map_theory_simpset (put_simpset HOL_basic_ss) #>
+ Simplifier.method_setup Splitter.split_modifiers
+*}
simproc_setup defined_Ex ("EX x. P x") = {* fn _ => Quantifier1.rearrange_ex *}
simproc_setup defined_All ("ALL x. P x") = {* fn _ => Quantifier1.rearrange_all *}
-setup {*
- Simplifier.method_setup Splitter.split_modifiers
- #> Splitter.setup
- #> clasimp_setup
- #> EqSubst.setup
-*}
-
text {* Simproc for proving @{text "(y = x) == False"} from premise @{text "~(x = y)"}: *}
simproc_setup neq ("x = y") = {* fn _ =>
@@ -1467,6 +1452,7 @@
text {* Method setup. *}
+ML_file "~~/src/Tools/induct.ML"
ML {*
structure Induct = Induct
(
@@ -1484,7 +1470,6 @@
ML_file "~~/src/Tools/induction.ML"
setup {*
- Induct.setup #> Induction.setup #>
Context.theory_map (Induct.map_simpset (fn ss => ss
addsimprocs
[Simplifier.simproc_global @{theory} "swap_induct_false"
@@ -1558,13 +1543,12 @@
lemma [induct_simp]: "induct_implies induct_true P == P"
by (simp add: induct_implies_def induct_true_def)
-lemma [induct_simp]: "(x = x) = True"
+lemma [induct_simp]: "(x = x) = True"
by (rule simp_thms)
hide_const induct_forall induct_implies induct_equal induct_conj induct_true induct_false
ML_file "~~/src/Tools/induct_tacs.ML"
-setup Induct_Tacs.setup
subsubsection {* Coherent logic *}
@@ -1640,8 +1624,8 @@
lemmas eq_sym_conv = eq_commute
lemma nnf_simps:
- "(\<not>(P \<and> Q)) = (\<not> P \<or> \<not> Q)" "(\<not> (P \<or> Q)) = (\<not> P \<and> \<not>Q)" "(P \<longrightarrow> Q) = (\<not>P \<or> Q)"
- "(P = Q) = ((P \<and> Q) \<or> (\<not>P \<and> \<not> Q))" "(\<not>(P = Q)) = ((P \<and> \<not> Q) \<or> (\<not>P \<and> Q))"
+ "(\<not>(P \<and> Q)) = (\<not> P \<or> \<not> Q)" "(\<not> (P \<or> Q)) = (\<not> P \<and> \<not>Q)" "(P \<longrightarrow> Q) = (\<not>P \<or> Q)"
+ "(P = Q) = ((P \<and> Q) \<or> (\<not>P \<and> \<not> Q))" "(\<not>(P = Q)) = ((P \<and> \<not> Q) \<or> (\<not>P \<and> Q))"
"(\<not> \<not>(P)) = P"
by blast+
@@ -1737,10 +1721,11 @@
by (fact arg_cong)
setup {*
- Code_Preproc.map_pre (put_simpset HOL_basic_ss)
- #> Code_Preproc.map_post (put_simpset HOL_basic_ss)
- #> Code_Simp.map_ss (put_simpset HOL_basic_ss
- #> Simplifier.add_cong @{thm conj_left_cong} #> Simplifier.add_cong @{thm disj_left_cong})
+ Code_Preproc.map_pre (put_simpset HOL_basic_ss) #>
+ Code_Preproc.map_post (put_simpset HOL_basic_ss) #>
+ Code_Simp.map_ss (put_simpset HOL_basic_ss #>
+ Simplifier.add_cong @{thm conj_left_cong} #>
+ Simplifier.add_cong @{thm disj_left_cong})
*}
@@ -1799,7 +1784,7 @@
text {* More about @{typ prop} *}
lemma [code nbe]:
- shows "(True \<Longrightarrow> PROP Q) \<equiv> PROP Q"
+ shows "(True \<Longrightarrow> PROP Q) \<equiv> PROP Q"
and "(PROP Q \<Longrightarrow> True) \<equiv> Trueprop True"
and "(P \<Longrightarrow> R) \<equiv> Trueprop (P \<longrightarrow> R)" by (auto intro!: equal_intr_rule)
@@ -1828,9 +1813,7 @@
"equal TYPE('a) TYPE('a) \<longleftrightarrow> True"
by (simp add: equal)
-setup {*
- Sign.add_const_constraint (@{const_name equal}, SOME @{typ "'a\<Colon>type \<Rightarrow> 'a \<Rightarrow> bool"})
-*}
+setup {* Sign.add_const_constraint (@{const_name equal}, SOME @{typ "'a\<Colon>type \<Rightarrow> 'a \<Rightarrow> bool"}) *}
lemma equal_alias_cert: "OFCLASS('a, equal_class) \<equiv> ((op = :: 'a \<Rightarrow> 'a \<Rightarrow> bool) \<equiv> equal)" (is "?ofclass \<equiv> ?equal")
proof
@@ -1843,14 +1826,10 @@
show "PROP ?ofclass" proof
qed (simp add: `PROP ?equal`)
qed
-
-setup {*
- Sign.add_const_constraint (@{const_name equal}, SOME @{typ "'a\<Colon>equal \<Rightarrow> 'a \<Rightarrow> bool"})
-*}
-setup {*
- Nbe.add_const_alias @{thm equal_alias_cert}
-*}
+setup {* Sign.add_const_constraint (@{const_name equal}, SOME @{typ "'a\<Colon>equal \<Rightarrow> 'a \<Rightarrow> bool"}) *}
+
+setup {* Nbe.add_const_alias @{thm equal_alias_cert} *}
text {* Cases *}
@@ -1860,8 +1839,8 @@
using assms by simp_all
setup {*
- Code.add_case @{thm Let_case_cert}
- #> Code.add_undefined @{const_name undefined}
+ Code.add_case @{thm Let_case_cert} #>
+ Code.add_undefined @{const_name undefined}
*}
declare [[code abort: undefined]]
@@ -1877,7 +1856,7 @@
| constant True \<rightharpoonup>
(SML) "true" and (OCaml) "true" and (Haskell) "True" and (Scala) "true"
| constant False \<rightharpoonup>
- (SML) "false" and (OCaml) "false" and (Haskell) "False" and (Scala) "false"
+ (SML) "false" and (OCaml) "false" and (Haskell) "False" and (Scala) "false"
code_reserved SML
bool true false
@@ -1936,13 +1915,13 @@
subsubsection {* Evaluation and normalization by evaluation *}
method_setup eval = {*
-let
- fun eval_tac ctxt =
- let val conv = Code_Runtime.dynamic_holds_conv ctxt
- in CONVERSION (Conv.params_conv ~1 (K (Conv.concl_conv ~1 conv)) ctxt) THEN' rtac TrueI end
-in
- Scan.succeed (SIMPLE_METHOD' o eval_tac)
-end
+ let
+ fun eval_tac ctxt =
+ let val conv = Code_Runtime.dynamic_holds_conv ctxt
+ in CONVERSION (Conv.params_conv ~1 (K (Conv.concl_conv ~1 conv)) ctxt) THEN' rtac TrueI end
+ in
+ Scan.succeed (SIMPLE_METHOD' o eval_tac)
+ end
*} "solve goal by evaluation"
method_setup normalization = {*
@@ -1989,23 +1968,23 @@
subsection {* Legacy tactics and ML bindings *}
ML {*
-(* combination of (spec RS spec RS ...(j times) ... spec RS mp) *)
-local
- fun wrong_prem (Const (@{const_name All}, _) $ Abs (_, _, t)) = wrong_prem t
- | wrong_prem (Bound _) = true
- | wrong_prem _ = false;
- val filter_right = filter (not o wrong_prem o HOLogic.dest_Trueprop o hd o Thm.prems_of);
-in
- fun smp i = funpow i (fn m => filter_right ([spec] RL m)) ([mp]);
- fun smp_tac j = EVERY'[dresolve_tac (smp j), atac];
-end;
+ (* combination of (spec RS spec RS ...(j times) ... spec RS mp) *)
+ local
+ fun wrong_prem (Const (@{const_name All}, _) $ Abs (_, _, t)) = wrong_prem t
+ | wrong_prem (Bound _) = true
+ | wrong_prem _ = false;
+ val filter_right = filter (not o wrong_prem o HOLogic.dest_Trueprop o hd o Thm.prems_of);
+ in
+ fun smp i = funpow i (fn m => filter_right ([spec] RL m)) ([mp]);
+ fun smp_tac j = EVERY'[dresolve_tac (smp j), atac];
+ end;
-local
- val nnf_ss =
- simpset_of (put_simpset HOL_basic_ss @{context} addsimps @{thms simp_thms nnf_simps});
-in
- fun nnf_conv ctxt = Simplifier.rewrite (put_simpset nnf_ss ctxt);
-end
+ local
+ val nnf_ss =
+ simpset_of (put_simpset HOL_basic_ss @{context} addsimps @{thms simp_thms nnf_simps});
+ in
+ fun nnf_conv ctxt = Simplifier.rewrite (put_simpset nnf_ss ctxt);
+ end
*}
hide_const (open) eq equal
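Several HOL.thy hunks also merge adjacent setup commands into a single block chained with #>, Isabelle/ML's reverse composition operator, so the theory -> theory functions are applied left to right; the effect is the same as issuing the setup commands separately. A small sketch, assuming f and g are arbitrary theory transformations in scope:

  (* f, g : theory -> theory;  (f #> g) thy = g (f thy) *)
  setup {* f #> g *}   (* equivalent to:  setup f  followed by  setup g *)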
--- a/src/HOL/Inductive.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Inductive.thy Wed Oct 29 19:23:32 2014 +0100
@@ -258,7 +258,6 @@
Collect_mono in_mono vimage_mono
ML_file "Tools/inductive.ML"
-setup Inductive.setup
theorems [mono] =
imp_refl disj_mono conj_mono ex_mono all_mono if_bool_eq_conj
--- a/src/HOL/Library/Library.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Library/Library.thy Wed Oct 29 19:23:32 2014 +0100
@@ -38,7 +38,7 @@
Lattice_Constructions
Linear_Temporal_Logic_on_Streams
ListVector
- Lubs_Glbs
+ Lub_Glb
Mapping
Monad_Syntax
More_List
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/Lub_Glb.thy Wed Oct 29 19:23:32 2014 +0100
@@ -0,0 +1,245 @@
+(* Title: HOL/Library/Lub_Glb.thy
+ Author: Jacques D. Fleuriot, University of Cambridge
+ Author: Amine Chaieb, University of Cambridge *)
+
+header {* Definitions of Least Upper Bounds and Greatest Lower Bounds *}
+
+theory Lub_Glb
+imports Complex_Main
+begin
+
+text {* Thanks to suggestions by James Margetson *}
+
+definition setle :: "'a set \<Rightarrow> 'a::ord \<Rightarrow> bool" (infixl "*<=" 70)
+ where "S *<= x = (ALL y: S. y \<le> x)"
+
+definition setge :: "'a::ord \<Rightarrow> 'a set \<Rightarrow> bool" (infixl "<=*" 70)
+ where "x <=* S = (ALL y: S. x \<le> y)"
+
+
+subsection {* Rules for the Relations @{text "*<="} and @{text "<=*"} *}
+
+lemma setleI: "ALL y: S. y \<le> x \<Longrightarrow> S *<= x"
+ by (simp add: setle_def)
+
+lemma setleD: "S *<= x \<Longrightarrow> y: S \<Longrightarrow> y \<le> x"
+ by (simp add: setle_def)
+
+lemma setgeI: "ALL y: S. x \<le> y \<Longrightarrow> x <=* S"
+ by (simp add: setge_def)
+
+lemma setgeD: "x <=* S \<Longrightarrow> y: S \<Longrightarrow> x \<le> y"
+ by (simp add: setge_def)
+
+
+definition leastP :: "('a \<Rightarrow> bool) \<Rightarrow> 'a::ord \<Rightarrow> bool"
+ where "leastP P x = (P x \<and> x <=* Collect P)"
+
+definition isUb :: "'a set \<Rightarrow> 'a set \<Rightarrow> 'a::ord \<Rightarrow> bool"
+ where "isUb R S x = (S *<= x \<and> x: R)"
+
+definition isLub :: "'a set \<Rightarrow> 'a set \<Rightarrow> 'a::ord \<Rightarrow> bool"
+ where "isLub R S x = leastP (isUb R S) x"
+
+definition ubs :: "'a set \<Rightarrow> 'a::ord set \<Rightarrow> 'a set"
+ where "ubs R S = Collect (isUb R S)"
+
+
+subsection {* Rules about the Operators @{term leastP}, @{term ub} and @{term lub} *}
+
+lemma leastPD1: "leastP P x \<Longrightarrow> P x"
+ by (simp add: leastP_def)
+
+lemma leastPD2: "leastP P x \<Longrightarrow> x <=* Collect P"
+ by (simp add: leastP_def)
+
+lemma leastPD3: "leastP P x \<Longrightarrow> y: Collect P \<Longrightarrow> x \<le> y"
+ by (blast dest!: leastPD2 setgeD)
+
+lemma isLubD1: "isLub R S x \<Longrightarrow> S *<= x"
+ by (simp add: isLub_def isUb_def leastP_def)
+
+lemma isLubD1a: "isLub R S x \<Longrightarrow> x: R"
+ by (simp add: isLub_def isUb_def leastP_def)
+
+lemma isLub_isUb: "isLub R S x \<Longrightarrow> isUb R S x"
+ unfolding isUb_def by (blast dest: isLubD1 isLubD1a)
+
+lemma isLubD2: "isLub R S x \<Longrightarrow> y : S \<Longrightarrow> y \<le> x"
+ by (blast dest!: isLubD1 setleD)
+
+lemma isLubD3: "isLub R S x \<Longrightarrow> leastP (isUb R S) x"
+ by (simp add: isLub_def)
+
+lemma isLubI1: "leastP(isUb R S) x \<Longrightarrow> isLub R S x"
+ by (simp add: isLub_def)
+
+lemma isLubI2: "isUb R S x \<Longrightarrow> x <=* Collect (isUb R S) \<Longrightarrow> isLub R S x"
+ by (simp add: isLub_def leastP_def)
+
+lemma isUbD: "isUb R S x \<Longrightarrow> y : S \<Longrightarrow> y \<le> x"
+ by (simp add: isUb_def setle_def)
+
+lemma isUbD2: "isUb R S x \<Longrightarrow> S *<= x"
+ by (simp add: isUb_def)
+
+lemma isUbD2a: "isUb R S x \<Longrightarrow> x: R"
+ by (simp add: isUb_def)
+
+lemma isUbI: "S *<= x \<Longrightarrow> x: R \<Longrightarrow> isUb R S x"
+ by (simp add: isUb_def)
+
+lemma isLub_le_isUb: "isLub R S x \<Longrightarrow> isUb R S y \<Longrightarrow> x \<le> y"
+ unfolding isLub_def by (blast intro!: leastPD3)
+
+lemma isLub_ubs: "isLub R S x \<Longrightarrow> x <=* ubs R S"
+ unfolding ubs_def isLub_def by (rule leastPD2)
+
+lemma isLub_unique: "[| isLub R S x; isLub R S y |] ==> x = (y::'a::linorder)"
+ apply (frule isLub_isUb)
+ apply (frule_tac x = y in isLub_isUb)
+ apply (blast intro!: order_antisym dest!: isLub_le_isUb)
+ done
+
+lemma isUb_UNIV_I: "(\<And>y. y \<in> S \<Longrightarrow> y \<le> u) \<Longrightarrow> isUb UNIV S u"
+ by (simp add: isUbI setleI)
+
+
+definition greatestP :: "('a \<Rightarrow> bool) \<Rightarrow> 'a::ord \<Rightarrow> bool"
+ where "greatestP P x = (P x \<and> Collect P *<= x)"
+
+definition isLb :: "'a set \<Rightarrow> 'a set \<Rightarrow> 'a::ord \<Rightarrow> bool"
+ where "isLb R S x = (x <=* S \<and> x: R)"
+
+definition isGlb :: "'a set \<Rightarrow> 'a set \<Rightarrow> 'a::ord \<Rightarrow> bool"
+ where "isGlb R S x = greatestP (isLb R S) x"
+
+definition lbs :: "'a set \<Rightarrow> 'a::ord set \<Rightarrow> 'a set"
+ where "lbs R S = Collect (isLb R S)"
+
+
+subsection {* Rules about the Operators @{term greatestP}, @{term isLb} and @{term isGlb} *}
+
+lemma greatestPD1: "greatestP P x \<Longrightarrow> P x"
+ by (simp add: greatestP_def)
+
+lemma greatestPD2: "greatestP P x \<Longrightarrow> Collect P *<= x"
+ by (simp add: greatestP_def)
+
+lemma greatestPD3: "greatestP P x \<Longrightarrow> y: Collect P \<Longrightarrow> x \<ge> y"
+ by (blast dest!: greatestPD2 setleD)
+
+lemma isGlbD1: "isGlb R S x \<Longrightarrow> x <=* S"
+ by (simp add: isGlb_def isLb_def greatestP_def)
+
+lemma isGlbD1a: "isGlb R S x \<Longrightarrow> x: R"
+ by (simp add: isGlb_def isLb_def greatestP_def)
+
+lemma isGlb_isLb: "isGlb R S x \<Longrightarrow> isLb R S x"
+ unfolding isLb_def by (blast dest: isGlbD1 isGlbD1a)
+
+lemma isGlbD2: "isGlb R S x \<Longrightarrow> y : S \<Longrightarrow> y \<ge> x"
+ by (blast dest!: isGlbD1 setgeD)
+
+lemma isGlbD3: "isGlb R S x \<Longrightarrow> greatestP (isLb R S) x"
+ by (simp add: isGlb_def)
+
+lemma isGlbI1: "greatestP (isLb R S) x \<Longrightarrow> isGlb R S x"
+ by (simp add: isGlb_def)
+
+lemma isGlbI2: "isLb R S x \<Longrightarrow> Collect (isLb R S) *<= x \<Longrightarrow> isGlb R S x"
+ by (simp add: isGlb_def greatestP_def)
+
+lemma isLbD: "isLb R S x \<Longrightarrow> y : S \<Longrightarrow> y \<ge> x"
+ by (simp add: isLb_def setge_def)
+
+lemma isLbD2: "isLb R S x \<Longrightarrow> x <=* S "
+ by (simp add: isLb_def)
+
+lemma isLbD2a: "isLb R S x \<Longrightarrow> x: R"
+ by (simp add: isLb_def)
+
+lemma isLbI: "x <=* S \<Longrightarrow> x: R \<Longrightarrow> isLb R S x"
+ by (simp add: isLb_def)
+
+lemma isGlb_le_isLb: "isGlb R S x \<Longrightarrow> isLb R S y \<Longrightarrow> x \<ge> y"
+ unfolding isGlb_def by (blast intro!: greatestPD3)
+
+lemma isGlb_ubs: "isGlb R S x \<Longrightarrow> lbs R S *<= x"
+ unfolding lbs_def isGlb_def by (rule greatestPD2)
+
+lemma isGlb_unique: "[| isGlb R S x; isGlb R S y |] ==> x = (y::'a::linorder)"
+ apply (frule isGlb_isLb)
+ apply (frule_tac x = y in isGlb_isLb)
+ apply (blast intro!: order_antisym dest!: isGlb_le_isLb)
+ done
+
+lemma bdd_above_setle: "bdd_above A \<longleftrightarrow> (\<exists>a. A *<= a)"
+ by (auto simp: bdd_above_def setle_def)
+
+lemma bdd_below_setge: "bdd_below A \<longleftrightarrow> (\<exists>a. a <=* A)"
+ by (auto simp: bdd_below_def setge_def)
+
+lemma isLub_cSup:
+ "(S::'a :: conditionally_complete_lattice set) \<noteq> {} \<Longrightarrow> (\<exists>b. S *<= b) \<Longrightarrow> isLub UNIV S (Sup S)"
+ by (auto simp add: isLub_def setle_def leastP_def isUb_def
+ intro!: setgeI cSup_upper cSup_least)
+
+lemma isGlb_cInf:
+ "(S::'a :: conditionally_complete_lattice set) \<noteq> {} \<Longrightarrow> (\<exists>b. b <=* S) \<Longrightarrow> isGlb UNIV S (Inf S)"
+ by (auto simp add: isGlb_def setge_def greatestP_def isLb_def
+ intro!: setleI cInf_lower cInf_greatest)
+
+lemma cSup_le: "(S::'a::conditionally_complete_lattice set) \<noteq> {} \<Longrightarrow> S *<= b \<Longrightarrow> Sup S \<le> b"
+ by (metis cSup_least setle_def)
+
+lemma cInf_ge: "(S::'a :: conditionally_complete_lattice set) \<noteq> {} \<Longrightarrow> b <=* S \<Longrightarrow> Inf S \<ge> b"
+ by (metis cInf_greatest setge_def)
+
+lemma cSup_bounds:
+ fixes S :: "'a :: conditionally_complete_lattice set"
+ shows "S \<noteq> {} \<Longrightarrow> a <=* S \<Longrightarrow> S *<= b \<Longrightarrow> a \<le> Sup S \<and> Sup S \<le> b"
+ using cSup_least[of S b] cSup_upper2[of _ S a]
+ by (auto simp: bdd_above_setle setge_def setle_def)
+
+lemma cSup_unique: "(S::'a :: {conditionally_complete_linorder, no_bot} set) *<= b \<Longrightarrow> (\<forall>b'<b. \<exists>x\<in>S. b' < x) \<Longrightarrow> Sup S = b"
+ by (rule cSup_eq) (auto simp: not_le[symmetric] setle_def)
+
+lemma cInf_unique: "b <=* (S::'a :: {conditionally_complete_linorder, no_top} set) \<Longrightarrow> (\<forall>b'>b. \<exists>x\<in>S. b' > x) \<Longrightarrow> Inf S = b"
+ by (rule cInf_eq) (auto simp: not_le[symmetric] setge_def)
+
+text{* Use completeness of reals (supremum property) to show that any bounded sequence has a least upper bound*}
+
+lemma reals_complete: "\<exists>X. X \<in> S \<Longrightarrow> \<exists>Y. isUb (UNIV::real set) S Y \<Longrightarrow> \<exists>t. isLub (UNIV :: real set) S t"
+ by (intro exI[of _ "Sup S"] isLub_cSup) (auto simp: setle_def isUb_def intro!: cSup_upper)
+
+lemma Bseq_isUb: "\<And>X :: nat \<Rightarrow> real. Bseq X \<Longrightarrow> \<exists>U. isUb (UNIV::real set) {x. \<exists>n. X n = x} U"
+ by (auto intro: isUbI setleI simp add: Bseq_def abs_le_iff)
+
+lemma Bseq_isLub: "\<And>X :: nat \<Rightarrow> real. Bseq X \<Longrightarrow> \<exists>U. isLub (UNIV::real set) {x. \<exists>n. X n = x} U"
+ by (blast intro: reals_complete Bseq_isUb)
+
+lemma isLub_mono_imp_LIMSEQ:
+ fixes X :: "nat \<Rightarrow> real"
+ assumes u: "isLub UNIV {x. \<exists>n. X n = x} u" (* FIXME: use 'range X' *)
+ assumes X: "\<forall>m n. m \<le> n \<longrightarrow> X m \<le> X n"
+ shows "X ----> u"
+proof -
+ have "X ----> (SUP i. X i)"
+ using u[THEN isLubD1] X
+ by (intro LIMSEQ_incseq_SUP) (auto simp: incseq_def image_def eq_commute bdd_above_setle)
+ also have "(SUP i. X i) = u"
+ using isLub_cSup[of "range X"] u[THEN isLubD1]
+ by (intro isLub_unique[OF _ u]) (auto simp add: SUP_def image_def eq_commute)
+ finally show ?thesis .
+qed
+
+lemmas real_isGlb_unique = isGlb_unique[where 'a=real]
+
+lemma real_le_inf_subset: "t \<noteq> {} \<Longrightarrow> t \<subseteq> s \<Longrightarrow> \<exists>b. b <=* s \<Longrightarrow> Inf s \<le> Inf (t::real set)"
+ by (rule cInf_superset_mono) (auto simp: bdd_below_setge)
+
+lemma real_ge_sup_subset: "t \<noteq> {} \<Longrightarrow> t \<subseteq> s \<Longrightarrow> \<exists>b. s *<= b \<Longrightarrow> Sup s \<ge> Sup (t::real set)"
+ by (rule cSup_subset_mono) (auto simp: bdd_above_setle)
+
+end
--- a/src/HOL/Library/Lubs_Glbs.thy Wed Oct 29 12:01:39 2014 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,245 +0,0 @@
-(* Title: HOL/Library/Lubs_Glbs.thy
- Author: Jacques D. Fleuriot, University of Cambridge
- Author: Amine Chaieb, University of Cambridge *)
-
-header {* Definitions of Least Upper Bounds and Greatest Lower Bounds *}
-
-theory Lubs_Glbs
-imports Complex_Main
-begin
-
-text {* Thanks to suggestions by James Margetson *}
-
-definition setle :: "'a set \<Rightarrow> 'a::ord \<Rightarrow> bool" (infixl "*<=" 70)
- where "S *<= x = (ALL y: S. y \<le> x)"
-
-definition setge :: "'a::ord \<Rightarrow> 'a set \<Rightarrow> bool" (infixl "<=*" 70)
- where "x <=* S = (ALL y: S. x \<le> y)"
-
-
-subsection {* Rules for the Relations @{text "*<="} and @{text "<=*"} *}
-
-lemma setleI: "ALL y: S. y \<le> x \<Longrightarrow> S *<= x"
- by (simp add: setle_def)
-
-lemma setleD: "S *<= x \<Longrightarrow> y: S \<Longrightarrow> y \<le> x"
- by (simp add: setle_def)
-
-lemma setgeI: "ALL y: S. x \<le> y \<Longrightarrow> x <=* S"
- by (simp add: setge_def)
-
-lemma setgeD: "x <=* S \<Longrightarrow> y: S \<Longrightarrow> x \<le> y"
- by (simp add: setge_def)
-
-
-definition leastP :: "('a \<Rightarrow> bool) \<Rightarrow> 'a::ord \<Rightarrow> bool"
- where "leastP P x = (P x \<and> x <=* Collect P)"
-
-definition isUb :: "'a set \<Rightarrow> 'a set \<Rightarrow> 'a::ord \<Rightarrow> bool"
- where "isUb R S x = (S *<= x \<and> x: R)"
-
-definition isLub :: "'a set \<Rightarrow> 'a set \<Rightarrow> 'a::ord \<Rightarrow> bool"
- where "isLub R S x = leastP (isUb R S) x"
-
-definition ubs :: "'a set \<Rightarrow> 'a::ord set \<Rightarrow> 'a set"
- where "ubs R S = Collect (isUb R S)"
-
-
-subsection {* Rules about the Operators @{term leastP}, @{term ub} and @{term lub} *}
-
-lemma leastPD1: "leastP P x \<Longrightarrow> P x"
- by (simp add: leastP_def)
-
-lemma leastPD2: "leastP P x \<Longrightarrow> x <=* Collect P"
- by (simp add: leastP_def)
-
-lemma leastPD3: "leastP P x \<Longrightarrow> y: Collect P \<Longrightarrow> x \<le> y"
- by (blast dest!: leastPD2 setgeD)
-
-lemma isLubD1: "isLub R S x \<Longrightarrow> S *<= x"
- by (simp add: isLub_def isUb_def leastP_def)
-
-lemma isLubD1a: "isLub R S x \<Longrightarrow> x: R"
- by (simp add: isLub_def isUb_def leastP_def)
-
-lemma isLub_isUb: "isLub R S x \<Longrightarrow> isUb R S x"
- unfolding isUb_def by (blast dest: isLubD1 isLubD1a)
-
-lemma isLubD2: "isLub R S x \<Longrightarrow> y : S \<Longrightarrow> y \<le> x"
- by (blast dest!: isLubD1 setleD)
-
-lemma isLubD3: "isLub R S x \<Longrightarrow> leastP (isUb R S) x"
- by (simp add: isLub_def)
-
-lemma isLubI1: "leastP(isUb R S) x \<Longrightarrow> isLub R S x"
- by (simp add: isLub_def)
-
-lemma isLubI2: "isUb R S x \<Longrightarrow> x <=* Collect (isUb R S) \<Longrightarrow> isLub R S x"
- by (simp add: isLub_def leastP_def)
-
-lemma isUbD: "isUb R S x \<Longrightarrow> y : S \<Longrightarrow> y \<le> x"
- by (simp add: isUb_def setle_def)
-
-lemma isUbD2: "isUb R S x \<Longrightarrow> S *<= x"
- by (simp add: isUb_def)
-
-lemma isUbD2a: "isUb R S x \<Longrightarrow> x: R"
- by (simp add: isUb_def)
-
-lemma isUbI: "S *<= x \<Longrightarrow> x: R \<Longrightarrow> isUb R S x"
- by (simp add: isUb_def)
-
-lemma isLub_le_isUb: "isLub R S x \<Longrightarrow> isUb R S y \<Longrightarrow> x \<le> y"
- unfolding isLub_def by (blast intro!: leastPD3)
-
-lemma isLub_ubs: "isLub R S x \<Longrightarrow> x <=* ubs R S"
- unfolding ubs_def isLub_def by (rule leastPD2)
-
-lemma isLub_unique: "[| isLub R S x; isLub R S y |] ==> x = (y::'a::linorder)"
- apply (frule isLub_isUb)
- apply (frule_tac x = y in isLub_isUb)
- apply (blast intro!: order_antisym dest!: isLub_le_isUb)
- done
-
-lemma isUb_UNIV_I: "(\<And>y. y \<in> S \<Longrightarrow> y \<le> u) \<Longrightarrow> isUb UNIV S u"
- by (simp add: isUbI setleI)
-
-
-definition greatestP :: "('a \<Rightarrow> bool) \<Rightarrow> 'a::ord \<Rightarrow> bool"
- where "greatestP P x = (P x \<and> Collect P *<= x)"
-
-definition isLb :: "'a set \<Rightarrow> 'a set \<Rightarrow> 'a::ord \<Rightarrow> bool"
- where "isLb R S x = (x <=* S \<and> x: R)"
-
-definition isGlb :: "'a set \<Rightarrow> 'a set \<Rightarrow> 'a::ord \<Rightarrow> bool"
- where "isGlb R S x = greatestP (isLb R S) x"
-
-definition lbs :: "'a set \<Rightarrow> 'a::ord set \<Rightarrow> 'a set"
- where "lbs R S = Collect (isLb R S)"
-
-
-subsection {* Rules about the Operators @{term greatestP}, @{term isLb} and @{term isGlb} *}
-
-lemma greatestPD1: "greatestP P x \<Longrightarrow> P x"
- by (simp add: greatestP_def)
-
-lemma greatestPD2: "greatestP P x \<Longrightarrow> Collect P *<= x"
- by (simp add: greatestP_def)
-
-lemma greatestPD3: "greatestP P x \<Longrightarrow> y: Collect P \<Longrightarrow> x \<ge> y"
- by (blast dest!: greatestPD2 setleD)
-
-lemma isGlbD1: "isGlb R S x \<Longrightarrow> x <=* S"
- by (simp add: isGlb_def isLb_def greatestP_def)
-
-lemma isGlbD1a: "isGlb R S x \<Longrightarrow> x: R"
- by (simp add: isGlb_def isLb_def greatestP_def)
-
-lemma isGlb_isLb: "isGlb R S x \<Longrightarrow> isLb R S x"
- unfolding isLb_def by (blast dest: isGlbD1 isGlbD1a)
-
-lemma isGlbD2: "isGlb R S x \<Longrightarrow> y : S \<Longrightarrow> y \<ge> x"
- by (blast dest!: isGlbD1 setgeD)
-
-lemma isGlbD3: "isGlb R S x \<Longrightarrow> greatestP (isLb R S) x"
- by (simp add: isGlb_def)
-
-lemma isGlbI1: "greatestP (isLb R S) x \<Longrightarrow> isGlb R S x"
- by (simp add: isGlb_def)
-
-lemma isGlbI2: "isLb R S x \<Longrightarrow> Collect (isLb R S) *<= x \<Longrightarrow> isGlb R S x"
- by (simp add: isGlb_def greatestP_def)
-
-lemma isLbD: "isLb R S x \<Longrightarrow> y : S \<Longrightarrow> y \<ge> x"
- by (simp add: isLb_def setge_def)
-
-lemma isLbD2: "isLb R S x \<Longrightarrow> x <=* S "
- by (simp add: isLb_def)
-
-lemma isLbD2a: "isLb R S x \<Longrightarrow> x: R"
- by (simp add: isLb_def)
-
-lemma isLbI: "x <=* S \<Longrightarrow> x: R \<Longrightarrow> isLb R S x"
- by (simp add: isLb_def)
-
-lemma isGlb_le_isLb: "isGlb R S x \<Longrightarrow> isLb R S y \<Longrightarrow> x \<ge> y"
- unfolding isGlb_def by (blast intro!: greatestPD3)
-
-lemma isGlb_ubs: "isGlb R S x \<Longrightarrow> lbs R S *<= x"
- unfolding lbs_def isGlb_def by (rule greatestPD2)
-
-lemma isGlb_unique: "[| isGlb R S x; isGlb R S y |] ==> x = (y::'a::linorder)"
- apply (frule isGlb_isLb)
- apply (frule_tac x = y in isGlb_isLb)
- apply (blast intro!: order_antisym dest!: isGlb_le_isLb)
- done
-
-lemma bdd_above_setle: "bdd_above A \<longleftrightarrow> (\<exists>a. A *<= a)"
- by (auto simp: bdd_above_def setle_def)
-
-lemma bdd_below_setge: "bdd_below A \<longleftrightarrow> (\<exists>a. a <=* A)"
- by (auto simp: bdd_below_def setge_def)
-
-lemma isLub_cSup:
- "(S::'a :: conditionally_complete_lattice set) \<noteq> {} \<Longrightarrow> (\<exists>b. S *<= b) \<Longrightarrow> isLub UNIV S (Sup S)"
- by (auto simp add: isLub_def setle_def leastP_def isUb_def
- intro!: setgeI cSup_upper cSup_least)
-
-lemma isGlb_cInf:
- "(S::'a :: conditionally_complete_lattice set) \<noteq> {} \<Longrightarrow> (\<exists>b. b <=* S) \<Longrightarrow> isGlb UNIV S (Inf S)"
- by (auto simp add: isGlb_def setge_def greatestP_def isLb_def
- intro!: setleI cInf_lower cInf_greatest)
-
-lemma cSup_le: "(S::'a::conditionally_complete_lattice set) \<noteq> {} \<Longrightarrow> S *<= b \<Longrightarrow> Sup S \<le> b"
- by (metis cSup_least setle_def)
-
-lemma cInf_ge: "(S::'a :: conditionally_complete_lattice set) \<noteq> {} \<Longrightarrow> b <=* S \<Longrightarrow> Inf S \<ge> b"
- by (metis cInf_greatest setge_def)
-
-lemma cSup_bounds:
- fixes S :: "'a :: conditionally_complete_lattice set"
- shows "S \<noteq> {} \<Longrightarrow> a <=* S \<Longrightarrow> S *<= b \<Longrightarrow> a \<le> Sup S \<and> Sup S \<le> b"
- using cSup_least[of S b] cSup_upper2[of _ S a]
- by (auto simp: bdd_above_setle setge_def setle_def)
-
-lemma cSup_unique: "(S::'a :: {conditionally_complete_linorder, no_bot} set) *<= b \<Longrightarrow> (\<forall>b'<b. \<exists>x\<in>S. b' < x) \<Longrightarrow> Sup S = b"
- by (rule cSup_eq) (auto simp: not_le[symmetric] setle_def)
-
-lemma cInf_unique: "b <=* (S::'a :: {conditionally_complete_linorder, no_top} set) \<Longrightarrow> (\<forall>b'>b. \<exists>x\<in>S. b' > x) \<Longrightarrow> Inf S = b"
- by (rule cInf_eq) (auto simp: not_le[symmetric] setge_def)
-
-text{* Use completeness of reals (supremum property) to show that any bounded sequence has a least upper bound*}
-
-lemma reals_complete: "\<exists>X. X \<in> S \<Longrightarrow> \<exists>Y. isUb (UNIV::real set) S Y \<Longrightarrow> \<exists>t. isLub (UNIV :: real set) S t"
- by (intro exI[of _ "Sup S"] isLub_cSup) (auto simp: setle_def isUb_def intro!: cSup_upper)
-
-lemma Bseq_isUb: "\<And>X :: nat \<Rightarrow> real. Bseq X \<Longrightarrow> \<exists>U. isUb (UNIV::real set) {x. \<exists>n. X n = x} U"
- by (auto intro: isUbI setleI simp add: Bseq_def abs_le_iff)
-
-lemma Bseq_isLub: "\<And>X :: nat \<Rightarrow> real. Bseq X \<Longrightarrow> \<exists>U. isLub (UNIV::real set) {x. \<exists>n. X n = x} U"
- by (blast intro: reals_complete Bseq_isUb)
-
-lemma isLub_mono_imp_LIMSEQ:
- fixes X :: "nat \<Rightarrow> real"
- assumes u: "isLub UNIV {x. \<exists>n. X n = x} u" (* FIXME: use 'range X' *)
- assumes X: "\<forall>m n. m \<le> n \<longrightarrow> X m \<le> X n"
- shows "X ----> u"
-proof -
- have "X ----> (SUP i. X i)"
- using u[THEN isLubD1] X
- by (intro LIMSEQ_incseq_SUP) (auto simp: incseq_def image_def eq_commute bdd_above_setle)
- also have "(SUP i. X i) = u"
- using isLub_cSup[of "range X"] u[THEN isLubD1]
- by (intro isLub_unique[OF _ u]) (auto simp add: SUP_def image_def eq_commute)
- finally show ?thesis .
-qed
-
-lemmas real_isGlb_unique = isGlb_unique[where 'a=real]
-
-lemma real_le_inf_subset: "t \<noteq> {} \<Longrightarrow> t \<subseteq> s \<Longrightarrow> \<exists>b. b <=* s \<Longrightarrow> Inf s \<le> Inf (t::real set)"
- by (rule cInf_superset_mono) (auto simp: bdd_below_setge)
-
-lemma real_ge_sup_subset: "t \<noteq> {} \<Longrightarrow> t \<subseteq> s \<Longrightarrow> \<exists>b. s *<= b \<Longrightarrow> Sup s \<ge> Sup (t::real set)"
- by (rule cSup_subset_mono) (auto simp: bdd_above_setle)
-
-end
--- a/src/HOL/Library/Old_Recdef.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Library/Old_Recdef.thy Wed Oct 29 19:23:32 2014 +0100
@@ -68,7 +68,7 @@
ML_file "~~/src/HOL/Tools/TFL/tfl.ML"
ML_file "~~/src/HOL/Tools/TFL/post.ML"
ML_file "~~/src/HOL/Tools/recdef.ML"
-setup Recdef.setup
+
subsection {* Rule setup *}
--- a/src/HOL/Library/Old_SMT.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Library/Old_SMT.thy Wed Oct 29 19:23:32 2014 +0100
@@ -420,10 +420,7 @@
by auto
ML_file "Old_SMT/old_smt_real.ML"
-setup Old_SMT_Real.setup
-
ML_file "Old_SMT/old_smt_word.ML"
-setup Old_SMT_Word.setup
hide_type (open) pattern
hide_const fun_app term_true term_false z3div z3mod
--- a/src/HOL/Library/Old_SMT/old_smt_real.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Library/Old_SMT/old_smt_real.ML Wed Oct 29 19:23:32 2014 +0100
@@ -4,12 +4,7 @@
SMT setup for reals.
*)
-signature OLD_SMT_REAL =
-sig
- val setup: theory -> theory
-end
-
-structure Old_SMT_Real: OLD_SMT_REAL =
+structure Old_SMT_Real: sig end =
struct
@@ -125,12 +120,13 @@
(* setup *)
-val setup =
- Context.theory_map (
- Old_SMTLIB_Interface.add_logic (10, smtlib_logic) #>
- setup_builtins #>
- Old_Z3_Interface.add_mk_builtins z3_mk_builtins #>
- fold Old_Z3_Proof_Reconstruction.add_z3_rule real_rules #>
- Old_Z3_Proof_Tools.add_simproc real_linarith_proc)
+val _ =
+ Theory.setup
+ (Context.theory_map (
+ Old_SMTLIB_Interface.add_logic (10, smtlib_logic) #>
+ setup_builtins #>
+ Old_Z3_Interface.add_mk_builtins z3_mk_builtins #>
+ fold Old_Z3_Proof_Reconstruction.add_z3_rule real_rules #>
+ Old_Z3_Proof_Tools.add_simproc real_linarith_proc))
end
--- a/src/HOL/Library/Old_SMT/old_smt_word.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Library/Old_SMT/old_smt_word.ML Wed Oct 29 19:23:32 2014 +0100
@@ -4,12 +4,7 @@
SMT setup for words.
*)
-signature OLD_SMT_WORD =
-sig
- val setup: theory -> theory
-end
-
-structure Old_SMT_Word: OLD_SMT_WORD =
+structure Old_SMT_Word: sig end =
struct
open Word_Lib
@@ -143,9 +138,9 @@
(* setup *)
-val setup =
- Context.theory_map (
- Old_SMTLIB_Interface.add_logic (20, smtlib_logic) #>
- setup_builtins)
+val _ =
+ Theory.setup
+ (Context.theory_map
+ (Old_SMTLIB_Interface.add_logic (20, smtlib_logic) #> setup_builtins))
end
--- a/src/HOL/Library/Predicate_Compile_Quickcheck.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Library/Predicate_Compile_Quickcheck.thy Wed Oct 29 19:23:32 2014 +0100
@@ -8,6 +8,4 @@
ML_file "../Tools/Predicate_Compile/predicate_compile_quickcheck.ML"
-setup {* Predicate_Compile_Quickcheck.setup *}
-
end
--- a/src/HOL/Library/Refute.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Library/Refute.thy Wed Oct 29 19:23:32 2014 +0100
@@ -13,7 +13,6 @@
begin
ML_file "refute.ML"
-setup Refute.setup
refute_params
[itself = 1,
--- a/src/HOL/Library/refute.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Library/refute.ML Wed Oct 29 19:23:32 2014 +0100
@@ -52,8 +52,6 @@
val refute_goal :
Proof.context -> (string * string) list -> thm -> int -> string
- val setup : theory -> theory
-
(* ------------------------------------------------------------------------- *)
(* Additional functions used by Nitpick (to be factored out) *)
(* ------------------------------------------------------------------------- *)
@@ -2926,37 +2924,33 @@
(* ------------------------------------------------------------------------- *)
-(* use 'setup Refute.setup' in an Isabelle theory to initialize the 'Refute' *)
-(* structure *)
-(* ------------------------------------------------------------------------- *)
-
-(* ------------------------------------------------------------------------- *)
(* Note: the interpreters and printers are used in reverse order; however, *)
(* an interpreter that can handle non-atomic terms ends up being *)
(* applied before the 'stlc_interpreter' breaks the term apart into *)
(* subterms that are then passed to other interpreters! *)
(* ------------------------------------------------------------------------- *)
(* FIXME formal @{const_name} for some entries (!??) *)
-val setup =
- add_interpreter "stlc" stlc_interpreter #>
- add_interpreter "Pure" Pure_interpreter #>
- add_interpreter "HOLogic" HOLogic_interpreter #>
- add_interpreter "set" set_interpreter #>
- add_interpreter "IDT" IDT_interpreter #>
- add_interpreter "IDT_constructor" IDT_constructor_interpreter #>
- add_interpreter "IDT_recursion" IDT_recursion_interpreter #>
- add_interpreter "Finite_Set.card" Finite_Set_card_interpreter #>
- add_interpreter "Finite_Set.finite" Finite_Set_finite_interpreter #>
- add_interpreter "Nat_Orderings.less" Nat_less_interpreter #>
- add_interpreter "Nat_HOL.plus" Nat_plus_interpreter #>
- add_interpreter "Nat_HOL.minus" Nat_minus_interpreter #>
- add_interpreter "Nat_HOL.times" Nat_times_interpreter #>
- add_interpreter "List.append" List_append_interpreter #>
- add_interpreter "Product_Type.prod.fst" Product_Type_fst_interpreter #>
- add_interpreter "Product_Type.prod.snd" Product_Type_snd_interpreter #>
- add_printer "stlc" stlc_printer #>
- add_printer "set" set_printer #>
- add_printer "IDT" IDT_printer;
+val _ =
+ Theory.setup
+ (add_interpreter "stlc" stlc_interpreter #>
+ add_interpreter "Pure" Pure_interpreter #>
+ add_interpreter "HOLogic" HOLogic_interpreter #>
+ add_interpreter "set" set_interpreter #>
+ add_interpreter "IDT" IDT_interpreter #>
+ add_interpreter "IDT_constructor" IDT_constructor_interpreter #>
+ add_interpreter "IDT_recursion" IDT_recursion_interpreter #>
+ add_interpreter "Finite_Set.card" Finite_Set_card_interpreter #>
+ add_interpreter "Finite_Set.finite" Finite_Set_finite_interpreter #>
+ add_interpreter "Nat_Orderings.less" Nat_less_interpreter #>
+ add_interpreter "Nat_HOL.plus" Nat_plus_interpreter #>
+ add_interpreter "Nat_HOL.minus" Nat_minus_interpreter #>
+ add_interpreter "Nat_HOL.times" Nat_times_interpreter #>
+ add_interpreter "List.append" List_append_interpreter #>
+ add_interpreter "Product_Type.prod.fst" Product_Type_fst_interpreter #>
+ add_interpreter "Product_Type.prod.snd" Product_Type_snd_interpreter #>
+ add_printer "stlc" stlc_printer #>
+ add_printer "set" set_printer #>
+ add_printer "IDT" IDT_printer);
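When load-time registration is all a tool contributes at the theory level, its exported interface shrinks accordingly: refute.ML above merely drops setup from the REFUTE signature, while Old_SMT_Real and Old_SMT_Word collapse to an empty signature. A sketch of that degenerate case, with My_Hooks and my_registration as hypothetical names:

  (* exports nothing; loading the file is the whole interface *)
  structure My_Hooks: sig end =
  struct
    val _ = Theory.setup my_registration;   (* my_registration : theory -> theory, a placeholder *)
  end;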
--- a/src/HOL/Lifting.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Lifting.thy Wed Oct 29 19:23:32 2014 +0100
@@ -548,7 +548,6 @@
named_theorems relator_eq_onp
"theorems that a relator of an eq_onp is an eq_onp of the corresponding predicate"
ML_file "Tools/Lifting/lifting_info.ML"
-setup Lifting_Info.setup
(* setup for the function type *)
declare fun_quotient[quot_map]
--- a/src/HOL/Meson.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Meson.thy Wed Oct 29 19:23:32 2014 +0100
@@ -195,8 +195,6 @@
ML_file "Tools/Meson/meson_clausify.ML"
ML_file "Tools/Meson/meson_tactic.ML"
-setup {* Meson_Tactic.setup *}
-
hide_const (open) COMBI COMBK COMBB COMBC COMBS skolem
hide_fact (open) not_conjD not_disjD not_notD not_allD not_exD imp_to_disjD
not_impD iff_to_disjD not_iffD not_refl_disj_D conj_exD1 conj_exD2 disj_exD
--- a/src/HOL/Metis.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Metis.thy Wed Oct 29 19:23:32 2014 +0100
@@ -44,8 +44,6 @@
ML_file "Tools/Metis/metis_reconstruct.ML"
ML_file "Tools/Metis/metis_tactic.ML"
-setup {* Metis_Tactic.setup *}
-
hide_const (open) select fFalse fTrue fNot fComp fconj fdisj fimplies fAll fEx fequal lambda
hide_fact (open) select_def not_atomize atomize_not_select not_atomize_select select_FalseI
fFalse_def fTrue_def fNot_def fconj_def fdisj_def fimplies_def fAll_def fEx_def fequal_def
--- a/src/HOL/NSA/NSA.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/NSA/NSA.thy Wed Oct 29 19:23:32 2014 +0100
@@ -6,7 +6,7 @@
header{*Infinite Numbers, Infinitesimals, Infinitely Close Relation*}
theory NSA
-imports HyperDef "~~/src/HOL/Library/Lubs_Glbs"
+imports HyperDef "~~/src/HOL/Library/Lub_Glb"
begin
definition
--- a/src/HOL/NSA/StarDef.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/NSA/StarDef.thy Wed Oct 29 19:23:32 2014 +0100
@@ -88,7 +88,6 @@
text {*Initialize transfer tactic.*}
ML_file "transfer.ML"
-setup Transfer_Principle.setup
method_setup transfer = {*
Attrib.thms >> (fn ths => fn ctxt =>
--- a/src/HOL/NSA/transfer.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/NSA/transfer.ML Wed Oct 29 19:23:32 2014 +0100
@@ -8,7 +8,6 @@
sig
val transfer_tac: Proof.context -> thm list -> int -> tactic
val add_const: string -> theory -> theory
- val setup: theory -> theory
end;
structure Transfer_Principle: TRANSFER_PRINCIPLE =
@@ -106,12 +105,13 @@
(fn {intros,unfolds,refolds,consts} =>
{intros=intros, unfolds=unfolds, refolds=refolds, consts=c::consts}))
-val setup =
- Attrib.setup @{binding transfer_intro} (Attrib.add_del intro_add intro_del)
- "declaration of transfer introduction rule" #>
- Attrib.setup @{binding transfer_unfold} (Attrib.add_del unfold_add unfold_del)
- "declaration of transfer unfolding rule" #>
- Attrib.setup @{binding transfer_refold} (Attrib.add_del refold_add refold_del)
- "declaration of transfer refolding rule"
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding transfer_intro} (Attrib.add_del intro_add intro_del)
+ "declaration of transfer introduction rule" #>
+ Attrib.setup @{binding transfer_unfold} (Attrib.add_del unfold_add unfold_del)
+ "declaration of transfer unfolding rule" #>
+ Attrib.setup @{binding transfer_refold} (Attrib.add_del refold_add refold_del)
+ "declaration of transfer refolding rule")
end;
--- a/src/HOL/Nat.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Nat.thy Wed Oct 29 19:23:32 2014 +0100
@@ -1599,8 +1599,6 @@
shows "u = s"
using 2 1 by (rule trans)
-setup Arith_Data.setup
-
ML_file "Tools/nat_arith.ML"
simproc_setup nateq_cancel_sums
--- a/src/HOL/Nat_Transfer.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Nat_Transfer.thy Wed Oct 29 19:23:32 2014 +0100
@@ -16,7 +16,6 @@
by (simp add: transfer_morphism_def)
ML_file "Tools/legacy_transfer.ML"
-setup Legacy_Transfer.setup
subsection {* Set up transfer from nat to int *}
--- a/src/HOL/Orderings.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Orderings.thy Wed Oct 29 19:23:32 2014 +0100
@@ -400,6 +400,8 @@
sig
val print_structures: Proof.context -> unit
val order_tac: Proof.context -> thm list -> int -> tactic
+ val add_struct: string * term list -> string -> attribute
+ val del_struct: string * term list -> attribute
end;
structure Orders: ORDERS =
@@ -483,26 +485,24 @@
(* attributes *)
-fun add_struct_thm s tag =
+fun add_struct s tag =
Thm.declaration_attribute
(fn thm => Data.map (AList.map_default struct_eq (s, Order_Tac.empty TrueI) (Order_Tac.update tag thm)));
fun del_struct s =
Thm.declaration_attribute
(fn _ => Data.map (AList.delete struct_eq s));
-val _ =
- Theory.setup
- (Attrib.setup @{binding order}
- (Scan.lift ((Args.add -- Args.name >> (fn (_, s) => SOME s) || Args.del >> K NONE) --|
- Args.colon (* FIXME || Scan.succeed true *) ) -- Scan.lift Args.name --
- Scan.repeat Args.term
- >> (fn ((SOME tag, n), ts) => add_struct_thm (n, ts) tag
- | ((NONE, n), ts) => del_struct (n, ts)))
- "theorems controlling transitivity reasoner");
-
end;
*}
+attribute_setup order = {*
+ Scan.lift ((Args.add -- Args.name >> (fn (_, s) => SOME s) || Args.del >> K NONE) --|
+ Args.colon (* FIXME || Scan.succeed true *) ) -- Scan.lift Args.name --
+ Scan.repeat Args.term
+ >> (fn ((SOME tag, n), ts) => Orders.add_struct (n, ts) tag
+ | ((NONE, n), ts) => Orders.del_struct (n, ts))
+*} "theorems controlling transitivity reasoner"
+
method_setup order = {*
Scan.succeed (fn ctxt => SIMPLE_METHOD' (Orders.order_tac ctxt []))
*} "transitivity reasoner"
--- a/src/HOL/Predicate_Compile.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Predicate_Compile.thy Wed Oct 29 19:23:32 2014 +0100
@@ -21,7 +21,6 @@
ML_file "Tools/Predicate_Compile/predicate_compile_specialisation.ML"
ML_file "Tools/Predicate_Compile/predicate_compile.ML"
-setup Predicate_Compile.setup
subsection {* Set membership as a generator predicate *}
--- a/src/HOL/Presburger.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Presburger.thy Wed Oct 29 19:23:32 2014 +0100
@@ -390,7 +390,6 @@
by (simp cong: conj_cong)
ML_file "Tools/Qelim/cooper.ML"
-setup Cooper.setup
method_setup presburger = {*
let
--- a/src/HOL/Product_Type.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Product_Type.thy Wed Oct 29 19:23:32 2014 +0100
@@ -50,9 +50,7 @@
shows "(CASE True \<equiv> f) &&& (CASE False \<equiv> g)"
using assms by simp_all
-setup {*
- Code.add_case @{thm If_case_cert}
-*}
+setup {* Code.add_case @{thm If_case_cert} *}
code_printing
constant "HOL.equal :: bool \<Rightarrow> bool \<Rightarrow> bool" \<rightharpoonup> (Haskell) infix 4 "=="
@@ -784,7 +782,6 @@
by (simp only: internal_split_def split_conv)
ML_file "Tools/split_rule.ML"
-setup Split_Rule.setup
hide_const internal_split
--- a/src/HOL/Quickcheck_Benchmark/Needham_Schroeder_Base.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Quickcheck_Benchmark/Needham_Schroeder_Base.thy Wed Oct 29 19:23:32 2014 +0100
@@ -194,7 +194,7 @@
declare ListMem_iff[symmetric, code_pred_inline]
declare [[quickcheck_timing]]
-setup {* Exhaustive_Generators.setup_exhaustive_datatype_interpretation *}
+setup Exhaustive_Generators.setup_exhaustive_datatype_interpretation
declare [[quickcheck_full_support = false]]
end
\ No newline at end of file
--- a/src/HOL/Quickcheck_Examples/Completeness.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Quickcheck_Examples/Completeness.thy Wed Oct 29 19:23:32 2014 +0100
@@ -20,7 +20,7 @@
"is_some x \<longleftrightarrow> x \<noteq> None"
by (cases x) simp_all
-setup {* Exhaustive_Generators.setup_exhaustive_datatype_interpretation *}
+setup Exhaustive_Generators.setup_exhaustive_datatype_interpretation
subsection {* Defining the size of values *}
--- a/src/HOL/Quickcheck_Examples/Quickcheck_Examples.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Quickcheck_Examples/Quickcheck_Examples.thy Wed Oct 29 19:23:32 2014 +0100
@@ -544,7 +544,7 @@
text {* with the simple testing scheme *}
-setup {* Exhaustive_Generators.setup_exhaustive_datatype_interpretation *}
+setup Exhaustive_Generators.setup_exhaustive_datatype_interpretation
declare [[quickcheck_full_support = false]]
lemma
--- a/src/HOL/Quickcheck_Examples/Quickcheck_Interfaces.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Quickcheck_Examples/Quickcheck_Interfaces.thy Wed Oct 29 19:23:32 2014 +0100
@@ -11,7 +11,7 @@
First, this requires setting up special generators for all datatypes via the following command.
*}
-setup {* Exhaustive_Generators.setup_bounded_forall_datatype_interpretation *}
+setup Exhaustive_Generators.setup_bounded_forall_datatype_interpretation
text {*
Now, the function Quickcheck.mk_batch_validator : Proof.context -> term list -> (int -> bool) list option
--- a/src/HOL/Quickcheck_Exhaustive.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Quickcheck_Exhaustive.thy Wed Oct 29 19:23:32 2014 +0100
@@ -618,8 +618,6 @@
ML_file "Tools/Quickcheck/exhaustive_generators.ML"
-setup {* Exhaustive_Generators.setup *}
-
declare [[quickcheck_batch_tester = exhaustive]]
subsection {* Defining generators for abstract types *}
--- a/src/HOL/Quickcheck_Narrowing.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Quickcheck_Narrowing.thy Wed Oct 29 19:23:32 2014 +0100
@@ -197,8 +197,6 @@
ML_file "Tools/Quickcheck/narrowing_generators.ML"
-setup {* Narrowing_Generators.setup *}
-
definition narrowing_dummy_partial_term_of :: "('a :: partial_term_of) itself => narrowing_term => term"
where
"narrowing_dummy_partial_term_of = partial_term_of"
--- a/src/HOL/Quickcheck_Random.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Quickcheck_Random.thy Wed Oct 29 19:23:32 2014 +0100
@@ -207,7 +207,6 @@
ML_file "Tools/Quickcheck/quickcheck_common.ML"
ML_file "Tools/Quickcheck/random_generators.ML"
-setup Random_Generators.setup
subsection {* Code setup *}
--- a/src/HOL/Semiring_Normalization.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Semiring_Normalization.thy Wed Oct 29 19:23:32 2014 +0100
@@ -8,8 +8,6 @@
imports Numeral_Simprocs Nat_Transfer
begin
-ML_file "Tools/semiring_normalizer.ML"
-
text {* Prelude *}
class comm_semiring_1_cancel_crossproduct = comm_semiring_1_cancel +
@@ -69,7 +67,7 @@
text {* Semiring normalization proper *}
-setup Semiring_Normalizer.setup
+ML_file "Tools/semiring_normalizer.ML"
context comm_semiring_1
begin
--- a/src/HOL/Statespace/StateSpaceLocale.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Statespace/StateSpaceLocale.thy Wed Oct 29 19:23:32 2014 +0100
@@ -10,7 +10,6 @@
ML_file "state_space.ML"
ML_file "state_fun.ML"
-setup StateFun.setup
text {* For every type that is to be stored in a state space, an
instance of this locale is imported in order to convert the abstract and
--- a/src/HOL/Statespace/state_fun.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Statespace/state_fun.ML Wed Oct 29 19:23:32 2014 +0100
@@ -16,8 +16,6 @@
val ex_lookup_ss : simpset
val lazy_conj_simproc : simproc
val string_eq_simp_tac : Proof.context -> int -> tactic
-
- val setup : theory -> theory
end;
structure StateFun: STATE_FUN =
@@ -107,8 +105,7 @@
(merge_ss (ss1, ss2), merge_ss (ex_ss1, ex_ss2), b1 orelse b2);
);
-val init_state_fun_data =
- Context.theory_map (Data.put (lookup_ss, ex_lookup_ss, false));
+val _ = Theory.setup (Context.theory_map (Data.put (lookup_ss, ex_lookup_ss, false)));
val lookup_simproc =
Simplifier.simproc_global @{theory} "lookup_simp" ["lookup d n (update d' c m v s)"]
@@ -370,29 +367,27 @@
val mk_constr = gen_constr_destr (fn a => fn b => Syntax.const @{const_name Fun.comp} $ a $ b) "";
val mk_destr = gen_constr_destr (fn a => fn b => Syntax.const @{const_name Fun.comp} $ b $ a) "the_";
-
-val statefun_simp_attr = Thm.declaration_attribute (fn thm => fn context =>
- let
- val ctxt = Context.proof_of context;
- val (lookup_ss, ex_lookup_ss, simprocs_active) = Data.get context;
- val (lookup_ss', ex_lookup_ss') =
- (case concl_of thm of
- (_ $ ((Const (@{const_name Ex}, _) $ _))) =>
- (lookup_ss, simpset_map ctxt (Simplifier.add_simp thm) ex_lookup_ss)
- | _ =>
- (simpset_map ctxt (Simplifier.add_simp thm) lookup_ss, ex_lookup_ss));
- val activate_simprocs =
- if simprocs_active then I
- else Simplifier.map_ss (fn ctxt => ctxt addsimprocs [lookup_simproc, update_simproc]);
- in
- context
- |> activate_simprocs
- |> Data.put (lookup_ss', ex_lookup_ss', true)
- end);
-
-val setup =
- init_state_fun_data #>
- Attrib.setup @{binding statefun_simp} (Scan.succeed statefun_simp_attr)
- "simplification in statespaces";
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding statefun_simp}
+ (Scan.succeed (Thm.declaration_attribute (fn thm => fn context =>
+ let
+ val ctxt = Context.proof_of context;
+ val (lookup_ss, ex_lookup_ss, simprocs_active) = Data.get context;
+ val (lookup_ss', ex_lookup_ss') =
+ (case concl_of thm of
+ (_ $ ((Const (@{const_name Ex}, _) $ _))) =>
+ (lookup_ss, simpset_map ctxt (Simplifier.add_simp thm) ex_lookup_ss)
+ | _ =>
+ (simpset_map ctxt (Simplifier.add_simp thm) lookup_ss, ex_lookup_ss));
+ val activate_simprocs =
+ if simprocs_active then I
+ else Simplifier.map_ss (fn ctxt => ctxt addsimprocs [lookup_simproc, update_simproc]);
+ in
+ context
+ |> activate_simprocs
+ |> Data.put (lookup_ss', ex_lookup_ss', true)
+ end)))
+ "simplification in statespaces");
end;
--- a/src/HOL/String.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/String.thy Wed Oct 29 19:23:32 2014 +0100
@@ -126,7 +126,6 @@
"_String" :: "str_position => string" ("_")
ML_file "Tools/string_syntax.ML"
-setup String_Syntax.setup
lemma UNIV_char:
"UNIV = image (split Char) (UNIV \<times> UNIV)"
--- a/src/HOL/Tools/Function/context_tree.ML Wed Oct 29 12:01:39 2014 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,295 +0,0 @@
-(* Title: HOL/Tools/Function/context_tree.ML
- Author: Alexander Krauss, TU Muenchen
-
-Construction and traversal of trees of nested contexts along a term.
-*)
-
-signature FUNCTION_CTXTREE =
-sig
- (* poor man's contexts: fixes + assumes *)
- type ctxt = (string * typ) list * thm list
- type ctx_tree
-
- (* FIXME: This interface is a mess and needs to be cleaned up! *)
- val get_function_congs : Proof.context -> thm list
- val add_function_cong : thm -> Context.generic -> Context.generic
- val map_function_congs : (thm list -> thm list) -> Context.generic -> Context.generic
-
- val cong_add: attribute
- val cong_del: attribute
-
- val mk_tree: (string * typ) -> term -> Proof.context -> term -> ctx_tree
-
- val inst_tree: theory -> term -> term -> ctx_tree -> ctx_tree
-
- val export_term : ctxt -> term -> term
- val export_thm : theory -> ctxt -> thm -> thm
- val import_thm : theory -> ctxt -> thm -> thm
-
- val traverse_tree :
- (ctxt -> term ->
- (ctxt * thm) list ->
- (ctxt * thm) list * 'b ->
- (ctxt * thm) list * 'b)
- -> ctx_tree -> 'b -> 'b
-
- val rewrite_by_tree : Proof.context -> term -> thm -> (thm * thm) list ->
- ctx_tree -> thm * (thm * thm) list
-
- val setup : theory -> theory
-end
-
-structure Function_Ctx_Tree : FUNCTION_CTXTREE =
-struct
-
-type ctxt = (string * typ) list * thm list
-
-open Function_Common
-open Function_Lib
-
-structure FunctionCongs = Generic_Data
-(
- type T = thm list
- val empty = []
- val extend = I
- val merge = Thm.merge_thms
-);
-
-val get_function_congs = FunctionCongs.get o Context.Proof
-val map_function_congs = FunctionCongs.map
-val add_function_cong = FunctionCongs.map o Thm.add_thm
-
-(* congruence rules *)
-
-val cong_add = Thm.declaration_attribute (map_function_congs o Thm.add_thm o safe_mk_meta_eq);
-val cong_del = Thm.declaration_attribute (map_function_congs o Thm.del_thm o safe_mk_meta_eq);
-
-
-type depgraph = int Int_Graph.T
-
-datatype ctx_tree =
- Leaf of term
- | Cong of (thm * depgraph * (ctxt * ctx_tree) list)
- | RCall of (term * ctx_tree)
-
-
-(* Maps "Trueprop A = B" to "A" *)
-val rhs_of = snd o HOLogic.dest_eq o HOLogic.dest_Trueprop
-
-
-(*** Dependency analysis for congruence rules ***)
-
-fun branch_vars t =
- let
- val t' = snd (dest_all_all t)
- val (assumes, concl) = Logic.strip_horn t'
- in
- (fold Term.add_vars assumes [], Term.add_vars concl [])
- end
-
-fun cong_deps crule =
- let
- val num_branches = map_index (apsnd branch_vars) (prems_of crule)
- in
- Int_Graph.empty
- |> fold (fn (i,_)=> Int_Graph.new_node (i,i)) num_branches
- |> fold_product (fn (i, (c1, _)) => fn (j, (_, t2)) =>
- if i = j orelse null (inter (op =) c1 t2)
- then I else Int_Graph.add_edge_acyclic (i,j))
- num_branches num_branches
- end
-
-val default_congs =
- map (fn c => c RS eq_reflection) [@{thm "cong"}, @{thm "ext"}]
-
-(* Called on the INSTANTIATED branches of the congruence rule *)
-fun mk_branch ctxt t =
- let
- val ((params, impl), ctxt') = Variable.focus t ctxt
- val (assms, concl) = Logic.strip_horn impl
- in
- (ctxt', map #2 params, assms, rhs_of concl)
- end
-
-fun find_cong_rule ctxt fvar h ((r,dep)::rs) t =
- (let
- val thy = Proof_Context.theory_of ctxt
-
- val tt' = Logic.mk_equals (Pattern.rewrite_term thy [(Free fvar, h)] [] t, t)
- val (c, subs) = (concl_of r, prems_of r)
-
- val subst =
- Pattern.match (Proof_Context.theory_of ctxt) (c, tt') (Vartab.empty, Vartab.empty)
- val branches = map (mk_branch ctxt o Envir.beta_norm o Envir.subst_term subst) subs
- val inst = map (fn v =>
- (cterm_of thy (Var v), cterm_of thy (Envir.subst_term subst (Var v))))
- (Term.add_vars c [])
- in
- (cterm_instantiate inst r, dep, branches)
- end
- handle Pattern.MATCH => find_cong_rule ctxt fvar h rs t)
- | find_cong_rule _ _ _ [] _ = raise General.Fail "No cong rule found!"
-
-
-fun mk_tree fvar h ctxt t =
- let
- val congs = get_function_congs ctxt
-
- (* FIXME: Save in theory: *)
- val congs_deps = map (fn c => (c, cong_deps c)) (congs @ default_congs)
-
- fun matchcall (a $ b) = if a = Free fvar then SOME b else NONE
- | matchcall _ = NONE
-
- fun mk_tree' ctxt t =
- case matchcall t of
- SOME arg => RCall (t, mk_tree' ctxt arg)
- | NONE =>
- if not (exists_subterm (fn Free v => v = fvar | _ => false) t) then Leaf t
- else
- let
- val (r, dep, branches) = find_cong_rule ctxt fvar h congs_deps t
- fun subtree (ctxt', fixes, assumes, st) =
- ((fixes,
- map (Thm.assume o cterm_of (Proof_Context.theory_of ctxt)) assumes),
- mk_tree' ctxt' st)
- in
- Cong (r, dep, map subtree branches)
- end
- in
- mk_tree' ctxt t
- end
-
-fun inst_tree thy fvar f tr =
- let
- val cfvar = cterm_of thy fvar
- val cf = cterm_of thy f
-
- fun inst_term t =
- subst_bound(f, abstract_over (fvar, t))
-
- val inst_thm = Thm.forall_elim cf o Thm.forall_intr cfvar
-
- fun inst_tree_aux (Leaf t) = Leaf t
- | inst_tree_aux (Cong (crule, deps, branches)) =
- Cong (inst_thm crule, deps, map inst_branch branches)
- | inst_tree_aux (RCall (t, str)) =
- RCall (inst_term t, inst_tree_aux str)
- and inst_branch ((fxs, assms), str) =
- ((fxs, map (Thm.assume o cterm_of thy o inst_term o prop_of) assms),
- inst_tree_aux str)
- in
- inst_tree_aux tr
- end
-
-
-(* Poor man's contexts: Only fixes and assumes *)
-fun compose (fs1, as1) (fs2, as2) = (fs1 @ fs2, as1 @ as2)
-
-fun export_term (fixes, assumes) =
- fold_rev (curry Logic.mk_implies o prop_of) assumes
- #> fold_rev (Logic.all o Free) fixes
-
-fun export_thm thy (fixes, assumes) =
- fold_rev (Thm.implies_intr o cprop_of) assumes
- #> fold_rev (Thm.forall_intr o cterm_of thy o Free) fixes
-
-fun import_thm thy (fixes, athms) =
- fold (Thm.forall_elim o cterm_of thy o Free) fixes
- #> fold Thm.elim_implies athms
-
-
-(* folds in the order of the dependencies of a graph. *)
-fun fold_deps G f x =
- let
- fun fill_table i (T, x) =
- case Inttab.lookup T i of
- SOME _ => (T, x)
- | NONE =>
- let
- val (T', x') = Int_Graph.Keys.fold fill_table (Int_Graph.imm_succs G i) (T, x)
- val (v, x'') = f (the o Inttab.lookup T') i x'
- in
- (Inttab.update (i, v) T', x'')
- end
-
- val (T, x) = fold fill_table (Int_Graph.keys G) (Inttab.empty, x)
- in
- (Inttab.fold (cons o snd) T [], x)
- end
-
-fun traverse_tree rcOp tr =
- let
- fun traverse_help ctxt (Leaf _) _ x = ([], x)
- | traverse_help ctxt (RCall (t, st)) u x =
- rcOp ctxt t u (traverse_help ctxt st u x)
- | traverse_help ctxt (Cong (_, deps, branches)) u x =
- let
- fun sub_step lu i x =
- let
- val (ctxt', subtree) = nth branches i
- val used = Int_Graph.Keys.fold_rev (append o lu) (Int_Graph.imm_succs deps i) u
- val (subs, x') = traverse_help (compose ctxt ctxt') subtree used x
- val exported_subs = map (apfst (compose ctxt')) subs (* FIXME: Right order of composition? *)
- in
- (exported_subs, x')
- end
- in
- fold_deps deps sub_step x
- |> apfst flat
- end
- in
- snd o traverse_help ([], []) tr []
- end
-
-fun rewrite_by_tree ctxt h ih x tr =
- let
- val thy = Proof_Context.theory_of ctxt
- fun rewrite_help _ _ x (Leaf t) = (Thm.reflexive (cterm_of thy t), x)
- | rewrite_help fix h_as x (RCall (_ $ arg, st)) =
- let
- val (inner, (lRi,ha)::x') = rewrite_help fix h_as x st (* "a' = a" *)
-
- val iha = import_thm thy (fix, h_as) ha (* (a', h a') : G *)
- |> Conv.fconv_rule (Conv.arg_conv (Conv.comb_conv (Conv.arg_conv (K inner))))
- (* (a, h a) : G *)
- val inst_ih = instantiate' [] [SOME (cterm_of thy arg)] ih
- val eq = Thm.implies_elim (Thm.implies_elim inst_ih lRi) iha (* h a = f a *)
-
- val h_a'_eq_h_a = Thm.combination (Thm.reflexive (cterm_of thy h)) inner
- val h_a_eq_f_a = eq RS eq_reflection
- val result = Thm.transitive h_a'_eq_h_a h_a_eq_f_a
- in
- (result, x')
- end
- | rewrite_help fix h_as x (Cong (crule, deps, branches)) =
- let
- fun sub_step lu i x =
- let
- val ((fixes, assumes), st) = nth branches i
- val used = map lu (Int_Graph.immediate_succs deps i)
- |> map (fn u_eq => (u_eq RS sym) RS eq_reflection)
- |> filter_out Thm.is_reflexive
-
- val assumes' = map (simplify (put_simpset HOL_basic_ss ctxt addsimps used)) assumes
-
- val (subeq, x') =
- rewrite_help (fix @ fixes) (h_as @ assumes') x st
- val subeq_exp =
- export_thm thy (fixes, assumes) (subeq RS meta_eq_to_obj_eq)
- in
- (subeq_exp, x')
- end
- val (subthms, x') = fold_deps deps sub_step x
- in
- (fold_rev (curry op COMP) subthms crule, x')
- end
- in
- rewrite_help [] [] x tr
- end
-
-val setup =
- Attrib.setup @{binding fundef_cong} (Attrib.add_del cong_add cong_del)
- "declaration of congruence rule for function definitions"
-
-end
--- a/src/HOL/Tools/Function/fun.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Function/fun.ML Wed Oct 29 19:23:32 2014 +0100
@@ -14,8 +14,6 @@
val add_fun_cmd : (binding * string option * mixfix) list ->
(Attrib.binding * string) list -> Function_Common.function_config ->
bool -> local_theory -> Proof.context
-
- val setup : theory -> theory
end
structure Function_Fun : FUNCTION_FUN =
@@ -148,8 +146,8 @@
else
Function_Common.empty_preproc check_defs config ctxt fixes spec
-val setup =
- Context.theory_map (Function_Common.set_preproc sequential_preproc)
+val _ = Theory.setup (Context.theory_map (Function_Common.set_preproc sequential_preproc))
+
val fun_config = FunctionConfig { sequential=true, default=NONE,
--- a/src/HOL/Tools/Function/function.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Function/function.ML Wed Oct 29 19:23:32 2014 +0100
@@ -32,7 +32,6 @@
val termination : term option -> local_theory -> Proof.state
val termination_cmd : string option -> local_theory -> Proof.state
- val setup : theory -> theory
val get_congs : Proof.context -> thm list
val get_info : Proof.context -> term -> info
@@ -264,24 +263,21 @@
(* Datatype hook to declare datatype congs as "function_congs" *)
-
fun add_case_cong n thy =
let
val cong = #case_cong (Old_Datatype_Data.the_info thy n)
|> safe_mk_meta_eq
in
Context.theory_map
- (Function_Ctx_Tree.map_function_congs (Thm.add_thm cong)) thy
+ (Function_Context_Tree.map_function_congs (Thm.add_thm cong)) thy
end
-val setup_case_cong = Old_Datatype_Data.interpretation (K (fold add_case_cong))
+val _ = Theory.setup (Old_Datatype_Data.interpretation (K (fold add_case_cong)))
-(* setup *)
+(* get info *)
-val setup = setup_case_cong
-
-val get_congs = Function_Ctx_Tree.get_function_congs
+val get_congs = Function_Context_Tree.get_function_congs
fun get_info ctxt t = Item_Net.retrieve (get_function ctxt) t
|> the_single |> snd
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Function/function_context_tree.ML Wed Oct 29 19:23:32 2014 +0100
@@ -0,0 +1,289 @@
+(* Title: HOL/Tools/Function/function_context_tree.ML
+ Author: Alexander Krauss, TU Muenchen
+
+Construction and traversal of trees of nested contexts along a term.
+*)
+
+signature FUNCTION_CONTEXT_TREE =
+sig
+ (* poor man's contexts: fixes + assumes *)
+ type ctxt = (string * typ) list * thm list
+ type ctx_tree
+
+ (* FIXME: This interface is a mess and needs to be cleaned up! *)
+ val get_function_congs : Proof.context -> thm list
+ val add_function_cong : thm -> Context.generic -> Context.generic
+ val map_function_congs : (thm list -> thm list) -> Context.generic -> Context.generic
+
+ val cong_add: attribute
+ val cong_del: attribute
+
+ val mk_tree: (string * typ) -> term -> Proof.context -> term -> ctx_tree
+
+ val inst_tree: theory -> term -> term -> ctx_tree -> ctx_tree
+
+ val export_term : ctxt -> term -> term
+ val export_thm : theory -> ctxt -> thm -> thm
+ val import_thm : theory -> ctxt -> thm -> thm
+
+ val traverse_tree :
+ (ctxt -> term ->
+ (ctxt * thm) list ->
+ (ctxt * thm) list * 'b ->
+ (ctxt * thm) list * 'b)
+ -> ctx_tree -> 'b -> 'b
+
+ val rewrite_by_tree : Proof.context -> term -> thm -> (thm * thm) list ->
+ ctx_tree -> thm * (thm * thm) list
+end
+
+structure Function_Context_Tree : FUNCTION_CONTEXT_TREE =
+struct
+
+type ctxt = (string * typ) list * thm list
+
+open Function_Common
+open Function_Lib
+
+structure FunctionCongs = Generic_Data
+(
+ type T = thm list
+ val empty = []
+ val extend = I
+ val merge = Thm.merge_thms
+);
+
+val get_function_congs = FunctionCongs.get o Context.Proof
+val map_function_congs = FunctionCongs.map
+val add_function_cong = FunctionCongs.map o Thm.add_thm
+
+(* congruence rules *)
+
+val cong_add = Thm.declaration_attribute (map_function_congs o Thm.add_thm o safe_mk_meta_eq);
+val cong_del = Thm.declaration_attribute (map_function_congs o Thm.del_thm o safe_mk_meta_eq);
+
+
+type depgraph = int Int_Graph.T
+
+datatype ctx_tree =
+ Leaf of term
+ | Cong of (thm * depgraph * (ctxt * ctx_tree) list)
+ | RCall of (term * ctx_tree)
+
+
+(* Maps "Trueprop A = B" to "A" *)
+val rhs_of = snd o HOLogic.dest_eq o HOLogic.dest_Trueprop
+
+
+(*** Dependency analysis for congruence rules ***)
+
+fun branch_vars t =
+ let
+ val t' = snd (dest_all_all t)
+ val (assumes, concl) = Logic.strip_horn t'
+ in
+ (fold Term.add_vars assumes [], Term.add_vars concl [])
+ end
+
+fun cong_deps crule =
+ let
+ val num_branches = map_index (apsnd branch_vars) (prems_of crule)
+ in
+ Int_Graph.empty
+ |> fold (fn (i,_)=> Int_Graph.new_node (i,i)) num_branches
+ |> fold_product (fn (i, (c1, _)) => fn (j, (_, t2)) =>
+ if i = j orelse null (inter (op =) c1 t2)
+ then I else Int_Graph.add_edge_acyclic (i,j))
+ num_branches num_branches
+ end
+
+val default_congs =
+ map (fn c => c RS eq_reflection) [@{thm "cong"}, @{thm "ext"}]
+
+(* Called on the INSTANTIATED branches of the congruence rule *)
+fun mk_branch ctxt t =
+ let
+ val ((params, impl), ctxt') = Variable.focus t ctxt
+ val (assms, concl) = Logic.strip_horn impl
+ in
+ (ctxt', map #2 params, assms, rhs_of concl)
+ end
+
+fun find_cong_rule ctxt fvar h ((r,dep)::rs) t =
+ (let
+ val thy = Proof_Context.theory_of ctxt
+
+ val tt' = Logic.mk_equals (Pattern.rewrite_term thy [(Free fvar, h)] [] t, t)
+ val (c, subs) = (concl_of r, prems_of r)
+
+ val subst =
+ Pattern.match (Proof_Context.theory_of ctxt) (c, tt') (Vartab.empty, Vartab.empty)
+ val branches = map (mk_branch ctxt o Envir.beta_norm o Envir.subst_term subst) subs
+ val inst = map (fn v =>
+ (cterm_of thy (Var v), cterm_of thy (Envir.subst_term subst (Var v))))
+ (Term.add_vars c [])
+ in
+ (cterm_instantiate inst r, dep, branches)
+ end
+ handle Pattern.MATCH => find_cong_rule ctxt fvar h rs t)
+ | find_cong_rule _ _ _ [] _ = raise General.Fail "No cong rule found!"
+
+
+fun mk_tree fvar h ctxt t =
+ let
+ val congs = get_function_congs ctxt
+
+ (* FIXME: Save in theory: *)
+ val congs_deps = map (fn c => (c, cong_deps c)) (congs @ default_congs)
+
+ fun matchcall (a $ b) = if a = Free fvar then SOME b else NONE
+ | matchcall _ = NONE
+
+ fun mk_tree' ctxt t =
+ case matchcall t of
+ SOME arg => RCall (t, mk_tree' ctxt arg)
+ | NONE =>
+ if not (exists_subterm (fn Free v => v = fvar | _ => false) t) then Leaf t
+ else
+ let
+ val (r, dep, branches) = find_cong_rule ctxt fvar h congs_deps t
+ fun subtree (ctxt', fixes, assumes, st) =
+ ((fixes,
+ map (Thm.assume o cterm_of (Proof_Context.theory_of ctxt)) assumes),
+ mk_tree' ctxt' st)
+ in
+ Cong (r, dep, map subtree branches)
+ end
+ in
+ mk_tree' ctxt t
+ end
+
+fun inst_tree thy fvar f tr =
+ let
+ val cfvar = cterm_of thy fvar
+ val cf = cterm_of thy f
+
+ fun inst_term t =
+ subst_bound(f, abstract_over (fvar, t))
+
+ val inst_thm = Thm.forall_elim cf o Thm.forall_intr cfvar
+
+ fun inst_tree_aux (Leaf t) = Leaf t
+ | inst_tree_aux (Cong (crule, deps, branches)) =
+ Cong (inst_thm crule, deps, map inst_branch branches)
+ | inst_tree_aux (RCall (t, str)) =
+ RCall (inst_term t, inst_tree_aux str)
+ and inst_branch ((fxs, assms), str) =
+ ((fxs, map (Thm.assume o cterm_of thy o inst_term o prop_of) assms),
+ inst_tree_aux str)
+ in
+ inst_tree_aux tr
+ end
+
+
+(* Poor man's contexts: Only fixes and assumes *)
+fun compose (fs1, as1) (fs2, as2) = (fs1 @ fs2, as1 @ as2)
+
+fun export_term (fixes, assumes) =
+ fold_rev (curry Logic.mk_implies o prop_of) assumes
+ #> fold_rev (Logic.all o Free) fixes
+
+fun export_thm thy (fixes, assumes) =
+ fold_rev (Thm.implies_intr o cprop_of) assumes
+ #> fold_rev (Thm.forall_intr o cterm_of thy o Free) fixes
+
+fun import_thm thy (fixes, athms) =
+ fold (Thm.forall_elim o cterm_of thy o Free) fixes
+ #> fold Thm.elim_implies athms
+
+
+(* folds in the order of the dependencies of a graph. *)
+fun fold_deps G f x =
+ let
+ fun fill_table i (T, x) =
+ case Inttab.lookup T i of
+ SOME _ => (T, x)
+ | NONE =>
+ let
+ val (T', x') = Int_Graph.Keys.fold fill_table (Int_Graph.imm_succs G i) (T, x)
+ val (v, x'') = f (the o Inttab.lookup T') i x'
+ in
+ (Inttab.update (i, v) T', x'')
+ end
+
+ val (T, x) = fold fill_table (Int_Graph.keys G) (Inttab.empty, x)
+ in
+ (Inttab.fold (cons o snd) T [], x)
+ end
+
+fun traverse_tree rcOp tr =
+ let
+ fun traverse_help ctxt (Leaf _) _ x = ([], x)
+ | traverse_help ctxt (RCall (t, st)) u x =
+ rcOp ctxt t u (traverse_help ctxt st u x)
+ | traverse_help ctxt (Cong (_, deps, branches)) u x =
+ let
+ fun sub_step lu i x =
+ let
+ val (ctxt', subtree) = nth branches i
+ val used = Int_Graph.Keys.fold_rev (append o lu) (Int_Graph.imm_succs deps i) u
+ val (subs, x') = traverse_help (compose ctxt ctxt') subtree used x
+ val exported_subs = map (apfst (compose ctxt')) subs (* FIXME: Right order of composition? *)
+ in
+ (exported_subs, x')
+ end
+ in
+ fold_deps deps sub_step x
+ |> apfst flat
+ end
+ in
+ snd o traverse_help ([], []) tr []
+ end
+
+fun rewrite_by_tree ctxt h ih x tr =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ fun rewrite_help _ _ x (Leaf t) = (Thm.reflexive (cterm_of thy t), x)
+ | rewrite_help fix h_as x (RCall (_ $ arg, st)) =
+ let
+ val (inner, (lRi,ha)::x') = rewrite_help fix h_as x st (* "a' = a" *)
+
+ val iha = import_thm thy (fix, h_as) ha (* (a', h a') : G *)
+ |> Conv.fconv_rule (Conv.arg_conv (Conv.comb_conv (Conv.arg_conv (K inner))))
+ (* (a, h a) : G *)
+ val inst_ih = instantiate' [] [SOME (cterm_of thy arg)] ih
+ val eq = Thm.implies_elim (Thm.implies_elim inst_ih lRi) iha (* h a = f a *)
+
+ val h_a'_eq_h_a = Thm.combination (Thm.reflexive (cterm_of thy h)) inner
+ val h_a_eq_f_a = eq RS eq_reflection
+ val result = Thm.transitive h_a'_eq_h_a h_a_eq_f_a
+ in
+ (result, x')
+ end
+ | rewrite_help fix h_as x (Cong (crule, deps, branches)) =
+ let
+ fun sub_step lu i x =
+ let
+ val ((fixes, assumes), st) = nth branches i
+ val used = map lu (Int_Graph.immediate_succs deps i)
+ |> map (fn u_eq => (u_eq RS sym) RS eq_reflection)
+ |> filter_out Thm.is_reflexive
+
+ val assumes' = map (simplify (put_simpset HOL_basic_ss ctxt addsimps used)) assumes
+
+ val (subeq, x') =
+ rewrite_help (fix @ fixes) (h_as @ assumes') x st
+ val subeq_exp =
+ export_thm thy (fixes, assumes) (subeq RS meta_eq_to_obj_eq)
+ in
+ (subeq_exp, x')
+ end
+ val (subthms, x') = fold_deps deps sub_step x
+ in
+ (fold_rev (curry op COMP) subthms crule, x')
+ end
+ in
+ rewrite_help [] [] x tr
+ end
+
+end
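
Note (illustration only, not part of the changeset): Function_Context_Tree.traverse_tree folds an operation over the recursive-call sites (RCall nodes) of a context tree while threading an accumulator; function_core.ML below uses it to collect and to discharge recursive-call hypotheses. A hedged minimal client that merely counts the call sites, with count_calls as a placeholder name:

  (* increment the accumulator once per RCall node, ignoring context and hypotheses *)
  fun count_calls tree =
    Function_Context_Tree.traverse_tree
      (fn _ => fn _ => fn _ => fn (_, n) => ([], n + 1)) tree 0;
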
--- a/src/HOL/Tools/Function/function_core.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Function/function_core.ML Wed Oct 29 19:23:32 2014 +0100
@@ -73,7 +73,7 @@
{no: int,
qglr : ((string * typ) list * term list * term * term),
cdata : clause_context,
- tree: Function_Ctx_Tree.ctx_tree,
+ tree: Function_Context_Tree.ctx_tree,
lGI: thm,
RCs: rec_call_info list}
@@ -99,7 +99,7 @@
([], (fixes, assumes, arg) :: xs)
| add_Ri _ _ _ _ = raise Match
in
- rev (Function_Ctx_Tree.traverse_tree add_Ri tree [])
+ rev (Function_Context_Tree.traverse_tree add_Ri tree [])
end
@@ -277,7 +277,7 @@
Thm.assume (cterm_of thy (subst_bounds (rev qs, h_assum)))) RCs
val (eql, _) =
- Function_Ctx_Tree.rewrite_by_tree ctxt h ih_elim_case (Ris ~~ h_assums) tree
+ Function_Context_Tree.rewrite_by_tree ctxt h ih_elim_case (Ris ~~ h_assums) tree
val replace_lemma = (eql RS meta_eq_to_obj_eq)
|> Thm.implies_intr (cprop_of case_hyp)
@@ -733,11 +733,11 @@
fun step (fixes, assumes) (_ $ arg) u (sub,(hyps,thms)) =
let
val used = (u @ sub)
- |> map (fn (ctxt, thm) => Function_Ctx_Tree.export_thm thy ctxt thm)
+ |> map (fn (ctxt, thm) => Function_Context_Tree.export_thm thy ctxt thm)
val hyp = HOLogic.mk_Trueprop (R' $ arg $ lhs)
|> fold_rev (curry Logic.mk_implies o prop_of) used (* additional hyps *)
- |> Function_Ctx_Tree.export_term (fixes, assumes)
+ |> Function_Context_Tree.export_term (fixes, assumes)
|> fold_rev (curry Logic.mk_implies o prop_of) ags
|> fold_rev mk_forall_rename (map fst oqs ~~ qs)
|> cterm_of thy
@@ -745,7 +745,7 @@
val thm = Thm.assume hyp
|> fold Thm.forall_elim cqs
|> fold Thm.elim_implies ags
- |> Function_Ctx_Tree.import_thm thy (fixes, assumes)
+ |> Function_Context_Tree.import_thm thy (fixes, assumes)
|> fold Thm.elim_implies used (* "(arg, lhs) : R'" *)
val z_eq_arg = HOLogic.mk_Trueprop (mk_eq (z, arg))
@@ -757,7 +757,7 @@
(Conv.arg_conv (Conv.arg_conv (K (Thm.symmetric (z_eq_arg RS eq_reflection)))))
val ethm = z_acc_local
- |> Function_Ctx_Tree.export_thm thy (fixes,
+ |> Function_Context_Tree.export_thm thy (fixes,
z_eq_arg :: case_hyp :: ags @ assumes)
|> fold_rev forall_intr_rename (map fst oqs ~~ cqs)
@@ -767,7 +767,7 @@
end
| step _ _ _ _ = raise Match
in
- Function_Ctx_Tree.traverse_tree step tree
+ Function_Context_Tree.traverse_tree step tree
end
@@ -846,7 +846,7 @@
val n = length abstract_qglrs
fun build_tree (ClauseContext { ctxt, rhs, ...}) =
- Function_Ctx_Tree.mk_tree (fname, fT) h ctxt rhs
+ Function_Context_Tree.mk_tree (fname, fT) h ctxt rhs
val trees = map build_tree clauses
val RCss = map find_calls trees
@@ -858,7 +858,7 @@
PROFILE "def_fun" (define_function (defname ^ "_sumC_def") (fname, mixfix) domT ranT G default) lthy
val RCss = map (map (inst_RC (Proof_Context.theory_of lthy) fvar f)) RCss
- val trees = map (Function_Ctx_Tree.inst_tree (Proof_Context.theory_of lthy) fvar f) trees
+ val trees = map (Function_Context_Tree.inst_tree (Proof_Context.theory_of lthy) fvar f) trees
val ((R, RIntro_thmss, R_elim), lthy) =
PROFILE "def_rel" (define_recursion_relation (rel_name defname) domT abstract_qglrs clauses RCss) lthy
--- a/src/HOL/Tools/Function/lexicographic_order.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Function/lexicographic_order.ML Wed Oct 29 19:23:32 2014 +0100
@@ -9,7 +9,6 @@
sig
val lex_order_tac : bool -> Proof.context -> tactic -> tactic
val lexicographic_order_tac : bool -> Proof.context -> tactic
- val setup: theory -> theory
end
structure Lexicographic_Order : LEXICOGRAPHIC_ORDER =
@@ -214,7 +213,9 @@
lex_order_tac quiet ctxt
(auto_tac (ctxt addsimps (Named_Theorems.get ctxt @{named_theorems termination_simp})))
-val setup =
- Context.theory_map (Function_Common.set_termination_prover (lexicographic_order_tac false))
+val _ =
+ Theory.setup
+ (Context.theory_map
+ (Function_Common.set_termination_prover (lexicographic_order_tac false)))
end;
--- a/src/HOL/Tools/Function/scnp_reconstruct.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Function/scnp_reconstruct.ML Wed Oct 29 19:23:32 2014 +0100
@@ -7,13 +7,10 @@
signature SCNP_RECONSTRUCT =
sig
-
val sizechange_tac : Proof.context -> tactic -> tactic
val decomp_scnp_tac : ScnpSolve.label list -> Proof.context -> tactic
- val setup : theory -> theory
-
datatype multiset_setup =
Multiset of
{
@@ -30,9 +27,7 @@
reduction_pair : thm
}
-
val multiset_setup : multiset_setup -> theory -> theory
-
end
structure ScnpReconstruct : SCNP_RECONSTRUCT =
@@ -45,6 +40,7 @@
val natT = HOLogic.natT
val nat_pairT = HOLogic.mk_prodT (natT, natT)
+
(* Theory dependencies *)
datatype multiset_setup =
@@ -74,26 +70,24 @@
val multiset_setup = Multiset_Setup.put o SOME
fun undef _ = error "undef"
+
fun get_multiset_setup thy = Multiset_Setup.get thy
|> the_default (Multiset
-{ msetT = undef, mk_mset=undef,
- mset_regroup_conv=undef, mset_member_tac = undef,
- mset_nonempty_tac = undef, mset_pwleq_tac = undef,
- set_of_simps = [],reduction_pair = refl,
- smsI'=refl, wmsI2''=refl, wmsI1=refl })
+ { msetT = undef, mk_mset=undef,
+ mset_regroup_conv=undef, mset_member_tac = undef,
+ mset_nonempty_tac = undef, mset_pwleq_tac = undef,
+ set_of_simps = [],reduction_pair = refl,
+ smsI'=refl, wmsI2''=refl, wmsI1=refl })
fun order_rpair _ MAX = @{thm max_rpair_set}
| order_rpair msrp MS = msrp
| order_rpair _ MIN = @{thm min_rpair_set}
-fun ord_intros_max true =
- (@{thm smax_emptyI}, @{thm smax_insertI})
- | ord_intros_max false =
- (@{thm wmax_emptyI}, @{thm wmax_insertI})
-fun ord_intros_min true =
- (@{thm smin_emptyI}, @{thm smin_insertI})
- | ord_intros_min false =
- (@{thm wmin_emptyI}, @{thm wmin_insertI})
+fun ord_intros_max true = (@{thm smax_emptyI}, @{thm smax_insertI})
+ | ord_intros_max false = (@{thm wmax_emptyI}, @{thm wmax_insertI})
+
+fun ord_intros_min true = (@{thm smin_emptyI}, @{thm smin_insertI})
+ | ord_intros_min false = (@{thm wmin_emptyI}, @{thm wmin_insertI})
fun gen_probl D cs =
let
@@ -131,6 +125,7 @@
THEN etac @{thm ssubst} 1
THEN Local_Defs.unfold_tac ctxt @{thms split_conv triv_forall_equality sum.case}
+
(* Sets *)
val setT = HOLogic.mk_setT
@@ -154,20 +149,20 @@
val Multiset
{ msetT, mk_mset,
mset_regroup_conv, mset_pwleq_tac, set_of_simps,
- smsI', wmsI2'', wmsI1, reduction_pair=ms_rp, ...}
+ smsI', wmsI2'', wmsI1, reduction_pair=ms_rp, ...}
= get_multiset_setup thy
fun measure_fn p = nth (Termination.get_measures D p)
fun get_desc_thm cidx m1 m2 bStrict =
- case Termination.get_descent D (nth cs cidx) m1 m2
- of SOME (Termination.Less thm) =>
+ (case Termination.get_descent D (nth cs cidx) m1 m2 of
+ SOME (Termination.Less thm) =>
if bStrict then thm
else (thm COMP (Thm.lift_rule (cprop_of thm) @{thm less_imp_le}))
- | SOME (Termination.LessEq (thm, _)) =>
+ | SOME (Termination.LessEq (thm, _)) =>
if not bStrict then thm
else raise Fail "get_desc_thm"
- | _ => raise Fail "get_desc_thm"
+ | _ => raise Fail "get_desc_thm")
val (label, lev, sl, covering) = certificate
@@ -184,7 +179,8 @@
(not tag_flag)
|> Conv.fconv_rule (Thm.beta_conversion true)
- val rule = if strict
+ val rule =
+ if strict
then if b < a then @{thm pair_lessI2} else @{thm pair_lessI1}
else if b <= a then @{thm pair_leqI2} else @{thm pair_leqI1}
in
@@ -193,72 +189,72 @@
end
fun steps_tac MAX strict lq lp =
- let
- val (empty, step) = ord_intros_max strict
- in
- if length lq = 0
- then rtac empty 1 THEN set_finite_tac 1
- THEN (if strict then set_nonempty_tac 1 else all_tac)
- else
let
- val (j, b) :: rest = lq
- val (i, a) = the (covering g strict j)
- fun choose xs = set_member_tac (Library.find_index (curry op = (i, a)) xs) 1
- val solve_tac = choose lp THEN less_proof strict (j, b) (i, a)
+ val (empty, step) = ord_intros_max strict
in
- rtac step 1 THEN solve_tac THEN steps_tac MAX strict rest lp
+ if length lq = 0
+ then rtac empty 1 THEN set_finite_tac 1
+ THEN (if strict then set_nonempty_tac 1 else all_tac)
+ else
+ let
+ val (j, b) :: rest = lq
+ val (i, a) = the (covering g strict j)
+ fun choose xs = set_member_tac (Library.find_index (curry op = (i, a)) xs) 1
+ val solve_tac = choose lp THEN less_proof strict (j, b) (i, a)
+ in
+ rtac step 1 THEN solve_tac THEN steps_tac MAX strict rest lp
+ end
end
- end
| steps_tac MIN strict lq lp =
- let
- val (empty, step) = ord_intros_min strict
- in
- if length lp = 0
- then rtac empty 1
- THEN (if strict then set_nonempty_tac 1 else all_tac)
- else
let
- val (i, a) :: rest = lp
- val (j, b) = the (covering g strict i)
- fun choose xs = set_member_tac (Library.find_index (curry op = (j, b)) xs) 1
- val solve_tac = choose lq THEN less_proof strict (j, b) (i, a)
+ val (empty, step) = ord_intros_min strict
in
- rtac step 1 THEN solve_tac THEN steps_tac MIN strict lq rest
+ if length lp = 0
+ then rtac empty 1
+ THEN (if strict then set_nonempty_tac 1 else all_tac)
+ else
+ let
+ val (i, a) :: rest = lp
+ val (j, b) = the (covering g strict i)
+ fun choose xs = set_member_tac (Library.find_index (curry op = (j, b)) xs) 1
+ val solve_tac = choose lq THEN less_proof strict (j, b) (i, a)
+ in
+ rtac step 1 THEN solve_tac THEN steps_tac MIN strict lq rest
+ end
end
- end
| steps_tac MS strict lq lp =
- let
- fun get_str_cover (j, b) =
- if is_some (covering g true j) then SOME (j, b) else NONE
- fun get_wk_cover (j, b) = the (covering g false j)
+ let
+ fun get_str_cover (j, b) =
+ if is_some (covering g true j) then SOME (j, b) else NONE
+ fun get_wk_cover (j, b) = the (covering g false j)
- val qs = subtract (op =) (map_filter get_str_cover lq) lq
- val ps = map get_wk_cover qs
+ val qs = subtract (op =) (map_filter get_str_cover lq) lq
+ val ps = map get_wk_cover qs
- fun indices xs ys = map (fn y => Library.find_index (curry op = y) xs) ys
- val iqs = indices lq qs
- val ips = indices lp ps
+ fun indices xs ys = map (fn y => Library.find_index (curry op = y) xs) ys
+ val iqs = indices lq qs
+ val ips = indices lp ps
- local open Conv in
- fun t_conv a C =
- params_conv ~1 (K ((concl_conv ~1 o arg_conv o arg1_conv o a) C)) ctxt
- val goal_rewrite =
- t_conv arg1_conv (mset_regroup_conv iqs)
- then_conv t_conv arg_conv (mset_regroup_conv ips)
- end
- in
- CONVERSION goal_rewrite 1
- THEN (if strict then rtac smsI' 1
- else if qs = lq then rtac wmsI2'' 1
- else rtac wmsI1 1)
- THEN mset_pwleq_tac 1
- THEN EVERY (map2 (less_proof false) qs ps)
- THEN (if strict orelse qs <> lq
- then Local_Defs.unfold_tac ctxt set_of_simps
- THEN steps_tac MAX true
- (subtract (op =) qs lq) (subtract (op =) ps lp)
- else all_tac)
- end
+ local open Conv in
+ fun t_conv a C =
+ params_conv ~1 (K ((concl_conv ~1 o arg_conv o arg1_conv o a) C)) ctxt
+ val goal_rewrite =
+ t_conv arg1_conv (mset_regroup_conv iqs)
+ then_conv t_conv arg_conv (mset_regroup_conv ips)
+ end
+ in
+ CONVERSION goal_rewrite 1
+ THEN (if strict then rtac smsI' 1
+ else if qs = lq then rtac wmsI2'' 1
+ else rtac wmsI1 1)
+ THEN mset_pwleq_tac 1
+ THEN EVERY (map2 (less_proof false) qs ps)
+ THEN (if strict orelse qs <> lq
+ then Local_Defs.unfold_tac ctxt set_of_simps
+ THEN steps_tac MAX true
+ (subtract (op =) qs lq) (subtract (op =) ps lp)
+ else all_tac)
+ end
in
rem_inv_img ctxt
THEN steps_tac label strict (nth lev q) (nth lev p)
@@ -270,8 +266,8 @@
HOLogic.pair_const natT natT $
(measure_fn p i $ Bound 0) $ HOLogic.mk_number natT tag
- fun pt_lev (p, lm) = Abs ("x", Termination.get_types D p,
- mk_set nat_pairT (map (tag_pair p) lm))
+ fun pt_lev (p, lm) =
+ Abs ("x", Termination.get_types D p, mk_set nat_pairT (map (tag_pair p) lm))
val level_mapping =
map_index pt_lev lev
@@ -295,36 +291,32 @@
fun single_scnp_tac use_tags orders ctxt D = Termination.CALLS (fn (cs, i) =>
let
val ms_configured = is_some (Multiset_Setup.get (Proof_Context.theory_of ctxt))
- val orders' = if ms_configured then orders
- else filter_out (curry op = MS) orders
+ val orders' =
+ if ms_configured then orders
+ else filter_out (curry op = MS) orders
val gp = gen_probl D cs
val certificate = generate_certificate use_tags orders' gp
in
- case certificate
- of NONE => no_tac
- | SOME cert =>
- SELECT_GOAL (reconstruct_tac ctxt D cs gp cert) i
- THEN TRY (rtac @{thm wf_empty} i)
+ (case certificate of
+ NONE => no_tac
+ | SOME cert =>
+ SELECT_GOAL (reconstruct_tac ctxt D cs gp cert) i
+ THEN TRY (rtac @{thm wf_empty} i))
end)
-local open Termination in
-
fun gen_decomp_scnp_tac orders autom_tac ctxt =
-TERMINATION ctxt autom_tac (fn D =>
- let
- val decompose = decompose_tac D
- val scnp_full = single_scnp_tac true orders ctxt D
- in
- REPEAT_ALL_NEW (scnp_full ORELSE' decompose)
- end)
-end
+ Termination.TERMINATION ctxt autom_tac (fn D =>
+ let
+ val decompose = Termination.decompose_tac D
+ val scnp_full = single_scnp_tac true orders ctxt D
+ in
+ REPEAT_ALL_NEW (scnp_full ORELSE' decompose)
+ end)
fun gen_sizechange_tac orders autom_tac ctxt =
TRY (Function_Common.apply_termination_rule ctxt 1)
THEN TRY (Termination.wf_union_tac ctxt)
- THEN
- (rtac @{thm wf_empty} 1
- ORELSE gen_decomp_scnp_tac orders autom_tac ctxt 1)
+ THEN (rtac @{thm wf_empty} 1 ORELSE gen_decomp_scnp_tac orders autom_tac ctxt 1)
fun sizechange_tac ctxt autom_tac =
gen_sizechange_tac [MAX, MS, MIN] autom_tac ctxt
@@ -347,9 +339,11 @@
(Args.$$$ "ms" >> K MS))
|| Scan.succeed [MAX, MS, MIN]
-val setup = Method.setup @{binding size_change}
- (Scan.lift orders --| Method.sections clasimp_modifiers >>
- (fn orders => SIMPLE_METHOD o decomp_scnp_tac orders))
- "termination prover with graph decomposition and the NP subset of size change termination"
+val _ =
+ Theory.setup
+ (Method.setup @{binding size_change}
+ (Scan.lift orders --| Method.sections clasimp_modifiers >>
+ (fn orders => SIMPLE_METHOD o decomp_scnp_tac orders))
+ "termination prover with graph decomposition and the NP subset of size change termination")
end
--- a/src/HOL/Tools/Lifting/lifting_info.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Lifting/lifting_info.ML Wed Oct 29 19:23:32 2014 +0100
@@ -42,8 +42,6 @@
val get_relator_distr_data : Proof.context -> relator_distr_data Symtab.table
val get_restore_data : Proof.context -> restore_data Symtab.table
val get_no_code_types : Proof.context -> unit Symtab.table
-
- val setup: theory -> theory
end
structure Lifting_Info: LIFTING_INFO =
@@ -194,9 +192,10 @@
Data.map (map_quot_maps (Symtab.update (relatorT_name, minfo))) ctxt
end
-val quot_map_attribute_setup =
- Attrib.setup @{binding quot_map} (Scan.succeed (Thm.declaration_attribute add_quot_map))
- "declaration of the Quotient map theorem"
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding quot_map} (Scan.succeed (Thm.declaration_attribute add_quot_map))
+ "declaration of the Quotient map theorem")
fun print_quot_maps ctxt =
let
@@ -255,9 +254,10 @@
|> Pretty.writeln
end
-val quot_del_attribute_setup =
- Attrib.setup @{binding quot_del} (Scan.succeed (Thm.declaration_attribute delete_quotients))
- "deletes the Quotient theorem"
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding quot_del} (Scan.succeed (Thm.declaration_attribute delete_quotients))
+ "deletes the Quotient theorem")
(* data for restoring Transfer/Lifting context *)
@@ -498,15 +498,16 @@
val lookup_relator_distr_data = Symtab.lookup o get_relator_distr_data
-val relator_distr_attribute_setup =
- Attrib.setup @{binding relator_mono} (Scan.succeed (Thm.declaration_attribute add_mono_rule))
- "declaration of relator's monoticity"
- #> Attrib.setup @{binding relator_distr} (Scan.succeed (Thm.declaration_attribute add_distr_rule))
- "declaration of relator's distributivity over OO"
- #> Global_Theory.add_thms_dynamic
- (@{binding relator_distr_raw}, get_distr_rules_raw)
- #> Global_Theory.add_thms_dynamic
- (@{binding relator_mono_raw}, get_mono_rules_raw)
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding relator_mono} (Scan.succeed (Thm.declaration_attribute add_mono_rule))
+ "declaration of relator's monoticity"
+ #> Attrib.setup @{binding relator_distr} (Scan.succeed (Thm.declaration_attribute add_distr_rule))
+ "declaration of relator's distributivity over OO"
+ #> Global_Theory.add_thms_dynamic
+ (@{binding relator_distr_raw}, get_distr_rules_raw)
+ #> Global_Theory.add_thms_dynamic
+ (@{binding relator_mono_raw}, get_mono_rules_raw))
(* no_code types *)
@@ -515,13 +516,6 @@
fun is_no_code_type ctxt type_name = (Symtab.defined o get_no_code_types) ctxt type_name
-(* theory setup *)
-
-val setup =
- quot_map_attribute_setup
- #> quot_del_attribute_setup
- #> relator_distr_attribute_setup
-
(* setup fixed eq_onp rules *)
val _ = Context.>>
--- a/src/HOL/Tools/Meson/meson_tactic.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Meson/meson_tactic.ML Wed Oct 29 19:23:32 2014 +0100
@@ -8,21 +8,19 @@
signature MESON_TACTIC =
sig
val meson_general_tac : Proof.context -> thm list -> int -> tactic
- val setup: theory -> theory
end;
structure Meson_Tactic : MESON_TACTIC =
struct
-open Meson_Clausify
-
fun meson_general_tac ctxt ths =
let val ctxt' = put_claset HOL_cs ctxt
- in Meson.meson_tac ctxt' (maps (snd o cnf_axiom ctxt' false true 0) ths) end
+ in Meson.meson_tac ctxt' (maps (snd o Meson_Clausify.cnf_axiom ctxt' false true 0) ths) end
-val setup =
- Method.setup @{binding meson} (Attrib.thms >> (fn ths => fn ctxt =>
- SIMPLE_METHOD' (CHANGED_PROP o meson_general_tac ctxt ths)))
- "MESON resolution proof procedure"
+val _ =
+ Theory.setup
+ (Method.setup @{binding meson} (Attrib.thms >> (fn ths => fn ctxt =>
+ SIMPLE_METHOD' (CHANGED_PROP o meson_general_tac ctxt ths)))
+ "MESON resolution proof procedure")
end;
--- a/src/HOL/Tools/Metis/metis_tactic.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Metis/metis_tactic.ML Wed Oct 29 19:23:32 2014 +0100
@@ -18,7 +18,6 @@
val metis_tac : string list -> string -> Proof.context -> thm list -> int -> tactic
val metis_lam_transs : string list
val parse_metis_options : (string list option * string option) parser
- val setup : theory -> theory
end
structure Metis_Tactic : METIS_TACTIC =
@@ -297,9 +296,10 @@
|> (case s' of SOME s' => consider_opt s' | _ => I)))
(NONE, NONE)
-val setup =
- Method.setup @{binding metis}
- (Scan.lift parse_metis_options -- Attrib.thms >> (METHOD oo metis_method))
- "Metis for FOL and HOL problems"
+val _ =
+ Theory.setup
+ (Method.setup @{binding metis}
+ (Scan.lift parse_metis_options -- Attrib.thms >> (METHOD oo metis_method))
+ "Metis for FOL and HOL problems")
end;
--- a/src/HOL/Tools/Predicate_Compile/core_data.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Predicate_Compile/core_data.ML Wed Oct 29 19:23:32 2014 +0100
@@ -27,7 +27,7 @@
needs_random : mode list
};
- structure PredData : THEORY_DATA
+ structure PredData : THEORY_DATA (* FIXME keep data private *)
(* queries *)
val defined_functions : compilation -> Proof.context -> string -> bool
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile.ML Wed Oct 29 19:23:32 2014 +0100
@@ -7,7 +7,6 @@
signature PREDICATE_COMPILE =
sig
- val setup : theory -> theory
val preprocess : Predicate_Compile_Aux.options -> term -> theory -> theory
val present_graph : bool Unsynchronized.ref
val intro_hook : (theory -> thm -> unit) option Unsynchronized.ref
@@ -213,8 +212,6 @@
else Predicate_Compile_Core.code_pred_cmd options raw_const lthy
end
-val setup = Predicate_Compile_Core.setup
-
(* Parser for mode annotations *)
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile_core.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile_core.ML Wed Oct 29 19:23:32 2014 +0100
@@ -12,7 +12,6 @@
type compilation = Predicate_Compile_Aux.compilation
type compilation_funs = Predicate_Compile_Aux.compilation_funs
- val setup : theory -> theory
val code_pred : options -> string -> Proof.context -> Proof.state
val code_pred_cmd : options -> string -> Proof.context -> Proof.state
val values_cmd : string list -> mode option list option ->
@@ -1615,10 +1614,11 @@
val values_timeout =
Attrib.setup_config_real @{binding values_timeout} (K default_values_timeout)
-val setup =
- PredData.put (Graph.empty) #>
- Attrib.setup @{binding code_pred_intro} (Scan.lift (Scan.option Args.name) >> attrib' add_intro)
- "adding alternative introduction rules for code generation of inductive predicates"
+val _ =
+ Theory.setup
+ (PredData.put (Graph.empty) #>
+ Attrib.setup @{binding code_pred_intro} (Scan.lift (Scan.option Args.name) >> attrib' add_intro)
+ "adding alternative introduction rules for code generation of inductive predicates")
fun qualified_binding a =
Binding.qualify true (Long_Name.qualifier a) (Binding.name (Long_Name.base_name a));
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile_quickcheck.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile_quickcheck.ML Wed Oct 29 19:23:32 2014 +0100
@@ -31,7 +31,6 @@
val nrandom : int Unsynchronized.ref
val debug : bool Unsynchronized.ref
val no_higher_order_predicate : string list Unsynchronized.ref
- val setup : theory -> theory
end;
structure Predicate_Compile_Quickcheck : PREDICATE_COMPILE_QUICKCHECK =
@@ -423,11 +422,12 @@
val smart_slow_exhaustive_active =
Attrib.setup_config_bool @{binding quickcheck_slow_smart_exhaustive_active} (K false)
-val setup =
- Exhaustive_Generators.setup_exhaustive_datatype_interpretation
- #> Context.theory_map (Quickcheck.add_tester ("smart_exhaustive",
- (smart_exhaustive_active, test_goals (Predicate_Compile_Aux.Pos_Generator_CPS, false))))
- #> Context.theory_map (Quickcheck.add_tester ("smart_slow_exhaustive",
- (smart_slow_exhaustive_active, test_goals (Predicate_Compile_Aux.Pos_Generator_DSeq, false))))
+val _ =
+ Theory.setup
+ (Exhaustive_Generators.setup_exhaustive_datatype_interpretation #>
+ Context.theory_map (Quickcheck.add_tester ("smart_exhaustive",
+ (smart_exhaustive_active, test_goals (Predicate_Compile_Aux.Pos_Generator_CPS, false)))) #>
+ Context.theory_map (Quickcheck.add_tester ("smart_slow_exhaustive",
+ (smart_slow_exhaustive_active, test_goals (Predicate_Compile_Aux.Pos_Generator_DSeq, false)))))
end
--- a/src/HOL/Tools/Qelim/cooper.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Qelim/cooper.ML Wed Oct 29 19:23:32 2014 +0100
@@ -13,7 +13,6 @@
exception COOPER of string
val conv: Proof.context -> conv
val tac: bool -> thm list -> thm list -> Proof.context -> int -> tactic
- val setup: theory -> theory
end;
structure Cooper: COOPER =
@@ -835,6 +834,7 @@
[@{thm "split_zdiv"}, @{thm "split_zmod"}, @{thm "split_div'"},
@{thm "split_min"}, @{thm "split_max"}, @{thm "abs_split"}])
in
+
fun nat_to_int_tac ctxt =
simp_tac (put_simpset ss1 ctxt) THEN_ALL_NEW
simp_tac (put_simpset ss2 ctxt) THEN_ALL_NEW
@@ -842,16 +842,17 @@
fun div_mod_tac ctxt = simp_tac (put_simpset div_mod_ss ctxt);
fun splits_tac ctxt = simp_tac (put_simpset splits_ss ctxt);
+
end;
fun core_tac ctxt = CSUBGOAL (fn (p, i) =>
let
- val cpth =
+ val cpth =
if Config.get ctxt quick_and_dirty
then oracle (ctxt, Envir.beta_norm (Envir.eta_long [] (term_of (Thm.dest_arg p))))
else Conv.arg_conv (conv ctxt) p
- val p' = Thm.rhs_of cpth
- val th = Thm.implies_intr p' (Thm.equal_elim (Thm.symmetric cpth) (Thm.assume p'))
+ val p' = Thm.rhs_of cpth
+ val th = Thm.implies_intr p' (Thm.equal_elim (Thm.symmetric cpth) (Thm.assume p'))
in rtac th i end
handle COOPER _ => no_tac);
@@ -881,7 +882,7 @@
end 1);
-(* theory setup *)
+(* attribute syntax *)
local
@@ -896,11 +897,12 @@
in
-val setup =
- Attrib.setup @{binding presburger}
- ((Scan.lift (Args.$$$ "del") |-- optional (keyword constsN |-- terms)) >> del ||
- optional (keyword constsN |-- terms) >> add) "data for Cooper's algorithm"
- #> Arith_Data.add_tactic "Presburger arithmetic" (K (tac true [] []));
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding presburger}
+ ((Scan.lift (Args.$$$ "del") |-- optional (keyword constsN |-- terms)) >> del ||
+ optional (keyword constsN |-- terms) >> add) "data for Cooper's algorithm"
+ #> Arith_Data.add_tactic "Presburger arithmetic" (K (tac true [] [])));
end;
--- a/src/HOL/Tools/Quickcheck/exhaustive_generators.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Quickcheck/exhaustive_generators.ML Wed Oct 29 19:23:32 2014 +0100
@@ -21,7 +21,6 @@
val quickcheck_pretty : bool Config.T
val setup_exhaustive_datatype_interpretation : theory -> theory
val setup_bounded_forall_datatype_interpretation : theory -> theory
- val setup: theory -> theory
val instantiate_full_exhaustive_datatype : Old_Datatype_Aux.config -> Old_Datatype_Aux.descr ->
(string * sort) list -> string list -> string -> string list * string list -> typ list * typ list -> theory -> theory
@@ -547,11 +546,12 @@
val active = Attrib.setup_config_bool @{binding quickcheck_exhaustive_active} (K true);
-val setup =
- Quickcheck_Common.datatype_interpretation @{plugin quickcheck_full_exhaustive}
- (@{sort full_exhaustive}, instantiate_full_exhaustive_datatype)
- #> Context.theory_map (Quickcheck.add_tester ("exhaustive", (active, test_goals)))
- #> Context.theory_map (Quickcheck.add_batch_generator ("exhaustive", compile_generator_exprs))
- #> Context.theory_map (Quickcheck.add_batch_validator ("exhaustive", compile_validator_exprs));
+val _ =
+ Theory.setup
+ (Quickcheck_Common.datatype_interpretation @{plugin quickcheck_full_exhaustive}
+ (@{sort full_exhaustive}, instantiate_full_exhaustive_datatype)
+ #> Context.theory_map (Quickcheck.add_tester ("exhaustive", (active, test_goals)))
+ #> Context.theory_map (Quickcheck.add_batch_generator ("exhaustive", compile_generator_exprs))
+ #> Context.theory_map (Quickcheck.add_batch_validator ("exhaustive", compile_validator_exprs)));
end;
--- a/src/HOL/Tools/Quickcheck/narrowing_generators.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Quickcheck/narrowing_generators.ML Wed Oct 29 19:23:32 2014 +0100
@@ -16,7 +16,6 @@
| Empty_Assignment
val put_counterexample: (unit -> (bool * term list) option) -> Proof.context -> Proof.context
val put_existential_counterexample : (unit -> counterexample option) -> Proof.context -> Proof.context
- val setup: theory -> theory
end;
structure Narrowing_Generators : NARROWING_GENERATORS =
@@ -522,11 +521,12 @@
val active = Attrib.setup_config_bool @{binding quickcheck_narrowing_active} (K false);
-val setup =
- Code.datatype_interpretation ensure_partial_term_of
- #> Code.datatype_interpretation ensure_partial_term_of_code
- #> Quickcheck_Common.datatype_interpretation @{plugin quickcheck_narrowing}
- (@{sort narrowing}, instantiate_narrowing_datatype)
- #> Context.theory_map (Quickcheck.add_tester ("narrowing", (active, test_goals)))
+val _ =
+ Theory.setup
+ (Code.datatype_interpretation ensure_partial_term_of
+ #> Code.datatype_interpretation ensure_partial_term_of_code
+ #> Quickcheck_Common.datatype_interpretation @{plugin quickcheck_narrowing}
+ (@{sort narrowing}, instantiate_narrowing_datatype)
+ #> Context.theory_map (Quickcheck.add_tester ("narrowing", (active, test_goals))));
end;
--- a/src/HOL/Tools/Quickcheck/random_generators.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Quickcheck/random_generators.ML Wed Oct 29 19:23:32 2014 +0100
@@ -18,7 +18,6 @@
-> Proof.context -> Proof.context
val instantiate_random_datatype : Old_Datatype_Aux.config -> Old_Datatype_Aux.descr ->
(string * sort) list -> string list -> string -> string list * string list -> typ list * typ list -> theory -> theory
- val setup: theory -> theory
end;
structure Random_Generators : RANDOM_GENERATORS =
@@ -469,9 +468,10 @@
val active = Attrib.setup_config_bool @{binding quickcheck_random_active} (K false);
-val setup =
- Quickcheck_Common.datatype_interpretation @{plugin quickcheck_random}
- (@{sort random}, instantiate_random_datatype)
- #> Context.theory_map (Quickcheck.add_tester ("random", (active, test_goals)));
+val _ =
+ Theory.setup
+ (Quickcheck_Common.datatype_interpretation @{plugin quickcheck_random}
+ (@{sort random}, instantiate_random_datatype) #>
+ Context.theory_map (Quickcheck.add_tester ("random", (active, test_goals))));
end;
--- a/src/HOL/Tools/Transfer/transfer.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/Transfer/transfer.ML Wed Oct 29 19:23:32 2014 +0100
@@ -41,7 +41,6 @@
val transfer_tac: bool -> Proof.context -> int -> tactic
val transfer_prover_tac: Proof.context -> int -> tactic
val gen_frees_tac: (string * typ) list -> Proof.context -> int -> tactic
- val setup: theory -> theory
end
structure Transfer : TRANSFER =
@@ -50,7 +49,7 @@
(** Theory Data **)
val compound_xhs_empty_net = Item_Net.init (Thm.eq_thm_prop o pairself snd) (single o fst);
-val rewr_rules = Item_Net.init Thm.eq_thm_prop (single o fst o HOLogic.dest_eq
+val rewr_rules = Item_Net.init Thm.eq_thm_prop (single o fst o HOLogic.dest_eq
o HOLogic.dest_Trueprop o Thm.concl_of);
type pred_data = {rel_eq_onp: thm}
@@ -165,7 +164,7 @@
| _ => I) o
map_known_frees (Term.add_frees (Thm.concl_of thm)))
-fun del_transfer_thm thm = Data.map
+fun del_transfer_thm thm = Data.map
(map_transfer_raw (Item_Net.remove thm) o
map_compound_lhs
(case HOLogic.dest_Trueprop (Thm.concl_of thm) of
@@ -186,7 +185,7 @@
fun bottom_rewr_conv rewrs = Conv.bottom_conv (K (Conv.try_conv (Conv.rewrs_conv rewrs))) @{context}
fun top_rewr_conv rewrs = Conv.top_conv (K (Conv.try_conv (Conv.rewrs_conv rewrs))) @{context}
-fun transfer_rel_conv conv =
+fun transfer_rel_conv conv =
Conv.concl_conv ~1 (HOLogic.Trueprop_conv (Conv.fun2_conv (Conv.arg_conv conv)))
val Rel_rule = Thm.symmetric @{thm Rel_def}
@@ -258,7 +257,7 @@
(rel, fn rel' =>
Logic.list_implies (prems, HOLogic.mk_Trueprop (rel' $ x $ y)))
end
- val contracted_eq_thm =
+ val contracted_eq_thm =
Conv.fconv_rule (transfer_rel_conv (bottom_rewr_conv (get_relator_eq ctxt))) thm
handle CTERM _ => thm
in
@@ -279,13 +278,13 @@
in
(dom, fn dom' => Logic.list_implies (prems, HOLogic.mk_Trueprop (eq $ dom' $ y)))
end
- fun transfer_rel_conv conv =
+ fun transfer_rel_conv conv =
Conv.concl_conv ~1 (HOLogic.Trueprop_conv (Conv.arg1_conv (Conv.arg_conv conv)))
- val contracted_eq_thm =
+ val contracted_eq_thm =
Conv.fconv_rule (transfer_rel_conv (bottom_rewr_conv (get_relator_eq ctxt))) thm
in
gen_abstract_equalities ctxt dest contracted_eq_thm
- end
+ end
(** Replacing explicit Domainp predicates with Domainp assumptions **)
@@ -303,13 +302,13 @@
| fold_Domainp f (Abs (_, _, t)) = fold_Domainp f t
| fold_Domainp _ _ = I
-fun subst_terms tab t =
+fun subst_terms tab t =
let
val t' = Termtab.lookup tab t
in
case t' of
SOME t' => t'
- | NONE =>
+ | NONE =>
(case t of
u $ v => (subst_terms tab u) $ (subst_terms tab v)
| Abs (a, T, t) => Abs (a, T, subst_terms tab t)
@@ -384,13 +383,13 @@
(** Adding transfer domain rules **)
-fun prep_transfer_domain_thm ctxt thm =
- (abstract_equalities_domain ctxt o detect_transfer_rules) thm
+fun prep_transfer_domain_thm ctxt thm =
+ (abstract_equalities_domain ctxt o detect_transfer_rules) thm
-fun add_transfer_domain_thm thm ctxt = (add_transfer_thm o
+fun add_transfer_domain_thm thm ctxt = (add_transfer_thm o
prep_transfer_domain_thm (Context.proof_of ctxt)) thm ctxt
-fun del_transfer_domain_thm thm ctxt = (del_transfer_thm o
+fun del_transfer_domain_thm thm ctxt = (del_transfer_thm o
prep_transfer_domain_thm (Context.proof_of ctxt)) thm ctxt
(** Transfer proof method **)
@@ -558,8 +557,8 @@
end
fun retrieve_terms t net = map fst (Item_Net.retrieve net t)
-
-fun matches_list ctxt term =
+
+fun matches_list ctxt term =
is_some o find_first (fn pat => Pattern.matches (Proof_Context.theory_of ctxt) (pat, term))
fun transfer_rule_of_term ctxt equiv t : thm =
@@ -612,12 +611,12 @@
|> Thm.instantiate (map tinst binsts, map inst binsts)
end
-fun eq_rules_tac eq_rules = TRY o REPEAT_ALL_NEW (resolve_tac eq_rules)
+fun eq_rules_tac eq_rules = TRY o REPEAT_ALL_NEW (resolve_tac eq_rules)
THEN_ALL_NEW rtac @{thm is_equality_eq}
fun eq_tac ctxt = eq_rules_tac (get_relator_eq_raw ctxt)
-fun transfer_step_tac ctxt = (REPEAT_ALL_NEW (resolve_tac (get_transfer_raw ctxt))
+fun transfer_step_tac ctxt = (REPEAT_ALL_NEW (resolve_tac (get_transfer_raw ctxt))
THEN_ALL_NEW (DETERM o eq_rules_tac (get_relator_eq_raw ctxt)))
fun transfer_tac equiv ctxt i =
@@ -752,15 +751,15 @@
(* Attribute for transfer rules *)
-fun prep_rule ctxt =
+fun prep_rule ctxt =
abstract_domains_transfer ctxt o abstract_equalities_transfer ctxt o Conv.fconv_rule prep_conv
val transfer_add =
- Thm.declaration_attribute (fn thm => fn ctxt =>
+ Thm.declaration_attribute (fn thm => fn ctxt =>
(add_transfer_thm o prep_rule (Context.proof_of ctxt)) thm ctxt)
val transfer_del =
- Thm.declaration_attribute (fn thm => fn ctxt =>
+ Thm.declaration_attribute (fn thm => fn ctxt =>
(del_transfer_thm o prep_rule (Context.proof_of ctxt)) thm ctxt)
val transfer_attribute =
@@ -794,75 +793,76 @@
fun lookup_pred_data ctxt type_name = Symtab.lookup (get_pred_data ctxt) type_name
|> Option.map (morph_pred_data (Morphism.transfer_morphism (Proof_Context.theory_of ctxt)))
-fun update_pred_data type_name qinfo ctxt =
+fun update_pred_data type_name qinfo ctxt =
Data.map (map_pred_data (Symtab.update (type_name, qinfo))) ctxt
(* Theory setup *)
-val relator_eq_setup =
- let
- val name = @{binding relator_eq}
- fun add_thm thm context = context
- |> Data.map (map_relator_eq (Item_Net.update thm))
- |> Data.map (map_relator_eq_raw
- (Item_Net.update (abstract_equalities_relator_eq (Context.proof_of context) thm)))
- fun del_thm thm context = context
- |> Data.map (map_relator_eq (Item_Net.remove thm))
- |> Data.map (map_relator_eq_raw
- (Item_Net.remove (abstract_equalities_relator_eq (Context.proof_of context) thm)))
- val add = Thm.declaration_attribute add_thm
- val del = Thm.declaration_attribute del_thm
- val text = "declaration of relator equality rule (used by transfer method)"
- val content = Item_Net.content o #relator_eq o Data.get
- in
- Attrib.setup name (Attrib.add_del add del) text
- #> Global_Theory.add_thms_dynamic (name, content)
- end
+val _ =
+ Theory.setup
+ let
+ val name = @{binding relator_eq}
+ fun add_thm thm context = context
+ |> Data.map (map_relator_eq (Item_Net.update thm))
+ |> Data.map (map_relator_eq_raw
+ (Item_Net.update (abstract_equalities_relator_eq (Context.proof_of context) thm)))
+ fun del_thm thm context = context
+ |> Data.map (map_relator_eq (Item_Net.remove thm))
+ |> Data.map (map_relator_eq_raw
+ (Item_Net.remove (abstract_equalities_relator_eq (Context.proof_of context) thm)))
+ val add = Thm.declaration_attribute add_thm
+ val del = Thm.declaration_attribute del_thm
+ val text = "declaration of relator equality rule (used by transfer method)"
+ val content = Item_Net.content o #relator_eq o Data.get
+ in
+ Attrib.setup name (Attrib.add_del add del) text
+ #> Global_Theory.add_thms_dynamic (name, content)
+ end
-val relator_domain_setup =
- let
- val name = @{binding relator_domain}
- fun add_thm thm context =
- let
- val thm = abstract_domains_relator_domain (Context.proof_of context) thm
- in
- context |> Data.map (map_relator_domain (Item_Net.update thm)) |> add_transfer_domain_thm thm
- end
- fun del_thm thm context =
- let
- val thm = abstract_domains_relator_domain (Context.proof_of context) thm
- in
- context |> Data.map (map_relator_domain (Item_Net.remove thm)) |> del_transfer_domain_thm thm
- end
- val add = Thm.declaration_attribute add_thm
- val del = Thm.declaration_attribute del_thm
- val text = "declaration of relator domain rule (used by transfer method)"
- val content = Item_Net.content o #relator_domain o Data.get
- in
- Attrib.setup name (Attrib.add_del add del) text
- #> Global_Theory.add_thms_dynamic (name, content)
- end
+val _ =
+ Theory.setup
+ let
+ val name = @{binding relator_domain}
+ fun add_thm thm context =
+ let
+ val thm = abstract_domains_relator_domain (Context.proof_of context) thm
+ in
+ context |> Data.map (map_relator_domain (Item_Net.update thm)) |> add_transfer_domain_thm thm
+ end
+ fun del_thm thm context =
+ let
+ val thm = abstract_domains_relator_domain (Context.proof_of context) thm
+ in
+ context |> Data.map (map_relator_domain (Item_Net.remove thm)) |> del_transfer_domain_thm thm
+ end
+ val add = Thm.declaration_attribute add_thm
+ val del = Thm.declaration_attribute del_thm
+ val text = "declaration of relator domain rule (used by transfer method)"
+ val content = Item_Net.content o #relator_domain o Data.get
+ in
+ Attrib.setup name (Attrib.add_del add del) text
+ #> Global_Theory.add_thms_dynamic (name, content)
+ end
-val setup =
- relator_eq_setup
- #> relator_domain_setup
- #> Attrib.setup @{binding transfer_rule} transfer_attribute
- "transfer rule for transfer method"
- #> Global_Theory.add_thms_dynamic
- (@{binding transfer_raw}, Item_Net.content o #transfer_raw o Data.get)
- #> Attrib.setup @{binding transfer_domain_rule} transfer_domain_attribute
- "transfer domain rule for transfer method"
- #> Attrib.setup @{binding transferred} transferred_attribute_parser
- "raw theorem transferred to abstract theorem using transfer rules"
- #> Attrib.setup @{binding untransferred} untransferred_attribute_parser
- "abstract theorem transferred to raw theorem using transfer rules"
- #> Global_Theory.add_thms_dynamic
- (@{binding relator_eq_raw}, Item_Net.content o #relator_eq_raw o Data.get)
- #> Method.setup @{binding transfer} (transfer_method true)
- "generic theorem transfer method"
- #> Method.setup @{binding transfer'} (transfer_method false)
- "generic theorem transfer method"
- #> Method.setup @{binding transfer_prover} transfer_prover_method
- "for proving transfer rules"
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding transfer_rule} transfer_attribute
+ "transfer rule for transfer method"
+ #> Global_Theory.add_thms_dynamic
+ (@{binding transfer_raw}, Item_Net.content o #transfer_raw o Data.get)
+ #> Attrib.setup @{binding transfer_domain_rule} transfer_domain_attribute
+ "transfer domain rule for transfer method"
+ #> Attrib.setup @{binding transferred} transferred_attribute_parser
+ "raw theorem transferred to abstract theorem using transfer rules"
+ #> Attrib.setup @{binding untransferred} untransferred_attribute_parser
+ "abstract theorem transferred to raw theorem using transfer rules"
+ #> Global_Theory.add_thms_dynamic
+ (@{binding relator_eq_raw}, Item_Net.content o #relator_eq_raw o Data.get)
+ #> Method.setup @{binding transfer} (transfer_method true)
+ "generic theorem transfer method"
+ #> Method.setup @{binding transfer'} (transfer_method false)
+ "generic theorem transfer method"
+ #> Method.setup @{binding transfer_prover} transfer_prover_method
+ "for proving transfer rules")
end
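
Note (illustration only, not part of the changeset): the add/del attribute idiom used above for relator_eq and relator_domain (and for mono further below) reduces to the following sketch; the data slot Rules and the binding my_rules are placeholders, everything else uses combinators that occur in this patch.

  (* generic context data holding the declared rules *)
  structure Rules = Generic_Data
  (
    type T = thm list
    val empty = []
    val extend = I
    val merge = Thm.merge_thms
  );

  val add = Thm.declaration_attribute (Rules.map o Thm.add_thm);
  val del = Thm.declaration_attribute (Rules.map o Thm.del_thm);

  val _ =
    Theory.setup
      (Attrib.setup @{binding my_rules} (Attrib.add_del add del)
        "maintained rules (placeholder description)");
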
--- a/src/HOL/Tools/arith_data.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/arith_data.ML Wed Oct 29 19:23:32 2014 +0100
@@ -20,8 +20,6 @@
val prove_conv2: tactic -> (Proof.context -> tactic) -> Proof.context -> term * term -> thm
val simp_all_tac: thm list -> Proof.context -> tactic
val simplify_meta_eq: thm list -> Proof.context -> thm -> thm
-
- val setup: theory -> theory
end;
structure Arith_Data: ARITH_DATA =
@@ -48,14 +46,15 @@
val arith_tac = gen_arith_tac false;
val verbose_arith_tac = gen_arith_tac true;
-val setup =
- Method.setup @{binding arith}
- (Scan.succeed (fn ctxt =>
- METHOD (fn facts =>
- HEADGOAL
- (Method.insert_tac (rev (Named_Theorems.get ctxt @{named_theorems arith}) @ facts)
- THEN' verbose_arith_tac ctxt))))
- "various arithmetic decision procedures";
+val _ =
+ Theory.setup
+ (Method.setup @{binding arith}
+ (Scan.succeed (fn ctxt =>
+ METHOD (fn facts =>
+ HEADGOAL
+ (Method.insert_tac (rev (Named_Theorems.get ctxt @{named_theorems arith}) @ facts)
+ THEN' verbose_arith_tac ctxt))))
+ "various arithmetic decision procedures");
(* some specialized syntactic operations *)
--- a/src/HOL/Tools/coinduction.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/coinduction.ML Wed Oct 29 19:23:32 2014 +0100
@@ -9,7 +9,6 @@
signature COINDUCTION =
sig
val coinduction_tac: Proof.context -> term list -> thm option -> thm list -> cases_tactic
- val setup: theory -> theory
end;
structure Coinduction : COINDUCTION =
@@ -19,11 +18,12 @@
open Ctr_Sugar_General_Tactics
fun filter_in_out _ [] = ([], [])
- | filter_in_out P (x :: xs) = (let
- val (ins, outs) = filter_in_out P xs;
- in
- if P x then (x :: ins, outs) else (ins, x :: outs)
- end);
+ | filter_in_out P (x :: xs) =
+ let
+ val (ins, outs) = filter_in_out P xs;
+ in
+ if P x then (x :: ins, outs) else (ins, x :: outs)
+ end;
fun ALLGOALS_SKIP skip tac st =
let fun doall n = if n = skip then all_tac else tac n THEN doall (n - 1)
@@ -43,7 +43,7 @@
fun coinduction_tac ctxt raw_vars opt_raw_thm prems = HEADGOAL (SUBGOAL_CASES (fn (goal, _, _) =>
let
val lhs_of_eq = HOLogic.dest_Trueprop #> HOLogic.dest_eq #> fst;
- fun find_coinduct t =
+ fun find_coinduct t =
Induct.find_coinductP ctxt t @
(try (Induct.find_coinductT ctxt o fastype_of o lhs_of_eq) t |> the_default [])
val raw_thm =
@@ -102,7 +102,7 @@
val (init, last) = funpow_yield (p + e - 1) HOLogic.conj_elim inv;
val inv_thms = init @ [last];
val eqs = take e inv_thms;
- fun is_local_var t =
+ fun is_local_var t =
member (fn (t, (_, t')) => t aconv (term_of t')) params t;
val (eqs, assms') = filter_in_out (is_local_var o lhs_of_eq o prop_of) eqs;
val assms = assms' @ drop e inv_thms
@@ -146,12 +146,13 @@
in
-val setup =
- Method.setup @{binding coinduction}
- (arbitrary -- Scan.option coinduct_rule >>
- (fn (arbitrary, opt_rule) => fn ctxt => fn facts =>
- Seq.DETERM (coinduction_tac ctxt arbitrary opt_rule facts)))
- "coinduction on types or predicates/sets";
+val _ =
+ Theory.setup
+ (Method.setup @{binding coinduction}
+ (arbitrary -- Scan.option coinduct_rule >>
+ (fn (arbitrary, opt_rule) => fn ctxt => fn facts =>
+ Seq.DETERM (coinduction_tac ctxt arbitrary opt_rule facts)))
+ "coinduction on types or predicates/sets");
end;
--- a/src/HOL/Tools/inductive.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/inductive.ML Wed Oct 29 19:23:32 2014 +0100
@@ -64,7 +64,6 @@
val partition_rules': thm -> (thm * 'a) list -> (string * (thm * 'a) list) list
val unpartition_rules: thm list -> (string * 'a list) list -> 'a list
val infer_intro_vars: thm -> int -> thm list -> term list list
- val setup: theory -> theory
end;
signature INDUCTIVE =
@@ -276,6 +275,11 @@
map_data (fn (infos, monos, equations) =>
(infos, Thm.del_thm (mk_mono (Context.proof_of context) thm) monos, equations)) context);
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding mono} (Attrib.add_del mono_add mono_del)
+ "declaration of monotonicity rule");
+
(* equations *)
@@ -587,19 +591,19 @@
val inductive_cases = gen_inductive_cases Attrib.check_src Syntax.read_prop;
val inductive_cases_i = gen_inductive_cases (K I) Syntax.check_prop;
-
-val ind_cases_setup =
- Method.setup @{binding ind_cases}
- (Scan.lift (Scan.repeat1 Args.name_inner_syntax --
- Scan.optional (Args.$$$ "for" |-- Scan.repeat1 Args.binding) []) >>
- (fn (raw_props, fixes) => fn ctxt =>
- let
- val (_, ctxt') = Variable.add_fixes_binding fixes ctxt;
- val props = Syntax.read_props ctxt' raw_props;
- val ctxt'' = fold Variable.declare_term props ctxt';
- val rules = Proof_Context.export ctxt'' ctxt (map (mk_cases ctxt'') props)
- in Method.erule ctxt 0 rules end))
- "dynamic case analysis on predicates";
+val _ =
+ Theory.setup
+ (Method.setup @{binding ind_cases}
+ (Scan.lift (Scan.repeat1 Args.name_inner_syntax --
+ Scan.optional (Args.$$$ "for" |-- Scan.repeat1 Args.binding) []) >>
+ (fn (raw_props, fixes) => fn ctxt =>
+ let
+ val (_, ctxt') = Variable.add_fixes_binding fixes ctxt;
+ val props = Syntax.read_props ctxt' raw_props;
+ val ctxt'' = fold Variable.declare_term props ctxt';
+ val rules = Proof_Context.export ctxt'' ctxt (map (mk_cases ctxt'') props)
+ in Method.erule ctxt 0 rules end))
+ "dynamic case analysis on predicates");
(* derivation of simplified equation *)
@@ -1142,17 +1146,7 @@
-(** package setup **)
-
-(* setup theory *)
-
-val setup =
- ind_cases_setup #>
- Attrib.setup @{binding mono} (Attrib.add_del mono_add mono_del)
- "declaration of monotonicity rule";
-
-
-(* outer syntax *)
+(** outer syntax **)
fun gen_ind_decl mk_def coind =
Parse.fixes -- Parse.for_fixes --
--- a/src/HOL/Tools/legacy_transfer.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/legacy_transfer.ML Wed Oct 29 19:23:32 2014 +0100
@@ -14,7 +14,6 @@
val add: thm -> bool -> entry -> Context.generic -> Context.generic
val del: thm -> entry -> Context.generic -> Context.generic
val drop: thm -> Context.generic -> Context.generic
- val setup: theory -> theory
end;
structure Legacy_Transfer : LEGACY_TRANSFER =
@@ -112,7 +111,7 @@
val transform = Thm.apply @{cterm "Trueprop"} o Thm.apply D;
val T = Thm.typ_of (Thm.ctyp_of_term a);
val (aT, bT) = (Term.range_type T, Term.domain_type T);
-
+
(* determine variables to transfer *)
val ctxt3 = ctxt2
|> Variable.declare_thm thm
@@ -168,7 +167,7 @@
cong = union Thm.eq_thm cong1 cong2, hints = union (op =) hints1 hints2 }
end;
-fun diminish_entry
+fun diminish_entry
{ inj = inj0, embed = embed0, return = return0, cong = cong0, hints = hints0 }
{ inj = inj2, embed = embed2, return = return2, cong = cong2, hints = hints2 } =
{ inj = subtract Thm.eq_thm inj0 inj2, embed = subtract Thm.eq_thm embed0 embed2,
@@ -243,28 +242,23 @@
in
-val transfer_attribute = keyword delN >> K (Thm.declaration_attribute drop)
- || keyword addN |-- Scan.optional mode true -- entry_pair
- >> (fn (guess, (entry_add, entry_del)) => Thm.declaration_attribute
- (fn thm => add thm guess entry_add #> del thm entry_del))
- || keyword_colon keyN |-- Attrib.thm
- >> (fn key => Thm.declaration_attribute
- (fn thm => add key false
- { inj = [], embed = [], return = [thm], cong = [], hints = [] }));
-
-val transferred_attribute = selection -- these (keyword_colon leavingN |-- names)
- >> (fn (selection, leave) => Thm.rule_attribute (fn context =>
- Conjunction.intr_balanced o transfer context selection leave));
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding transfer}
+ (keyword delN >> K (Thm.declaration_attribute drop)
+ || keyword addN |-- Scan.optional mode true -- entry_pair
+ >> (fn (guess, (entry_add, entry_del)) =>
+ Thm.declaration_attribute (fn thm => add thm guess entry_add #> del thm entry_del))
+ || keyword_colon keyN |-- Attrib.thm
+ >> (fn key => Thm.declaration_attribute (fn thm =>
+ add key false { inj = [], embed = [], return = [thm], cong = [], hints = [] })))
+ "install transfer data" #>
+ Attrib.setup @{binding transferred}
+ (selection -- these (keyword_colon leavingN |-- names)
+ >> (fn (selection, leave) => Thm.rule_attribute (fn context =>
+ Conjunction.intr_balanced o transfer context selection leave)))
+ "transfer theorems");
end;
-
-(* theory setup *)
-
-val setup =
- Attrib.setup @{binding transfer} transfer_attribute
- "Installs transfer data" #>
- Attrib.setup @{binding transferred} transferred_attribute
- "Transfers theorems";
-
end;
--- a/src/HOL/Tools/recdef.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/recdef.ML Wed Oct 29 19:23:32 2014 +0100
@@ -27,7 +27,6 @@
local_theory -> Proof.state
val recdef_tc_i: bstring * Token.src list -> string -> int option -> bool ->
local_theory -> Proof.state
- val setup: theory -> theory
end;
structure Recdef: RECDEF =
@@ -278,13 +277,14 @@
(* setup theory *)
-val setup =
- Attrib.setup @{binding recdef_simp} (Attrib.add_del simp_add simp_del)
- "declaration of recdef simp rule" #>
- Attrib.setup @{binding recdef_cong} (Attrib.add_del cong_add cong_del)
- "declaration of recdef cong rule" #>
- Attrib.setup @{binding recdef_wf} (Attrib.add_del wf_add wf_del)
- "declaration of recdef wf rule";
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding recdef_simp} (Attrib.add_del simp_add simp_del)
+ "declaration of recdef simp rule" #>
+ Attrib.setup @{binding recdef_cong} (Attrib.add_del cong_add cong_del)
+ "declaration of recdef cong rule" #>
+ Attrib.setup @{binding recdef_wf} (Attrib.add_del wf_add wf_del)
+ "declaration of recdef wf rule");
(* outer syntax *)
--- a/src/HOL/Tools/semiring_normalizer.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/semiring_normalizer.ML Wed Oct 29 19:23:32 2014 +0100
@@ -41,8 +41,6 @@
main: Proof.context -> conv,
pow: Proof.context -> conv,
sub: Proof.context -> conv}
-
- val setup: theory -> theory
end
structure Semiring_Normalizer: SEMIRING_NORMALIZER =
@@ -912,20 +910,21 @@
in
-val setup =
- Attrib.setup @{binding normalizer}
- (Scan.lift (Args.$$$ delN >> K del) ||
- ((keyword2 semiringN opsN |-- terms) --
- (keyword2 semiringN rulesN |-- thms)) --
- (optional (keyword2 ringN opsN |-- terms) --
- optional (keyword2 ringN rulesN |-- thms)) --
- (optional (keyword2 fieldN opsN |-- terms) --
- optional (keyword2 fieldN rulesN |-- thms)) --
- optional (keyword2 idomN rulesN |-- thms) --
- optional (keyword2 idealN rulesN |-- thms)
- >> (fn ((((sr, r), f), id), idl) =>
- add {semiring = sr, ring = r, field = f, idom = id, ideal = idl}))
- "semiring normalizer data";
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding normalizer}
+ (Scan.lift (Args.$$$ delN >> K del) ||
+ ((keyword2 semiringN opsN |-- terms) --
+ (keyword2 semiringN rulesN |-- thms)) --
+ (optional (keyword2 ringN opsN |-- terms) --
+ optional (keyword2 ringN rulesN |-- thms)) --
+ (optional (keyword2 fieldN opsN |-- terms) --
+ optional (keyword2 fieldN rulesN |-- thms)) --
+ optional (keyword2 idomN rulesN |-- thms) --
+ optional (keyword2 idealN rulesN |-- thms)
+ >> (fn ((((sr, r), f), id), idl) =>
+ add {semiring = sr, ring = r, field = f, idom = id, ideal = idl}))
+ "semiring normalizer data");
end;
--- a/src/HOL/Tools/split_rule.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/split_rule.ML Wed Oct 29 19:23:32 2014 +0100
@@ -9,7 +9,6 @@
val split_rule_var: Proof.context -> term -> thm -> thm
val split_rule: Proof.context -> thm -> thm
val complete_split_rule: Proof.context -> thm -> thm
- val setup: theory -> theory
end;
structure Split_Rule: SPLIT_RULE =
@@ -110,14 +109,15 @@
(* attribute syntax *)
-val setup =
- Attrib.setup @{binding split_format}
- (Scan.lift (Args.parens (Args.$$$ "complete")
- >> K (Thm.rule_attribute (complete_split_rule o Context.proof_of))))
- "split pair-typed subterms in premises, or function arguments" #>
- Attrib.setup @{binding split_rule}
- (Scan.succeed (Thm.rule_attribute (split_rule o Context.proof_of)))
- "curries ALL function variables occurring in a rule's conclusion";
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding split_format}
+ (Scan.lift (Args.parens (Args.$$$ "complete")
+ >> K (Thm.rule_attribute (complete_split_rule o Context.proof_of))))
+ "split pair-typed subterms in premises, or function arguments" #>
+ Attrib.setup @{binding split_rule}
+ (Scan.succeed (Thm.rule_attribute (split_rule o Context.proof_of)))
+ "curries ALL function variables occurring in a rule's conclusion");
end;
--- a/src/HOL/Tools/string_syntax.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Tools/string_syntax.ML Wed Oct 29 19:23:32 2014 +0100
@@ -4,15 +4,9 @@
Concrete syntax for hex chars and strings.
*)
-signature STRING_SYNTAX =
-sig
- val setup: theory -> theory
-end;
-
-structure String_Syntax: STRING_SYNTAX =
+structure String_Syntax: sig end =
struct
-
(* nibble *)
val mk_nib =
@@ -91,12 +85,13 @@
(* theory setup *)
-val setup =
- Sign.parse_ast_translation
- [(@{syntax_const "_Char"}, K char_ast_tr),
- (@{syntax_const "_String"}, K string_ast_tr)] #>
- Sign.print_ast_translation
- [(@{const_syntax Char}, K char_ast_tr'),
- (@{syntax_const "_list"}, K list_ast_tr')];
+val _ =
+ Theory.setup
+ (Sign.parse_ast_translation
+ [(@{syntax_const "_Char"}, K char_ast_tr),
+ (@{syntax_const "_String"}, K string_ast_tr)] #>
+ Sign.print_ast_translation
+ [(@{const_syntax Char}, K char_ast_tr'),
+ (@{syntax_const "_list"}, K list_ast_tr')]);
end;
--- a/src/HOL/Transfer.thy Wed Oct 29 12:01:39 2014 +0100
+++ b/src/HOL/Transfer.thy Wed Oct 29 19:23:32 2014 +0100
@@ -249,7 +249,6 @@
by auto
ML_file "Tools/Transfer/transfer.ML"
-setup Transfer.setup
declare refl [transfer_rule]
hide_const (open) Rel
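
On the theory side the change is purely subtractive: the explicit "setup Transfer.setup" command is gone and loading the ML file is enough, so the relevant part of Transfer.thy now reads simply

  ML_file "Tools/Transfer/transfer.ML"
  declare refl [transfer_rule]
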
--- a/src/Provers/blast.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Provers/blast.ML Wed Oct 29 19:23:32 2014 +0100
@@ -62,7 +62,6 @@
val trace: bool Config.T
val stats: bool Config.T
val blast_tac: Proof.context -> int -> tactic
- val setup: theory -> theory
(*debugging tools*)
type branch
val tryIt: Proof.context -> int -> string ->
@@ -77,7 +76,7 @@
(* options *)
-val (depth_limit, setup_depth_limit) = Attrib.config_int @{binding blast_depth_limit} (K 20);
+val depth_limit = Attrib.setup_config_int @{binding blast_depth_limit} (K 20);
val (trace, _) = Attrib.config_bool @{binding blast_trace} (K false);
val (stats, _) = Attrib.config_bool @{binding blast_stats} (K false);
@@ -1294,14 +1293,15 @@
in {fullTrace = !fullTrace, result = res} end;
+
(** method setup **)
-val setup =
- setup_depth_limit #>
- Method.setup @{binding blast}
- (Scan.lift (Scan.option Parse.nat) --| Method.sections Classical.cla_modifiers >>
- (fn NONE => SIMPLE_METHOD' o blast_tac
- | SOME lim => (fn ctxt => SIMPLE_METHOD' (depth_tac ctxt lim))))
- "classical tableau prover";
+val _ =
+ Theory.setup
+ (Method.setup @{binding blast}
+ (Scan.lift (Scan.option Parse.nat) --| Method.sections Classical.cla_modifiers >>
+ (fn NONE => SIMPLE_METHOD' o blast_tac
+ | SOME lim => (fn ctxt => SIMPLE_METHOD' (depth_tac ctxt lim))))
+ "classical tableau prover");
end;
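
Configuration options follow the same scheme: Attrib.config_int returned a pair of the option and a separate setup function that still had to be threaded into the theory, while Attrib.setup_config_int registers the option immediately and returns only the option value, which is read with Config.get. A small sketch of the reading side (depth_of is a hypothetical helper, not part of the patch):

  val depth_limit = Attrib.setup_config_int @{binding blast_depth_limit} (K 20);

  (* hypothetical helper: read the current limit from a proof context *)
  fun depth_of ctxt = Config.get ctxt depth_limit;
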
--- a/src/Provers/clasimp.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Provers/clasimp.ML Wed Oct 29 19:23:32 2014 +0100
@@ -31,7 +31,6 @@
val iff_del: attribute
val iff_modifiers: Method.modifier parser list
val clasimp_modifiers: Method.modifier parser list
- val clasimp_setup: theory -> theory
end;
functor Clasimp(Data: CLASIMP_DATA): CLASIMP =
@@ -180,10 +179,14 @@
(* attributes *)
-fun iff_att x = (Scan.lift
- (Args.del >> K iff_del ||
- Scan.option Args.add -- Args.query >> K iff_add' ||
- Scan.option Args.add >> K iff_add)) x;
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding iff}
+ (Scan.lift
+ (Args.del >> K iff_del ||
+ Scan.option Args.add -- Args.query >> K iff_add' ||
+ Scan.option Args.add >> K iff_add))
+ "declaration of Simplifier / Classical rules");
(* method modifiers *)
@@ -211,17 +214,14 @@
(fn NONE => SIMPLE_METHOD o CHANGED_PROP o auto_tac
| SOME (m, n) => (fn ctxt => SIMPLE_METHOD (CHANGED_PROP (mk_auto_tac ctxt m n))));
-
-(* theory setup *)
-
-val clasimp_setup =
- Attrib.setup @{binding iff} iff_att "declaration of Simplifier / Classical rules" #>
- Method.setup @{binding fastforce} (clasimp_method' fast_force_tac) "combined fast and simp" #>
- Method.setup @{binding slowsimp} (clasimp_method' slow_simp_tac) "combined slow and simp" #>
- Method.setup @{binding bestsimp} (clasimp_method' best_simp_tac) "combined best and simp" #>
- Method.setup @{binding force} (clasimp_method' force_tac) "force" #>
- Method.setup @{binding auto} auto_method "auto" #>
- Method.setup @{binding clarsimp} (clasimp_method' (CHANGED_PROP oo clarsimp_tac))
- "clarify simplified goal";
+val _ =
+ Theory.setup
+ (Method.setup @{binding fastforce} (clasimp_method' fast_force_tac) "combined fast and simp" #>
+ Method.setup @{binding slowsimp} (clasimp_method' slow_simp_tac) "combined slow and simp" #>
+ Method.setup @{binding bestsimp} (clasimp_method' best_simp_tac) "combined best and simp" #>
+ Method.setup @{binding force} (clasimp_method' force_tac) "force" #>
+ Method.setup @{binding auto} auto_method "auto" #>
+ Method.setup @{binding clarsimp} (clasimp_method' (CHANGED_PROP oo clarsimp_tac))
+ "clarify simplified goal");
end;
--- a/src/Provers/classical.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Provers/classical.ML Wed Oct 29 19:23:32 2014 +0100
@@ -124,7 +124,6 @@
(Proof.context -> tactic) -> (Proof.context -> Proof.method) context_parser
val cla_method':
(Proof.context -> int -> tactic) -> (Proof.context -> Proof.method) context_parser
- val setup: theory -> theory
end;
@@ -882,17 +881,18 @@
val elimN = "elim";
val destN = "dest";
-val setup_attrs =
- Attrib.setup @{binding swapped} (Scan.succeed swapped)
- "classical swap of introduction rule" #>
- Attrib.setup @{binding dest} (Context_Rules.add safe_dest haz_dest Context_Rules.dest_query)
- "declaration of Classical destruction rule" #>
- Attrib.setup @{binding elim} (Context_Rules.add safe_elim haz_elim Context_Rules.elim_query)
- "declaration of Classical elimination rule" #>
- Attrib.setup @{binding intro} (Context_Rules.add safe_intro haz_intro Context_Rules.intro_query)
- "declaration of Classical introduction rule" #>
- Attrib.setup @{binding rule} (Scan.lift Args.del >> K rule_del)
- "remove declaration of intro/elim/dest rule";
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding swapped} (Scan.succeed swapped)
+ "classical swap of introduction rule" #>
+ Attrib.setup @{binding dest} (Context_Rules.add safe_dest haz_dest Context_Rules.dest_query)
+ "declaration of Classical destruction rule" #>
+ Attrib.setup @{binding elim} (Context_Rules.add safe_elim haz_elim Context_Rules.elim_query)
+ "declaration of Classical elimination rule" #>
+ Attrib.setup @{binding intro} (Context_Rules.add safe_intro haz_intro Context_Rules.intro_query)
+ "declaration of Classical introduction rule" #>
+ Attrib.setup @{binding rule} (Scan.lift Args.del >> K rule_del)
+ "remove declaration of intro/elim/dest rule");
@@ -941,46 +941,40 @@
-(** setup_methods **)
+(** method setup **)
-val setup_methods =
- Method.setup @{binding default}
- (Attrib.thms >> (fn rules => fn ctxt => METHOD (default_tac ctxt rules)))
- "apply some intro/elim rule (potentially classical)" #>
- Method.setup @{binding rule}
- (Attrib.thms >> (fn rules => fn ctxt => METHOD (HEADGOAL o rule_tac ctxt rules)))
- "apply some intro/elim rule (potentially classical)" #>
- Method.setup @{binding contradiction}
- (Scan.succeed (fn ctxt => Method.rule ctxt [Data.not_elim, Drule.rotate_prems 1 Data.not_elim]))
- "proof by contradiction" #>
- Method.setup @{binding clarify} (cla_method' (CHANGED_PROP oo clarify_tac))
- "repeatedly apply safe steps" #>
- Method.setup @{binding fast} (cla_method' fast_tac) "classical prover (depth-first)" #>
- Method.setup @{binding slow} (cla_method' slow_tac) "classical prover (slow depth-first)" #>
- Method.setup @{binding best} (cla_method' best_tac) "classical prover (best-first)" #>
- Method.setup @{binding deepen}
- (Scan.lift (Scan.optional Parse.nat 4) --| Method.sections cla_modifiers
- >> (fn n => fn ctxt => SIMPLE_METHOD' (deepen_tac ctxt n)))
- "classical prover (iterative deepening)" #>
- Method.setup @{binding safe} (cla_method (CHANGED_PROP o safe_tac))
- "classical prover (apply safe rules)" #>
- Method.setup @{binding safe_step} (cla_method' safe_step_tac)
- "single classical step (safe rules)" #>
- Method.setup @{binding inst_step} (cla_method' inst_step_tac)
- "single classical step (safe rules, allow instantiations)" #>
- Method.setup @{binding step} (cla_method' step_tac)
- "single classical step (safe and unsafe rules)" #>
- Method.setup @{binding slow_step} (cla_method' slow_step_tac)
- "single classical step (safe and unsafe rules, allow backtracking)" #>
- Method.setup @{binding clarify_step} (cla_method' clarify_step_tac)
- "single classical step (safe rules, without splitting)";
-
-
-
-(** theory setup **)
-
-val setup = setup_attrs #> setup_methods;
-
+val _ =
+ Theory.setup
+ (Method.setup @{binding default}
+ (Attrib.thms >> (fn rules => fn ctxt => METHOD (default_tac ctxt rules)))
+ "apply some intro/elim rule (potentially classical)" #>
+ Method.setup @{binding rule}
+ (Attrib.thms >> (fn rules => fn ctxt => METHOD (HEADGOAL o rule_tac ctxt rules)))
+ "apply some intro/elim rule (potentially classical)" #>
+ Method.setup @{binding contradiction}
+ (Scan.succeed (fn ctxt => Method.rule ctxt [Data.not_elim, Drule.rotate_prems 1 Data.not_elim]))
+ "proof by contradiction" #>
+ Method.setup @{binding clarify} (cla_method' (CHANGED_PROP oo clarify_tac))
+ "repeatedly apply safe steps" #>
+ Method.setup @{binding fast} (cla_method' fast_tac) "classical prover (depth-first)" #>
+ Method.setup @{binding slow} (cla_method' slow_tac) "classical prover (slow depth-first)" #>
+ Method.setup @{binding best} (cla_method' best_tac) "classical prover (best-first)" #>
+ Method.setup @{binding deepen}
+ (Scan.lift (Scan.optional Parse.nat 4) --| Method.sections cla_modifiers
+ >> (fn n => fn ctxt => SIMPLE_METHOD' (deepen_tac ctxt n)))
+ "classical prover (iterative deepening)" #>
+ Method.setup @{binding safe} (cla_method (CHANGED_PROP o safe_tac))
+ "classical prover (apply safe rules)" #>
+ Method.setup @{binding safe_step} (cla_method' safe_step_tac)
+ "single classical step (safe rules)" #>
+ Method.setup @{binding inst_step} (cla_method' inst_step_tac)
+ "single classical step (safe rules, allow instantiations)" #>
+ Method.setup @{binding step} (cla_method' step_tac)
+ "single classical step (safe and unsafe rules)" #>
+ Method.setup @{binding slow_step} (cla_method' slow_step_tac)
+ "single classical step (safe and unsafe rules, allow backtracking)" #>
+ Method.setup @{binding clarify_step} (cla_method' clarify_step_tac)
+ "single classical step (safe rules, without splitting)");
(** outer syntax **)
--- a/src/Provers/hypsubst.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Provers/hypsubst.ML Wed Oct 29 19:23:32 2014 +0100
@@ -52,7 +52,6 @@
val hyp_subst_tac : Proof.context -> int -> tactic
val blast_hyp_subst_tac : bool -> int -> tactic
val stac : thm -> int -> tactic
- val hypsubst_setup : theory -> theory
end;
functor Hypsubst(Data: HYPSUBST_DATA): HYPSUBST =
@@ -120,7 +119,7 @@
handle TERM _ => eq_var_aux (k+1) B (A :: hs)
| Match => eq_var_aux (k+1) B (A :: hs))
| eq_var_aux k _ _ = raise EQ_VAR
-
+
in eq_var_aux 0 t [] end;
val thin_free_eq_tac = SUBGOAL (fn (t, i) => let
@@ -228,11 +227,10 @@
gen_hyp_subst_tac ctxt false, vars_gen_hyp_subst_tac false,
if thin then thin_free_eq_tac else K no_tac];
-val (hyp_subst_thin, hyp_subst_thin_setup) = Attrib.config_bool
- @{binding hypsubst_thin} (K false);
+val hyp_subst_thin = Attrib.setup_config_bool @{binding hypsubst_thin} (K false);
-fun hyp_subst_tac ctxt = hyp_subst_tac_thin
- (Config.get ctxt hyp_subst_thin) ctxt
+fun hyp_subst_tac ctxt =
+ hyp_subst_tac_thin (Config.get ctxt hyp_subst_thin) ctxt;
(*Substitutes for Bound variables only -- this is always safe*)
fun bound_hyp_subst_tac ctxt =
@@ -296,18 +294,18 @@
in CHANGED_GOAL (rtac (th' RS ssubst)) end;
-(* theory setup *)
+(* method setup *)
-val hypsubst_setup =
- Method.setup @{binding hypsubst}
- (Scan.succeed (fn ctxt => SIMPLE_METHOD' (CHANGED_PROP o hyp_subst_tac ctxt)))
- "substitution using an assumption (improper)" #>
- Method.setup @{binding hypsubst_thin}
- (Scan.succeed (fn ctxt => SIMPLE_METHOD'
- (CHANGED_PROP o hyp_subst_tac_thin true ctxt)))
- "substitution using an assumption, eliminating assumptions" #>
- hyp_subst_thin_setup #>
- Method.setup @{binding simplesubst} (Attrib.thm >> (fn th => K (SIMPLE_METHOD' (stac th))))
- "simple substitution";
+val _ =
+ Theory.setup
+ (Method.setup @{binding hypsubst}
+ (Scan.succeed (fn ctxt => SIMPLE_METHOD' (CHANGED_PROP o hyp_subst_tac ctxt)))
+ "substitution using an assumption (improper)" #>
+ Method.setup @{binding hypsubst_thin}
+ (Scan.succeed (fn ctxt => SIMPLE_METHOD'
+ (CHANGED_PROP o hyp_subst_tac_thin true ctxt)))
+ "substitution using an assumption, eliminating assumptions" #>
+ Method.setup @{binding simplesubst} (Attrib.thm >> (fn th => K (SIMPLE_METHOD' (stac th))))
+ "simple substitution");
end;
--- a/src/Provers/splitter.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Provers/splitter.ML Wed Oct 29 19:23:32 2014 +0100
@@ -37,7 +37,6 @@
val split_add: attribute
val split_del: attribute
val split_modifiers : Method.modifier parser list
- val setup: theory -> theory
end;
functor Splitter(Data: SPLITTER_DATA): SPLITTER =
@@ -453,6 +452,11 @@
val split_add = Simplifier.attrib add_split;
val split_del = Simplifier.attrib del_split;
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding split}
+ (Attrib.add_del split_add split_del) "declare case split rule");
+
(* methods *)
@@ -461,14 +465,10 @@
Args.$$$ splitN -- Args.add -- Args.colon >> K (Method.modifier split_add @{here}),
Args.$$$ splitN -- Args.del -- Args.colon >> K (Method.modifier split_del @{here})];
-
-(* theory setup *)
-
-val setup =
- Attrib.setup @{binding split}
- (Attrib.add_del split_add split_del) "declare case split rule" #>
- Method.setup @{binding split}
- (Attrib.thms >> (fn ths => K (SIMPLE_METHOD' (CHANGED_PROP o gen_split_tac ths))))
- "apply case split rule";
+val _ =
+ Theory.setup
+ (Method.setup @{binding split}
+ (Attrib.thms >> (fn ths => K (SIMPLE_METHOD' (CHANGED_PROP o gen_split_tac ths))))
+ "apply case split rule");
end;
--- a/src/Tools/case_product.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Tools/case_product.ML Wed Oct 29 19:23:32 2014 +0100
@@ -12,7 +12,7 @@
sig
val combine: Proof.context -> thm -> thm -> thm
val combine_annotated: Proof.context -> thm -> thm -> thm
- val setup: theory -> theory
+ val annotation: thm -> thm -> attribute
end
structure Case_Product: CASE_PRODUCT =
@@ -104,15 +104,15 @@
(* attribute setup *)
-val case_prod_attr =
- let
- fun combine_list ctxt = fold (fn x => fn y => combine_annotated ctxt y x)
- in
- Attrib.thms >> (fn thms => Thm.rule_attribute (fn ctxt => fn thm =>
- combine_list (Context.proof_of ctxt) thms thm))
- end
-
-val setup =
- Attrib.setup @{binding case_product} case_prod_attr "product with other case rules"
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding case_product}
+ let
+ fun combine_list ctxt = fold (fn x => fn y => combine_annotated ctxt y x)
+ in
+ Attrib.thms >> (fn thms => Thm.rule_attribute (fn ctxt => fn thm =>
+ combine_list (Context.proof_of ctxt) thms thm))
+ end
+ "product with other case rules")
end
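
Attribute parsers that were only ever passed to a single Attrib.setup call (iff_att in clasimp.ML, case_prod_attr here) are inlined into that call rather than kept as named values. The general shape, with hypothetical names my_attr and my_combine:

  val _ =
    Theory.setup
      (Attrib.setup @{binding my_attr}
        (Attrib.thms >> (fn thms =>
          Thm.rule_attribute (fn context => fn thm => my_combine context thms thm)))
        "hypothetical attribute combining theorems");
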
--- a/src/Tools/eqsubst.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Tools/eqsubst.ML Wed Oct 29 19:23:32 2014 +0100
@@ -45,8 +45,6 @@
val searchf_lr_unify_all: searchinfo -> term -> match Seq.seq Seq.seq
val searchf_lr_unify_valid: searchinfo -> term -> match Seq.seq Seq.seq
val searchf_bt_unify_valid: searchinfo -> term -> match Seq.seq Seq.seq
-
- val setup : theory -> theory
end;
structure EqSubst: EQSUBST =
@@ -418,12 +416,12 @@
(* combination method that takes a flag (true indicates that subst
should be done to an assumption, false = apply to the conclusion of
the goal) as well as the theorems to use *)
-val setup =
- Method.setup @{binding subst}
- (Scan.lift (Args.mode "asm" -- Scan.optional (Args.parens (Scan.repeat Parse.nat)) [0]) --
- Attrib.thms >>
- (fn ((asm, occs), inthms) => fn ctxt =>
- SIMPLE_METHOD' ((if asm then eqsubst_asm_tac else eqsubst_tac) ctxt occs inthms)))
- "single-step substitution";
+val _ =
+ Theory.setup
+ (Method.setup @{binding subst}
+ (Scan.lift (Args.mode "asm" -- Scan.optional (Args.parens (Scan.repeat Parse.nat)) [0]) --
+ Attrib.thms >> (fn ((asm, occs), inthms) => fn ctxt =>
+ SIMPLE_METHOD' ((if asm then eqsubst_asm_tac else eqsubst_tac) ctxt occs inthms)))
+ "single-step substitution");
end;
--- a/src/Tools/induct.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Tools/induct.ML Wed Oct 29 19:23:32 2014 +0100
@@ -89,7 +89,6 @@
(string * typ) list list -> term option list -> thm list option ->
thm list -> int -> cases_tactic) ->
theory -> theory
- val setup: theory -> theory
end;
functor Induct(Induct_Args: INDUCT_ARGS): INDUCT =
@@ -368,15 +367,16 @@
in
-val attrib_setup =
- Attrib.setup @{binding cases} (attrib cases_type cases_pred cases_del)
- "declaration of cases rule" #>
- Attrib.setup @{binding induct} (attrib induct_type induct_pred induct_del)
- "declaration of induction rule" #>
- Attrib.setup @{binding coinduct} (attrib coinduct_type coinduct_pred coinduct_del)
- "declaration of coinduction rule" #>
- Attrib.setup @{binding induct_simp} (Attrib.add_del induct_simp_add induct_simp_del)
- "declaration of rules for simplifying induction or cases rules";
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding cases} (attrib cases_type cases_pred cases_del)
+ "declaration of cases rule" #>
+ Attrib.setup @{binding induct} (attrib induct_type induct_pred induct_del)
+ "declaration of induction rule" #>
+ Attrib.setup @{binding coinduct} (attrib coinduct_type coinduct_pred coinduct_del)
+ "declaration of coinduction rule" #>
+ Attrib.setup @{binding induct_simp} (Attrib.add_del induct_simp_add induct_simp_del)
+ "declaration of rules for simplifying induction or cases rules");
end;
@@ -923,14 +923,15 @@
in
-val cases_setup =
- Method.setup @{binding cases}
- (Scan.lift (Args.mode no_simpN) --
- (Parse.and_list' (Scan.repeat (unless_more_args inst)) -- Scan.option cases_rule) >>
- (fn (no_simp, (insts, opt_rule)) => fn ctxt =>
- METHOD_CASES (fn facts => Seq.DETERM (HEADGOAL
- (cases_tac ctxt (not no_simp) insts opt_rule facts)))))
- "case analysis on types or predicates/sets";
+val _ =
+ Theory.setup
+ (Method.setup @{binding cases}
+ (Scan.lift (Args.mode no_simpN) --
+ (Parse.and_list' (Scan.repeat (unless_more_args inst)) -- Scan.option cases_rule) >>
+ (fn (no_simp, (insts, opt_rule)) => fn ctxt =>
+ METHOD_CASES (fn facts => Seq.DETERM (HEADGOAL
+ (cases_tac ctxt (not no_simp) insts opt_rule facts)))))
+ "case analysis on types or predicates/sets");
fun gen_induct_setup binding tac =
Method.setup binding
@@ -941,21 +942,16 @@
Seq.DETERM (HEADGOAL (tac ctxt (not no_simp) insts arbitrary taking opt_rule facts))))
"induction on types or predicates/sets";
-val induct_setup = gen_induct_setup @{binding induct} induct_tac;
+val _ = Theory.setup (gen_induct_setup @{binding induct} induct_tac);
-val coinduct_setup =
- Method.setup @{binding coinduct}
- (Scan.repeat (unless_more_args inst) -- taking -- Scan.option coinduct_rule >>
- (fn ((insts, taking), opt_rule) => fn ctxt => fn facts =>
- Seq.DETERM (HEADGOAL (coinduct_tac ctxt insts taking opt_rule facts))))
- "coinduction on types or predicates/sets";
+val _ =
+ Theory.setup
+ (Method.setup @{binding coinduct}
+ (Scan.repeat (unless_more_args inst) -- taking -- Scan.option coinduct_rule >>
+ (fn ((insts, taking), opt_rule) => fn ctxt => fn facts =>
+ Seq.DETERM (HEADGOAL (coinduct_tac ctxt insts taking opt_rule facts))))
+ "coinduction on types or predicates/sets");
end;
-
-
-(** theory setup **)
-
-val setup = attrib_setup #> cases_setup #> induct_setup #> coinduct_setup;
-
end;
--- a/src/Tools/induct_tacs.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Tools/induct_tacs.ML Wed Oct 29 19:23:32 2014 +0100
@@ -10,7 +10,6 @@
val case_rule_tac: Proof.context -> string -> thm -> int -> tactic
val induct_tac: Proof.context -> string option list list -> int -> tactic
val induct_rules_tac: Proof.context -> string option list list -> thm list -> int -> tactic
- val setup: theory -> theory
end
structure Induct_Tacs: INDUCT_TACS =
@@ -128,15 +127,16 @@
in
-val setup =
- Method.setup @{binding case_tac}
- (Args.goal_spec -- Scan.lift Args.name_inner_syntax -- opt_rule >>
- (fn ((quant, s), r) => fn ctxt => SIMPLE_METHOD'' quant (gen_case_tac ctxt s r)))
- "unstructured case analysis on types" #>
- Method.setup @{binding induct_tac}
- (Args.goal_spec -- varss -- opt_rules >>
- (fn ((quant, vs), rs) => fn ctxt => SIMPLE_METHOD'' quant (gen_induct_tac ctxt vs rs)))
- "unstructured induction on types";
+val _ =
+ Theory.setup
+ (Method.setup @{binding case_tac}
+ (Args.goal_spec -- Scan.lift Args.name_inner_syntax -- opt_rule >>
+ (fn ((quant, s), r) => fn ctxt => SIMPLE_METHOD'' quant (gen_case_tac ctxt s r)))
+ "unstructured case analysis on types" #>
+ Method.setup @{binding induct_tac}
+ (Args.goal_spec -- varss -- opt_rules >>
+ (fn ((quant, vs), rs) => fn ctxt => SIMPLE_METHOD'' quant (gen_induct_tac ctxt vs rs)))
+ "unstructured induction on types");
end;
--- a/src/Tools/induction.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Tools/induction.ML Wed Oct 29 19:23:32 2014 +0100
@@ -3,7 +3,6 @@
val induction_tac: Proof.context -> bool -> (binding option * (term * bool)) option list list ->
(string * typ) list list -> term option list -> thm list option ->
thm list -> int -> cases_tactic
- val setup: theory -> theory
end
structure Induction: INDUCTION =
@@ -37,7 +36,7 @@
val induction_tac = Induct.gen_induct_tac (K name_hyps)
-val setup = Induct.gen_induct_setup @{binding induction} induction_tac
+val _ = Theory.setup (Induct.gen_induct_setup @{binding induction} induction_tac)
end
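
Setup functions that are reused across modules keep their theory -> theory type and are merely wrapped at each use site: induct.ML still defines gen_induct_setup, and the induct and induction methods are registered by applying it under Theory.setup, as in

  val _ = Theory.setup (gen_induct_setup @{binding induct} induct_tac);

in induct.ML and

  val _ = Theory.setup (Induct.gen_induct_setup @{binding induction} induction_tac)

in induction.ML.
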
--- a/src/Tools/jEdit/src/token_markup.scala Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Tools/jEdit/src/token_markup.scala Wed Oct 29 19:23:32 2014 +0100
@@ -254,7 +254,7 @@
} yield Text.Info(Text.Range(i - tok.source.length, i), tok)
- /* command span */
+ /* command spans */
def command_span(syntax: Outer_Syntax, buffer: JEditBuffer, offset: Text.Offset)
: Option[Text.Info[Command_Span.Span]] =
@@ -282,6 +282,32 @@
else None
}
+ private def _command_span_iterator(
+ syntax: Outer_Syntax,
+ buffer: JEditBuffer,
+ offset: Text.Offset,
+ next_offset: Text.Range => Text.Offset): Iterator[Text.Info[Command_Span.Span]] =
+ new Iterator[Text.Info[Command_Span.Span]]
+ {
+ private var next_span = command_span(syntax, buffer, offset)
+ def hasNext: Boolean = next_span.isDefined
+ def next: Text.Info[Command_Span.Span] =
+ {
+ val span = next_span.getOrElse(Iterator.empty.next)
+ next_span = command_span(syntax, buffer, next_offset(span.range))
+ span
+ }
+ }
+
+ def command_span_iterator(syntax: Outer_Syntax, buffer: JEditBuffer, offset: Text.Offset)
+ : Iterator[Text.Info[Command_Span.Span]] =
+ _command_span_iterator(syntax, buffer, offset max 0, range => range.stop)
+
+ def command_span_reverse_iterator(syntax: Outer_Syntax, buffer: JEditBuffer, offset: Text.Offset)
+ : Iterator[Text.Info[Command_Span.Span]] =
+ _command_span_iterator(syntax, buffer,
+ (offset min buffer.getLength) - 1, range => range.start - 1)
+
/* token marker */
--- a/src/Tools/subtyping.ML Wed Oct 29 12:01:39 2014 +0100
+++ b/src/Tools/subtyping.ML Wed Oct 29 19:23:32 2014 +0100
@@ -10,7 +10,6 @@
val add_type_map: term -> Context.generic -> Context.generic
val add_coercion: term -> Context.generic -> Context.generic
val print_coercions: Proof.context -> unit
- val setup: theory -> theory
end;
structure Subtyping: SUBTYPING =
@@ -884,9 +883,11 @@
val coercion_enabled = Attrib.setup_config_bool @{binding coercion_enabled} (K false);
-val add_term_check =
- Syntax_Phases.term_check ~100 "coercions"
- (fn ctxt => Config.get ctxt coercion_enabled ? coercion_infer_types ctxt);
+val _ =
+ Theory.setup
+ (Context.theory_map
+ (Syntax_Phases.term_check ~100 "coercions"
+ (fn ctxt => Config.get ctxt coercion_enabled ? coercion_infer_types ctxt)));
(* declarations *)
@@ -1088,26 +1089,26 @@
end |> Pretty.writeln_chunks;
-(* theory setup *)
+(* attribute setup *)
val parse_coerce_args =
Args.$$$ "+" >> K PERMIT || Args.$$$ "-" >> K FORBID || Args.$$$ "0" >> K LEAVE
-val setup =
- Context.theory_map add_term_check #>
- Attrib.setup @{binding coercion}
- (Args.term >> (fn t => Thm.declaration_attribute (K (add_coercion t))))
- "declaration of new coercions" #>
- Attrib.setup @{binding coercion_delete}
- (Args.term >> (fn t => Thm.declaration_attribute (K (delete_coercion t))))
- "deletion of coercions" #>
- Attrib.setup @{binding coercion_map}
- (Args.term >> (fn t => Thm.declaration_attribute (K (add_type_map t))))
- "declaration of new map functions" #>
- Attrib.setup @{binding coercion_args}
- (Args.const {proper = false, strict = false} -- Scan.lift (Scan.repeat1 parse_coerce_args) >>
- (fn spec => Thm.declaration_attribute (K (map_coerce_args (Symtab.update spec)))))
- "declaration of new constants with coercion-invariant arguments";
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding coercion}
+ (Args.term >> (fn t => Thm.declaration_attribute (K (add_coercion t))))
+ "declaration of new coercions" #>
+ Attrib.setup @{binding coercion_delete}
+ (Args.term >> (fn t => Thm.declaration_attribute (K (delete_coercion t))))
+ "deletion of coercions" #>
+ Attrib.setup @{binding coercion_map}
+ (Args.term >> (fn t => Thm.declaration_attribute (K (add_type_map t))))
+ "declaration of new map functions" #>
+ Attrib.setup @{binding coercion_args}
+ (Args.const {proper = false, strict = false} -- Scan.lift (Scan.repeat1 parse_coerce_args) >>
+ (fn spec => Thm.declaration_attribute (K (map_coerce_args (Symtab.update spec)))))
+ "declaration of new constants with coercion-invariant arguments");
(* outer syntax commands *)
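
Declarations that act on the generic context rather than on the theory, such as the "coercions" term check above, are lifted with Context.theory_map, which turns a Context.generic -> Context.generic function into the theory -> theory function that Theory.setup expects; schematically (decl stands for such a declaration and is only a placeholder):

  val _ = Theory.setup (Context.theory_map decl);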