--- a/CONTRIBUTORS Sun Jun 09 20:24:16 2019 +0200
+++ b/CONTRIBUTORS Sun Jun 09 22:22:36 2019 +0200
@@ -3,6 +3,10 @@
listed as an author in one of the source files of this Isabelle distribution.
+Contributions to this Isabelle version
+--------------------------------------
+
+
Contributions to Isabelle2019
-----------------------------
--- a/NEWS Sun Jun 09 20:24:16 2019 +0200
+++ b/NEWS Sun Jun 09 22:22:36 2019 +0200
@@ -4,6 +4,10 @@
(Note: Isabelle/jEdit shows a tree-view of the NEWS file in Sidekick.)
+New in this Isabelle version
+----------------------------
+
+
New in Isabelle2019 (June 2019)
-------------------------------
--- a/src/Doc/Functions/Functions.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Doc/Functions/Functions.thy Sun Jun 09 22:22:36 2019 +0200
@@ -347,6 +347,15 @@
method a bit stronger: it can then use multiset orders internally.
\<close>
+subsection \<open>Configuring simplification rules for termination proofs\<close>
+
+text \<open>
+ Since both \<open>lexicographic_order\<close> and \<open>size_change\<close> rely on the simplifier internally,
+ it is sometimes necessary to supply additional simp rules to them.
+ This can be done either as arguments to the methods themselves, or globally via the
+ theorem attribute \<open>termination_simp\<close>, which is useful in rare cases.
+\<close>
+
section \<open>Mutual Recursion\<close>
text \<open>
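
The \<open>termination_simp\<close> attribute documented in the hunk above can be exercised with a
small size-estimation rule. The following is a minimal sketch, not part of this changeset;
the lemma name, statement and proof are illustrative and assume only a theory based on Main:

    (* estimation rule made available to termination provers *)
    lemma size_filter_less [termination_simp]:
      "size (filter P xs) < Suc (size xs)"
      by (induction xs) (auto intro: less_SucI)

Once declared, \<open>lexicographic_order\<close> (and hence \<open>fun\<close>) should typically discharge the
termination goals of definitions that recurse through \<open>filter\<close> without further hints.
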
--- a/src/Doc/Isar_Ref/HOL_Specific.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Doc/Isar_Ref/HOL_Specific.thy Sun Jun 09 22:22:36 2019 +0200
@@ -489,6 +489,7 @@
@{method_def (HOL) relation} & : & \<open>method\<close> \\
@{method_def (HOL) lexicographic_order} & : & \<open>method\<close> \\
@{method_def (HOL) size_change} & : & \<open>method\<close> \\
+ @{attribute_def (HOL) termination_simp} & : & \<open>attribute\<close> \\
@{method_def (HOL) induction_schema} & : & \<open>method\<close> \\
\end{matharray}
@@ -535,6 +536,10 @@
For local descent proofs, the @{syntax clasimpmod} modifiers are accepted
(as for @{method auto}).
+ \<^descr> @{attribute (HOL) termination_simp} declares extra rules for the
+ simplifier when it is invoked in termination proofs. This can be useful, e.g.,
+ for special rules involving size estimations.
+
\<^descr> @{method (HOL) induction_schema} derives user-specified induction rules
from well-founded induction and completeness of patterns. This factors out
some operations that are done internally by the function package and makes
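
The @{syntax clasimpmod} modifiers mentioned above allow such rules to be passed directly to
the method instead of being declared globally. A minimal sketch, not part of this changeset,
with all names (\<open>size_filter_less\<close>, \<open>f\<close>) purely illustrative and assuming a theory
based on Main:

    lemma size_filter_less: "size (filter P xs) < Suc (size xs)"
      by (induction xs) (auto intro: less_SucI)

    (* termination needs the estimation rule: the call recurses through filter *)
    function f :: "'a list \<Rightarrow> nat" where
      "f [] = 0"
    | "f (x # xs) = Suc (f (filter (\<lambda>y. y \<noteq> x) xs))"
      by pat_completeness auto

    termination
      by (lexicographic_order simp: size_filter_less)

Here \<open>simp:\<close> extends the simplifier setup used for the local descent proofs, in the same
way the corresponding modifier works for @{method auto}.
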
--- a/src/Doc/more_antiquote.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Doc/more_antiquote.ML Sun Jun 09 22:22:36 2019 +0200
@@ -20,12 +20,6 @@
(* code theorem antiquotation *)
-fun no_vars ctxt thm =
- let
- val ctxt' = Variable.set_body false ctxt;
- val ((_, [thm]), _) = Variable.import true [thm] ctxt';
- in thm end;
-
val _ =
Theory.setup (Thy_Output.antiquotation_pretty \<^binding>\<open>code_thms\<close> Args.term
(fn ctxt => fn raw_const =>
@@ -38,7 +32,7 @@
|> snd
|> these
|> map_filter (fn (_, (some_thm, proper)) => if proper then some_thm else NONE)
- |> map (HOLogic.mk_obj_eq o no_vars ctxt o Axclass.overload ctxt);
+ |> map (HOLogic.mk_obj_eq o Variable.import_vars ctxt o Axclass.overload ctxt);
in Pretty.chunks (map (Thy_Output.pretty_thm ctxt) thms) end));
end;
--- a/src/HOL/Analysis/Bochner_Integration.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Analysis/Bochner_Integration.thy Sun Jun 09 22:22:36 2019 +0200
@@ -1932,6 +1932,24 @@
integral\<^sup>L M f \<le> integral\<^sup>L M g"
by (intro integral_mono_AE) auto
+lemma integral_norm_bound_integral:
+ fixes f :: "'a::euclidean_space \<Rightarrow> 'b::{banach,second_countable_topology}"
+ assumes "integrable M f" "integrable M g" "\<And>x. x \<in> space M \<Longrightarrow> norm(f x) \<le> g x"
+ shows "norm (\<integral>x. f x \<partial>M) \<le> (\<integral>x. g x \<partial>M)"
+proof -
+ have "norm (\<integral>x. f x \<partial>M) \<le> (\<integral>x. norm (f x) \<partial>M)"
+ by (rule integral_norm_bound)
+ also have "... \<le> (\<integral>x. g x \<partial>M)"
+ using assms integrable_norm integral_mono by blast
+ finally show ?thesis .
+qed
+
+lemma integral_abs_bound_integral:
+ fixes f :: "'a::euclidean_space \<Rightarrow> real"
+ assumes "integrable M f" "integrable M g" "\<And>x. x \<in> space M \<Longrightarrow> \<bar>f x\<bar> \<le> g x"
+ shows "\<bar>\<integral>x. f x \<partial>M\<bar> \<le> (\<integral>x. g x \<partial>M)"
+ by (metis integral_norm_bound_integral assms real_norm_def)
+
text \<open>The next two statements are useful to bound Lebesgue integrals, as they avoid one
integrability assumption. The price to pay is that the upper function has to be nonnegative,
but this is often true and easy to check in computations.\<close>
--- a/src/HOL/Analysis/Equivalence_Lebesgue_Henstock_Integration.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Analysis/Equivalence_Lebesgue_Henstock_Integration.thy Sun Jun 09 22:22:36 2019 +0200
@@ -3119,12 +3119,29 @@
using f by (auto intro: has_integral_implies_lebesgue_measurable simp: integrable_on_def)
qed (use le in \<open>force intro!: always_eventually split: split_indicator\<close>)
+corollary absolutely_integrable_on_const [simp]:
+ fixes c :: "'a::euclidean_space"
+ assumes "S \<in> lmeasurable"
+ shows "(\<lambda>x. c) absolutely_integrable_on S"
+ by (metis (full_types) assms absolutely_integrable_integrable_bound integrable_on_const order_refl)
+
lemma absolutely_integrable_continuous:
fixes f :: "'a::euclidean_space \<Rightarrow> 'b::euclidean_space"
shows "continuous_on (cbox a b) f \<Longrightarrow> f absolutely_integrable_on cbox a b"
using absolutely_integrable_integrable_bound
by (simp add: absolutely_integrable_on_def continuous_on_norm integrable_continuous)
+lemma continuous_imp_integrable:
+ fixes f :: "'a::euclidean_space \<Rightarrow> 'b::euclidean_space"
+ assumes "continuous_on (cbox a b) f"
+ shows "integrable (lebesgue_on (cbox a b)) f"
+proof -
+ have "f absolutely_integrable_on cbox a b"
+ by (simp add: absolutely_integrable_continuous assms)
+ then show ?thesis
+ by (simp add: integrable_restrict_space set_integrable_def)
+qed
+
subsection \<open>Componentwise\<close>
--- a/src/HOL/Analysis/Lebesgue_Measure.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Analysis/Lebesgue_Measure.thy Sun Jun 09 22:22:36 2019 +0200
@@ -304,15 +304,10 @@
qed (auto simp: Ioc_inj mono_F)
lemma measure_interval_measure_Ioc:
- assumes "a \<le> b"
- assumes mono_F: "\<And>x y. x \<le> y \<Longrightarrow> F x \<le> F y"
- assumes right_cont_F : "\<And>a. continuous (at_right a) F"
+ assumes "a \<le> b" and "\<And>x y. x \<le> y \<Longrightarrow> F x \<le> F y" and "\<And>a. continuous (at_right a) F"
shows "measure (interval_measure F) {a <.. b} = F b - F a"
unfolding measure_def
- apply (subst emeasure_interval_measure_Ioc)
- apply fact+
- apply (simp add: assms)
- done
+ by (simp add: assms emeasure_interval_measure_Ioc)
lemma emeasure_interval_measure_Ioc_eq:
"(\<And>x y. x \<le> y \<Longrightarrow> F x \<le> F y) \<Longrightarrow> (\<And>a. continuous (at_right a) F) \<Longrightarrow>
@@ -409,6 +404,11 @@
shows "f \<in> measurable (lebesgue_on S) M \<longleftrightarrow> g \<in> measurable (lebesgue_on S) M"
by (metis (mono_tags, lifting) IntD1 assms measurable_cong_simp space_restrict_space)
+lemma integrable_lebesgue_on_UNIV_eq:
+ fixes f :: "'a::euclidean_space \<Rightarrow> 'b::{banach, second_countable_topology}"
+ shows "integrable (lebesgue_on UNIV) f = integrable lebesgue f"
+ by (auto simp: integrable_restrict_space)
+
text\<^marker>\<open>tag unimportant\<close> \<open>Measurability of continuous functions\<close>
lemma continuous_imp_measurable_on_sets_lebesgue:
--- a/src/HOL/Data_Structures/Sorting.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Data_Structures/Sorting.thy Sun Jun 09 22:22:36 2019 +0200
@@ -242,7 +242,7 @@
qed
qed
-(* Beware of conversions: *)
+(* Beware of implicit conversions: *)
lemma c_msort_log: "length xs = 2^k \<Longrightarrow> c_msort xs \<le> length xs * log 2 (length xs)"
using c_msort_le[of xs k] apply (simp add: log_nat_power algebra_simps)
by (metis (mono_tags) numeral_power_eq_of_nat_cancel_iff of_nat_le_iff of_nat_mult)
@@ -250,7 +250,6 @@
subsection "Bottom-Up Merge Sort"
-(* Exercise: make tail recursive *)
fun merge_adj :: "('a::linorder) list list \<Rightarrow> 'a list list" where
"merge_adj [] = []" |
"merge_adj [xs] = [xs]" |
--- a/src/HOL/Data_Structures/Tree23.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Data_Structures/Tree23.thy Sun Jun 09 22:22:36 2019 +0200
@@ -34,13 +34,13 @@
text \<open>Balanced:\<close>
-fun bal :: "'a tree23 \<Rightarrow> bool" where
-"bal Leaf = True" |
-"bal (Node2 l _ r) = (bal l & bal r & height l = height r)" |
-"bal (Node3 l _ m _ r) =
- (bal l & bal m & bal r & height l = height m & height m = height r)"
+fun complete :: "'a tree23 \<Rightarrow> bool" where
+"complete Leaf = True" |
+"complete (Node2 l _ r) = (complete l & complete r & height l = height r)" |
+"complete (Node3 l _ m _ r) =
+ (complete l & complete m & complete r & height l = height m & height m = height r)"
-lemma ht_sz_if_bal: "bal t \<Longrightarrow> 2 ^ height t \<le> size t + 1"
+lemma ht_sz_if_complete: "complete t \<Longrightarrow> 2 ^ height t \<le> size t + 1"
by (induction t) auto
end
--- a/src/HOL/Data_Structures/Tree23_Map.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Data_Structures/Tree23_Map.thy Sun Jun 09 22:22:36 2019 +0200
@@ -22,37 +22,37 @@
EQ \<Rightarrow> Some b2 |
GT \<Rightarrow> lookup r x))"
-fun upd :: "'a::linorder \<Rightarrow> 'b \<Rightarrow> ('a*'b) tree23 \<Rightarrow> ('a*'b) up\<^sub>i" where
-"upd x y Leaf = Up\<^sub>i Leaf (x,y) Leaf" |
+fun upd :: "'a::linorder \<Rightarrow> 'b \<Rightarrow> ('a*'b) tree23 \<Rightarrow> ('a*'b) upI" where
+"upd x y Leaf = OF Leaf (x,y) Leaf" |
"upd x y (Node2 l ab r) = (case cmp x (fst ab) of
LT \<Rightarrow> (case upd x y l of
- T\<^sub>i l' => T\<^sub>i (Node2 l' ab r)
- | Up\<^sub>i l1 ab' l2 => T\<^sub>i (Node3 l1 ab' l2 ab r)) |
- EQ \<Rightarrow> T\<^sub>i (Node2 l (x,y) r) |
+ TI l' => TI (Node2 l' ab r)
+ | OF l1 ab' l2 => TI (Node3 l1 ab' l2 ab r)) |
+ EQ \<Rightarrow> TI (Node2 l (x,y) r) |
GT \<Rightarrow> (case upd x y r of
- T\<^sub>i r' => T\<^sub>i (Node2 l ab r')
- | Up\<^sub>i r1 ab' r2 => T\<^sub>i (Node3 l ab r1 ab' r2)))" |
+ TI r' => TI (Node2 l ab r')
+ | OF r1 ab' r2 => TI (Node3 l ab r1 ab' r2)))" |
"upd x y (Node3 l ab1 m ab2 r) = (case cmp x (fst ab1) of
LT \<Rightarrow> (case upd x y l of
- T\<^sub>i l' => T\<^sub>i (Node3 l' ab1 m ab2 r)
- | Up\<^sub>i l1 ab' l2 => Up\<^sub>i (Node2 l1 ab' l2) ab1 (Node2 m ab2 r)) |
- EQ \<Rightarrow> T\<^sub>i (Node3 l (x,y) m ab2 r) |
+ TI l' => TI (Node3 l' ab1 m ab2 r)
+ | OF l1 ab' l2 => OF (Node2 l1 ab' l2) ab1 (Node2 m ab2 r)) |
+ EQ \<Rightarrow> TI (Node3 l (x,y) m ab2 r) |
GT \<Rightarrow> (case cmp x (fst ab2) of
LT \<Rightarrow> (case upd x y m of
- T\<^sub>i m' => T\<^sub>i (Node3 l ab1 m' ab2 r)
- | Up\<^sub>i m1 ab' m2 => Up\<^sub>i (Node2 l ab1 m1) ab' (Node2 m2 ab2 r)) |
- EQ \<Rightarrow> T\<^sub>i (Node3 l ab1 m (x,y) r) |
+ TI m' => TI (Node3 l ab1 m' ab2 r)
+ | OF m1 ab' m2 => OF (Node2 l ab1 m1) ab' (Node2 m2 ab2 r)) |
+ EQ \<Rightarrow> TI (Node3 l ab1 m (x,y) r) |
GT \<Rightarrow> (case upd x y r of
- T\<^sub>i r' => T\<^sub>i (Node3 l ab1 m ab2 r')
- | Up\<^sub>i r1 ab' r2 => Up\<^sub>i (Node2 l ab1 m) ab2 (Node2 r1 ab' r2))))"
+ TI r' => TI (Node3 l ab1 m ab2 r')
+ | OF r1 ab' r2 => OF (Node2 l ab1 m) ab2 (Node2 r1 ab' r2))))"
definition update :: "'a::linorder \<Rightarrow> 'b \<Rightarrow> ('a*'b) tree23 \<Rightarrow> ('a*'b) tree23" where
-"update a b t = tree\<^sub>i(upd a b t)"
+"update a b t = treeI(upd a b t)"
-fun del :: "'a::linorder \<Rightarrow> ('a*'b) tree23 \<Rightarrow> ('a*'b) up\<^sub>d" where
-"del x Leaf = T\<^sub>d Leaf" |
-"del x (Node2 Leaf ab1 Leaf) = (if x=fst ab1 then Up\<^sub>d Leaf else T\<^sub>d(Node2 Leaf ab1 Leaf))" |
-"del x (Node3 Leaf ab1 Leaf ab2 Leaf) = T\<^sub>d(if x=fst ab1 then Node2 Leaf ab2 Leaf
+fun del :: "'a::linorder \<Rightarrow> ('a*'b) tree23 \<Rightarrow> ('a*'b) upD" where
+"del x Leaf = TD Leaf" |
+"del x (Node2 Leaf ab1 Leaf) = (if x=fst ab1 then UF Leaf else TD(Node2 Leaf ab1 Leaf))" |
+"del x (Node3 Leaf ab1 Leaf ab2 Leaf) = TD(if x=fst ab1 then Node2 Leaf ab2 Leaf
else if x=fst ab2 then Node2 Leaf ab1 Leaf else Node3 Leaf ab1 Leaf ab2 Leaf)" |
"del x (Node2 l ab1 r) = (case cmp x (fst ab1) of
LT \<Rightarrow> node21 (del x l) ab1 r |
@@ -67,7 +67,7 @@
GT \<Rightarrow> node33 l ab1 m ab2 (del x r)))"
definition delete :: "'a::linorder \<Rightarrow> ('a*'b) tree23 \<Rightarrow> ('a*'b) tree23" where
-"delete x t = tree\<^sub>d(del x t)"
+"delete x t = treeD(del x t)"
subsection \<open>Functional Correctness\<close>
@@ -78,50 +78,50 @@
lemma inorder_upd:
- "sorted1(inorder t) \<Longrightarrow> inorder(tree\<^sub>i(upd x y t)) = upd_list x y (inorder t)"
-by(induction t) (auto simp: upd_list_simps split: up\<^sub>i.splits)
+ "sorted1(inorder t) \<Longrightarrow> inorder(treeI(upd x y t)) = upd_list x y (inorder t)"
+by(induction t) (auto simp: upd_list_simps split: upI.splits)
corollary inorder_update:
"sorted1(inorder t) \<Longrightarrow> inorder(update x y t) = upd_list x y (inorder t)"
by(simp add: update_def inorder_upd)
-lemma inorder_del: "\<lbrakk> bal t ; sorted1(inorder t) \<rbrakk> \<Longrightarrow>
- inorder(tree\<^sub>d (del x t)) = del_list x (inorder t)"
+lemma inorder_del: "\<lbrakk> complete t ; sorted1(inorder t) \<rbrakk> \<Longrightarrow>
+ inorder(treeD (del x t)) = del_list x (inorder t)"
by(induction t rule: del.induct)
(auto simp: del_list_simps inorder_nodes split_minD split!: if_split prod.splits)
-corollary inorder_delete: "\<lbrakk> bal t ; sorted1(inorder t) \<rbrakk> \<Longrightarrow>
+corollary inorder_delete: "\<lbrakk> complete t ; sorted1(inorder t) \<rbrakk> \<Longrightarrow>
inorder(delete x t) = del_list x (inorder t)"
by(simp add: delete_def inorder_del)
subsection \<open>Balancedness\<close>
-lemma bal_upd: "bal t \<Longrightarrow> bal (tree\<^sub>i(upd x y t)) \<and> height(upd x y t) = height t"
-by (induct t) (auto split!: if_split up\<^sub>i.split)(* 16 secs in 2015 *)
+lemma complete_upd: "complete t \<Longrightarrow> complete (treeI(upd x y t)) \<and> height(upd x y t) = height t"
+by (induct t) (auto split!: if_split upI.split)(* 16 secs in 2015 *)
-corollary bal_update: "bal t \<Longrightarrow> bal (update x y t)"
-by (simp add: update_def bal_upd)
+corollary complete_update: "complete t \<Longrightarrow> complete (update x y t)"
+by (simp add: update_def complete_upd)
-lemma height_del: "bal t \<Longrightarrow> height(del x t) = height t"
+lemma height_del: "complete t \<Longrightarrow> height(del x t) = height t"
by(induction x t rule: del.induct)
(auto simp add: heights max_def height_split_min split: prod.split)
-lemma bal_tree\<^sub>d_del: "bal t \<Longrightarrow> bal(tree\<^sub>d(del x t))"
+lemma complete_treeD_del: "complete t \<Longrightarrow> complete(treeD(del x t))"
by(induction x t rule: del.induct)
- (auto simp: bals bal_split_min height_del height_split_min split: prod.split)
+ (auto simp: completes complete_split_min height_del height_split_min split: prod.split)
-corollary bal_delete: "bal t \<Longrightarrow> bal(delete x t)"
-by(simp add: delete_def bal_tree\<^sub>d_del)
+corollary complete_delete: "complete t \<Longrightarrow> complete(delete x t)"
+by(simp add: delete_def complete_treeD_del)
subsection \<open>Overall Correctness\<close>
interpretation M: Map_by_Ordered
where empty = empty and lookup = lookup and update = update and delete = delete
-and inorder = inorder and inv = bal
+and inorder = inorder and inv = complete
proof (standard, goal_cases)
case 1 thus ?case by(simp add: empty_def)
next
@@ -133,9 +133,9 @@
next
case 5 thus ?case by(simp add: empty_def)
next
- case 6 thus ?case by(simp add: bal_update)
+ case 6 thus ?case by(simp add: complete_update)
next
- case 7 thus ?case by(simp add: bal_delete)
+ case 7 thus ?case by(simp add: complete_delete)
qed
end
--- a/src/HOL/Data_Structures/Tree23_Set.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Data_Structures/Tree23_Set.thy Sun Jun 09 22:22:36 2019 +0200
@@ -31,104 +31,104 @@
EQ \<Rightarrow> True |
GT \<Rightarrow> isin r x))"
-datatype 'a up\<^sub>i = T\<^sub>i "'a tree23" | Up\<^sub>i "'a tree23" 'a "'a tree23"
+datatype 'a upI = TI "'a tree23" | OF "'a tree23" 'a "'a tree23"
-fun tree\<^sub>i :: "'a up\<^sub>i \<Rightarrow> 'a tree23" where
-"tree\<^sub>i (T\<^sub>i t) = t" |
-"tree\<^sub>i (Up\<^sub>i l a r) = Node2 l a r"
+fun treeI :: "'a upI \<Rightarrow> 'a tree23" where
+"treeI (TI t) = t" |
+"treeI (OF l a r) = Node2 l a r"
-fun ins :: "'a::linorder \<Rightarrow> 'a tree23 \<Rightarrow> 'a up\<^sub>i" where
-"ins x Leaf = Up\<^sub>i Leaf x Leaf" |
+fun ins :: "'a::linorder \<Rightarrow> 'a tree23 \<Rightarrow> 'a upI" where
+"ins x Leaf = OF Leaf x Leaf" |
"ins x (Node2 l a r) =
(case cmp x a of
LT \<Rightarrow>
(case ins x l of
- T\<^sub>i l' => T\<^sub>i (Node2 l' a r) |
- Up\<^sub>i l1 b l2 => T\<^sub>i (Node3 l1 b l2 a r)) |
- EQ \<Rightarrow> T\<^sub>i (Node2 l x r) |
+ TI l' => TI (Node2 l' a r) |
+ OF l1 b l2 => TI (Node3 l1 b l2 a r)) |
+ EQ \<Rightarrow> TI (Node2 l x r) |
GT \<Rightarrow>
(case ins x r of
- T\<^sub>i r' => T\<^sub>i (Node2 l a r') |
- Up\<^sub>i r1 b r2 => T\<^sub>i (Node3 l a r1 b r2)))" |
+ TI r' => TI (Node2 l a r') |
+ OF r1 b r2 => TI (Node3 l a r1 b r2)))" |
"ins x (Node3 l a m b r) =
(case cmp x a of
LT \<Rightarrow>
(case ins x l of
- T\<^sub>i l' => T\<^sub>i (Node3 l' a m b r) |
- Up\<^sub>i l1 c l2 => Up\<^sub>i (Node2 l1 c l2) a (Node2 m b r)) |
- EQ \<Rightarrow> T\<^sub>i (Node3 l a m b r) |
+ TI l' => TI (Node3 l' a m b r) |
+ OF l1 c l2 => OF (Node2 l1 c l2) a (Node2 m b r)) |
+ EQ \<Rightarrow> TI (Node3 l a m b r) |
GT \<Rightarrow>
(case cmp x b of
GT \<Rightarrow>
(case ins x r of
- T\<^sub>i r' => T\<^sub>i (Node3 l a m b r') |
- Up\<^sub>i r1 c r2 => Up\<^sub>i (Node2 l a m) b (Node2 r1 c r2)) |
- EQ \<Rightarrow> T\<^sub>i (Node3 l a m b r) |
+ TI r' => TI (Node3 l a m b r') |
+ OF r1 c r2 => OF (Node2 l a m) b (Node2 r1 c r2)) |
+ EQ \<Rightarrow> TI (Node3 l a m b r) |
LT \<Rightarrow>
(case ins x m of
- T\<^sub>i m' => T\<^sub>i (Node3 l a m' b r) |
- Up\<^sub>i m1 c m2 => Up\<^sub>i (Node2 l a m1) c (Node2 m2 b r))))"
+ TI m' => TI (Node3 l a m' b r) |
+ OF m1 c m2 => OF (Node2 l a m1) c (Node2 m2 b r))))"
hide_const insert
definition insert :: "'a::linorder \<Rightarrow> 'a tree23 \<Rightarrow> 'a tree23" where
-"insert x t = tree\<^sub>i(ins x t)"
+"insert x t = treeI(ins x t)"
-datatype 'a up\<^sub>d = T\<^sub>d "'a tree23" | Up\<^sub>d "'a tree23"
+datatype 'a upD = TD "'a tree23" | UF "'a tree23"
-fun tree\<^sub>d :: "'a up\<^sub>d \<Rightarrow> 'a tree23" where
-"tree\<^sub>d (T\<^sub>d t) = t" |
-"tree\<^sub>d (Up\<^sub>d t) = t"
+fun treeD :: "'a upD \<Rightarrow> 'a tree23" where
+"treeD (TD t) = t" |
+"treeD (UF t) = t"
(* Variation: return None to signal no-change *)
-fun node21 :: "'a up\<^sub>d \<Rightarrow> 'a \<Rightarrow> 'a tree23 \<Rightarrow> 'a up\<^sub>d" where
-"node21 (T\<^sub>d t1) a t2 = T\<^sub>d(Node2 t1 a t2)" |
-"node21 (Up\<^sub>d t1) a (Node2 t2 b t3) = Up\<^sub>d(Node3 t1 a t2 b t3)" |
-"node21 (Up\<^sub>d t1) a (Node3 t2 b t3 c t4) = T\<^sub>d(Node2 (Node2 t1 a t2) b (Node2 t3 c t4))"
+fun node21 :: "'a upD \<Rightarrow> 'a \<Rightarrow> 'a tree23 \<Rightarrow> 'a upD" where
+"node21 (TD t1) a t2 = TD(Node2 t1 a t2)" |
+"node21 (UF t1) a (Node2 t2 b t3) = UF(Node3 t1 a t2 b t3)" |
+"node21 (UF t1) a (Node3 t2 b t3 c t4) = TD(Node2 (Node2 t1 a t2) b (Node2 t3 c t4))"
-fun node22 :: "'a tree23 \<Rightarrow> 'a \<Rightarrow> 'a up\<^sub>d \<Rightarrow> 'a up\<^sub>d" where
-"node22 t1 a (T\<^sub>d t2) = T\<^sub>d(Node2 t1 a t2)" |
-"node22 (Node2 t1 b t2) a (Up\<^sub>d t3) = Up\<^sub>d(Node3 t1 b t2 a t3)" |
-"node22 (Node3 t1 b t2 c t3) a (Up\<^sub>d t4) = T\<^sub>d(Node2 (Node2 t1 b t2) c (Node2 t3 a t4))"
+fun node22 :: "'a tree23 \<Rightarrow> 'a \<Rightarrow> 'a upD \<Rightarrow> 'a upD" where
+"node22 t1 a (TD t2) = TD(Node2 t1 a t2)" |
+"node22 (Node2 t1 b t2) a (UF t3) = UF(Node3 t1 b t2 a t3)" |
+"node22 (Node3 t1 b t2 c t3) a (UF t4) = TD(Node2 (Node2 t1 b t2) c (Node2 t3 a t4))"
-fun node31 :: "'a up\<^sub>d \<Rightarrow> 'a \<Rightarrow> 'a tree23 \<Rightarrow> 'a \<Rightarrow> 'a tree23 \<Rightarrow> 'a up\<^sub>d" where
-"node31 (T\<^sub>d t1) a t2 b t3 = T\<^sub>d(Node3 t1 a t2 b t3)" |
-"node31 (Up\<^sub>d t1) a (Node2 t2 b t3) c t4 = T\<^sub>d(Node2 (Node3 t1 a t2 b t3) c t4)" |
-"node31 (Up\<^sub>d t1) a (Node3 t2 b t3 c t4) d t5 = T\<^sub>d(Node3 (Node2 t1 a t2) b (Node2 t3 c t4) d t5)"
+fun node31 :: "'a upD \<Rightarrow> 'a \<Rightarrow> 'a tree23 \<Rightarrow> 'a \<Rightarrow> 'a tree23 \<Rightarrow> 'a upD" where
+"node31 (TD t1) a t2 b t3 = TD(Node3 t1 a t2 b t3)" |
+"node31 (UF t1) a (Node2 t2 b t3) c t4 = TD(Node2 (Node3 t1 a t2 b t3) c t4)" |
+"node31 (UF t1) a (Node3 t2 b t3 c t4) d t5 = TD(Node3 (Node2 t1 a t2) b (Node2 t3 c t4) d t5)"
-fun node32 :: "'a tree23 \<Rightarrow> 'a \<Rightarrow> 'a up\<^sub>d \<Rightarrow> 'a \<Rightarrow> 'a tree23 \<Rightarrow> 'a up\<^sub>d" where
-"node32 t1 a (T\<^sub>d t2) b t3 = T\<^sub>d(Node3 t1 a t2 b t3)" |
-"node32 t1 a (Up\<^sub>d t2) b (Node2 t3 c t4) = T\<^sub>d(Node2 t1 a (Node3 t2 b t3 c t4))" |
-"node32 t1 a (Up\<^sub>d t2) b (Node3 t3 c t4 d t5) = T\<^sub>d(Node3 t1 a (Node2 t2 b t3) c (Node2 t4 d t5))"
+fun node32 :: "'a tree23 \<Rightarrow> 'a \<Rightarrow> 'a upD \<Rightarrow> 'a \<Rightarrow> 'a tree23 \<Rightarrow> 'a upD" where
+"node32 t1 a (TD t2) b t3 = TD(Node3 t1 a t2 b t3)" |
+"node32 t1 a (UF t2) b (Node2 t3 c t4) = TD(Node2 t1 a (Node3 t2 b t3 c t4))" |
+"node32 t1 a (UF t2) b (Node3 t3 c t4 d t5) = TD(Node3 t1 a (Node2 t2 b t3) c (Node2 t4 d t5))"
-fun node33 :: "'a tree23 \<Rightarrow> 'a \<Rightarrow> 'a tree23 \<Rightarrow> 'a \<Rightarrow> 'a up\<^sub>d \<Rightarrow> 'a up\<^sub>d" where
-"node33 l a m b (T\<^sub>d r) = T\<^sub>d(Node3 l a m b r)" |
-"node33 t1 a (Node2 t2 b t3) c (Up\<^sub>d t4) = T\<^sub>d(Node2 t1 a (Node3 t2 b t3 c t4))" |
-"node33 t1 a (Node3 t2 b t3 c t4) d (Up\<^sub>d t5) = T\<^sub>d(Node3 t1 a (Node2 t2 b t3) c (Node2 t4 d t5))"
+fun node33 :: "'a tree23 \<Rightarrow> 'a \<Rightarrow> 'a tree23 \<Rightarrow> 'a \<Rightarrow> 'a upD \<Rightarrow> 'a upD" where
+"node33 l a m b (TD r) = TD(Node3 l a m b r)" |
+"node33 t1 a (Node2 t2 b t3) c (UF t4) = TD(Node2 t1 a (Node3 t2 b t3 c t4))" |
+"node33 t1 a (Node3 t2 b t3 c t4) d (UF t5) = TD(Node3 t1 a (Node2 t2 b t3) c (Node2 t4 d t5))"
-fun split_min :: "'a tree23 \<Rightarrow> 'a * 'a up\<^sub>d" where
-"split_min (Node2 Leaf a Leaf) = (a, Up\<^sub>d Leaf)" |
-"split_min (Node3 Leaf a Leaf b Leaf) = (a, T\<^sub>d(Node2 Leaf b Leaf))" |
+fun split_min :: "'a tree23 \<Rightarrow> 'a * 'a upD" where
+"split_min (Node2 Leaf a Leaf) = (a, UF Leaf)" |
+"split_min (Node3 Leaf a Leaf b Leaf) = (a, TD(Node2 Leaf b Leaf))" |
"split_min (Node2 l a r) = (let (x,l') = split_min l in (x, node21 l' a r))" |
"split_min (Node3 l a m b r) = (let (x,l') = split_min l in (x, node31 l' a m b r))"
text \<open>In the base cases of \<open>split_min\<close> and \<open>del\<close> it is enough to check if one subtree is a \<open>Leaf\<close>,
in which case balancedness implies that so are the others. Exercise.\<close>
-fun del :: "'a::linorder \<Rightarrow> 'a tree23 \<Rightarrow> 'a up\<^sub>d" where
-"del x Leaf = T\<^sub>d Leaf" |
+fun del :: "'a::linorder \<Rightarrow> 'a tree23 \<Rightarrow> 'a upD" where
+"del x Leaf = TD Leaf" |
"del x (Node2 Leaf a Leaf) =
- (if x = a then Up\<^sub>d Leaf else T\<^sub>d(Node2 Leaf a Leaf))" |
+ (if x = a then UF Leaf else TD(Node2 Leaf a Leaf))" |
"del x (Node3 Leaf a Leaf b Leaf) =
- T\<^sub>d(if x = a then Node2 Leaf b Leaf else
+ TD(if x = a then Node2 Leaf b Leaf else
if x = b then Node2 Leaf a Leaf
else Node3 Leaf a Leaf b Leaf)" |
"del x (Node2 l a r) =
(case cmp x a of
LT \<Rightarrow> node21 (del x l) a r |
GT \<Rightarrow> node22 l a (del x r) |
- EQ \<Rightarrow> let (a',t) = split_min r in node22 l a' t)" |
+ EQ \<Rightarrow> let (a',r') = split_min r in node22 l a' r')" |
"del x (Node3 l a m b r) =
(case cmp x a of
LT \<Rightarrow> node31 (del x l) a m b r |
@@ -140,7 +140,7 @@
GT \<Rightarrow> node33 l a m b (del x r)))"
definition delete :: "'a::linorder \<Rightarrow> 'a tree23 \<Rightarrow> 'a tree23" where
-"delete x t = tree\<^sub>d(del x t)"
+"delete x t = treeD(del x t)"
subsection "Functional Correctness"
@@ -154,8 +154,8 @@
subsubsection "Proofs for insert"
lemma inorder_ins:
- "sorted(inorder t) \<Longrightarrow> inorder(tree\<^sub>i(ins x t)) = ins_list x (inorder t)"
-by(induction t) (auto simp: ins_list_simps split: up\<^sub>i.splits)
+ "sorted(inorder t) \<Longrightarrow> inorder(treeI(ins x t)) = ins_list x (inorder t)"
+by(induction t) (auto simp: ins_list_simps split: upI.splits)
lemma inorder_insert:
"sorted(inorder t) \<Longrightarrow> inorder(insert a t) = ins_list a (inorder t)"
@@ -165,40 +165,40 @@
subsubsection "Proofs for delete"
lemma inorder_node21: "height r > 0 \<Longrightarrow>
- inorder (tree\<^sub>d (node21 l' a r)) = inorder (tree\<^sub>d l') @ a # inorder r"
+ inorder (treeD (node21 l' a r)) = inorder (treeD l') @ a # inorder r"
by(induct l' a r rule: node21.induct) auto
lemma inorder_node22: "height l > 0 \<Longrightarrow>
- inorder (tree\<^sub>d (node22 l a r')) = inorder l @ a # inorder (tree\<^sub>d r')"
+ inorder (treeD (node22 l a r')) = inorder l @ a # inorder (treeD r')"
by(induct l a r' rule: node22.induct) auto
lemma inorder_node31: "height m > 0 \<Longrightarrow>
- inorder (tree\<^sub>d (node31 l' a m b r)) = inorder (tree\<^sub>d l') @ a # inorder m @ b # inorder r"
+ inorder (treeD (node31 l' a m b r)) = inorder (treeD l') @ a # inorder m @ b # inorder r"
by(induct l' a m b r rule: node31.induct) auto
lemma inorder_node32: "height r > 0 \<Longrightarrow>
- inorder (tree\<^sub>d (node32 l a m' b r)) = inorder l @ a # inorder (tree\<^sub>d m') @ b # inorder r"
+ inorder (treeD (node32 l a m' b r)) = inorder l @ a # inorder (treeD m') @ b # inorder r"
by(induct l a m' b r rule: node32.induct) auto
lemma inorder_node33: "height m > 0 \<Longrightarrow>
- inorder (tree\<^sub>d (node33 l a m b r')) = inorder l @ a # inorder m @ b # inorder (tree\<^sub>d r')"
+ inorder (treeD (node33 l a m b r')) = inorder l @ a # inorder m @ b # inorder (treeD r')"
by(induct l a m b r' rule: node33.induct) auto
lemmas inorder_nodes = inorder_node21 inorder_node22
inorder_node31 inorder_node32 inorder_node33
lemma split_minD:
- "split_min t = (x,t') \<Longrightarrow> bal t \<Longrightarrow> height t > 0 \<Longrightarrow>
- x # inorder(tree\<^sub>d t') = inorder t"
+ "split_min t = (x,t') \<Longrightarrow> complete t \<Longrightarrow> height t > 0 \<Longrightarrow>
+ x # inorder(treeD t') = inorder t"
by(induction t arbitrary: t' rule: split_min.induct)
(auto simp: inorder_nodes split: prod.splits)
-lemma inorder_del: "\<lbrakk> bal t ; sorted(inorder t) \<rbrakk> \<Longrightarrow>
- inorder(tree\<^sub>d (del x t)) = del_list x (inorder t)"
+lemma inorder_del: "\<lbrakk> complete t ; sorted(inorder t) \<rbrakk> \<Longrightarrow>
+ inorder(treeD (del x t)) = del_list x (inorder t)"
by(induction t rule: del.induct)
(auto simp: del_list_simps inorder_nodes split_minD split!: if_split prod.splits)
-lemma inorder_delete: "\<lbrakk> bal t ; sorted(inorder t) \<rbrakk> \<Longrightarrow>
+lemma inorder_delete: "\<lbrakk> complete t ; sorted(inorder t) \<rbrakk> \<Longrightarrow>
inorder(delete x t) = del_list x (inorder t)"
by(simp add: delete_def inorder_del)
@@ -208,21 +208,21 @@
subsubsection "Proofs for insert"
-text\<open>First a standard proof that \<^const>\<open>ins\<close> preserves \<^const>\<open>bal\<close>.\<close>
+text\<open>First a standard proof that \<^const>\<open>ins\<close> preserves \<^const>\<open>complete\<close>.\<close>
-instantiation up\<^sub>i :: (type)height
+instantiation upI :: (type)height
begin
-fun height_up\<^sub>i :: "'a up\<^sub>i \<Rightarrow> nat" where
-"height (T\<^sub>i t) = height t" |
-"height (Up\<^sub>i l a r) = height l"
+fun height_upI :: "'a upI \<Rightarrow> nat" where
+"height (TI t) = height t" |
+"height (OF l a r) = height l"
instance ..
end
-lemma bal_ins: "bal t \<Longrightarrow> bal (tree\<^sub>i(ins a t)) \<and> height(ins a t) = height t"
-by (induct t) (auto split!: if_split up\<^sub>i.split) (* 15 secs in 2015 *)
+lemma complete_ins: "complete t \<Longrightarrow> complete (treeI(ins a t)) \<and> height(ins a t) = height t"
+by (induct t) (auto split!: if_split upI.split) (* 15 secs in 2015 *)
text\<open>Now an alternative proof (by Brian Huffman) that runs faster because
two properties (balance and height) are combined in one predicate.\<close>
@@ -257,30 +257,30 @@
lemma full_imp_height: "full n t \<Longrightarrow> height t = n"
by (induct set: full, simp_all)
-lemma full_imp_bal: "full n t \<Longrightarrow> bal t"
+lemma full_imp_complete: "full n t \<Longrightarrow> complete t"
by (induct set: full, auto dest: full_imp_height)
-lemma bal_imp_full: "bal t \<Longrightarrow> full (height t) t"
+lemma complete_imp_full: "complete t \<Longrightarrow> full (height t) t"
by (induct t, simp_all)
-lemma bal_iff_full: "bal t \<longleftrightarrow> (\<exists>n. full n t)"
- by (auto elim!: bal_imp_full full_imp_bal)
+lemma complete_iff_full: "complete t \<longleftrightarrow> (\<exists>n. full n t)"
+ by (auto elim!: complete_imp_full full_imp_complete)
text \<open>The \<^const>\<open>insert\<close> function either preserves the height of the
-tree, or increases it by one. The constructor returned by the \<^term>\<open>insert\<close> function determines which: A return value of the form \<^term>\<open>T\<^sub>i t\<close> indicates that the height will be the same. A value of the
-form \<^term>\<open>Up\<^sub>i l p r\<close> indicates an increase in height.\<close>
+tree, or increases it by one. The constructor returned by the \<^term>\<open>insert\<close> function determines which: A return value of the form \<^term>\<open>TI t\<close> indicates that the height will be the same. A value of the
+form \<^term>\<open>OF l p r\<close> indicates an increase in height.\<close>
-fun full\<^sub>i :: "nat \<Rightarrow> 'a up\<^sub>i \<Rightarrow> bool" where
-"full\<^sub>i n (T\<^sub>i t) \<longleftrightarrow> full n t" |
-"full\<^sub>i n (Up\<^sub>i l p r) \<longleftrightarrow> full n l \<and> full n r"
+fun full\<^sub>i :: "nat \<Rightarrow> 'a upI \<Rightarrow> bool" where
+"full\<^sub>i n (TI t) \<longleftrightarrow> full n t" |
+"full\<^sub>i n (OF l p r) \<longleftrightarrow> full n l \<and> full n r"
lemma full\<^sub>i_ins: "full n t \<Longrightarrow> full\<^sub>i n (ins a t)"
-by (induct rule: full.induct) (auto split: up\<^sub>i.split)
+by (induct rule: full.induct) (auto split: upI.split)
-text \<open>The \<^const>\<open>insert\<close> operation preserves balance.\<close>
+text \<open>The \<^const>\<open>insert\<close> operation preserves completeness.\<close>
-lemma bal_insert: "bal t \<Longrightarrow> bal (insert a t)"
-unfolding bal_iff_full insert_def
+lemma complete_insert: "complete t \<Longrightarrow> complete (insert a t)"
+unfolding complete_iff_full insert_def
apply (erule exE)
apply (drule full\<^sub>i_ins [of _ _ a])
apply (cases "ins a t")
@@ -290,42 +290,42 @@
subsection "Proofs for delete"
-instantiation up\<^sub>d :: (type)height
+instantiation upD :: (type)height
begin
-fun height_up\<^sub>d :: "'a up\<^sub>d \<Rightarrow> nat" where
-"height (T\<^sub>d t) = height t" |
-"height (Up\<^sub>d t) = height t + 1"
+fun height_upD :: "'a upD \<Rightarrow> nat" where
+"height (TD t) = height t" |
+"height (UF t) = height t + 1"
instance ..
end
-lemma bal_tree\<^sub>d_node21:
- "\<lbrakk>bal r; bal (tree\<^sub>d l'); height r = height l' \<rbrakk> \<Longrightarrow> bal (tree\<^sub>d (node21 l' a r))"
+lemma complete_treeD_node21:
+ "\<lbrakk>complete r; complete (treeD l'); height r = height l' \<rbrakk> \<Longrightarrow> complete (treeD (node21 l' a r))"
by(induct l' a r rule: node21.induct) auto
-lemma bal_tree\<^sub>d_node22:
- "\<lbrakk>bal(tree\<^sub>d r'); bal l; height r' = height l \<rbrakk> \<Longrightarrow> bal (tree\<^sub>d (node22 l a r'))"
+lemma complete_treeD_node22:
+ "\<lbrakk>complete(treeD r'); complete l; height r' = height l \<rbrakk> \<Longrightarrow> complete (treeD (node22 l a r'))"
by(induct l a r' rule: node22.induct) auto
-lemma bal_tree\<^sub>d_node31:
- "\<lbrakk> bal (tree\<^sub>d l'); bal m; bal r; height l' = height r; height m = height r \<rbrakk>
- \<Longrightarrow> bal (tree\<^sub>d (node31 l' a m b r))"
+lemma complete_treeD_node31:
+ "\<lbrakk> complete (treeD l'); complete m; complete r; height l' = height r; height m = height r \<rbrakk>
+ \<Longrightarrow> complete (treeD (node31 l' a m b r))"
by(induct l' a m b r rule: node31.induct) auto
-lemma bal_tree\<^sub>d_node32:
- "\<lbrakk> bal l; bal (tree\<^sub>d m'); bal r; height l = height r; height m' = height r \<rbrakk>
- \<Longrightarrow> bal (tree\<^sub>d (node32 l a m' b r))"
+lemma complete_treeD_node32:
+ "\<lbrakk> complete l; complete (treeD m'); complete r; height l = height r; height m' = height r \<rbrakk>
+ \<Longrightarrow> complete (treeD (node32 l a m' b r))"
by(induct l a m' b r rule: node32.induct) auto
-lemma bal_tree\<^sub>d_node33:
- "\<lbrakk> bal l; bal m; bal(tree\<^sub>d r'); height l = height r'; height m = height r' \<rbrakk>
- \<Longrightarrow> bal (tree\<^sub>d (node33 l a m b r'))"
+lemma complete_treeD_node33:
+ "\<lbrakk> complete l; complete m; complete(treeD r'); height l = height r'; height m = height r' \<rbrakk>
+ \<Longrightarrow> complete (treeD (node33 l a m b r'))"
by(induct l a m b r' rule: node33.induct) auto
-lemmas bals = bal_tree\<^sub>d_node21 bal_tree\<^sub>d_node22
- bal_tree\<^sub>d_node31 bal_tree\<^sub>d_node32 bal_tree\<^sub>d_node33
+lemmas completes = complete_treeD_node21 complete_treeD_node22
+ complete_treeD_node31 complete_treeD_node32 complete_treeD_node33
lemma height'_node21:
"height r > 0 \<Longrightarrow> height(node21 l' a r) = max (height l') (height r) + 1"
@@ -354,32 +354,32 @@
height'_node31 height'_node32 height'_node33
lemma height_split_min:
- "split_min t = (x, t') \<Longrightarrow> height t > 0 \<Longrightarrow> bal t \<Longrightarrow> height t' = height t"
+ "split_min t = (x, t') \<Longrightarrow> height t > 0 \<Longrightarrow> complete t \<Longrightarrow> height t' = height t"
by(induct t arbitrary: x t' rule: split_min.induct)
(auto simp: heights split: prod.splits)
-lemma height_del: "bal t \<Longrightarrow> height(del x t) = height t"
+lemma height_del: "complete t \<Longrightarrow> height(del x t) = height t"
by(induction x t rule: del.induct)
(auto simp: heights max_def height_split_min split: prod.splits)
-lemma bal_split_min:
- "\<lbrakk> split_min t = (x, t'); bal t; height t > 0 \<rbrakk> \<Longrightarrow> bal (tree\<^sub>d t')"
+lemma complete_split_min:
+ "\<lbrakk> split_min t = (x, t'); complete t; height t > 0 \<rbrakk> \<Longrightarrow> complete (treeD t')"
by(induct t arbitrary: x t' rule: split_min.induct)
- (auto simp: heights height_split_min bals split: prod.splits)
+ (auto simp: heights height_split_min completes split: prod.splits)
-lemma bal_tree\<^sub>d_del: "bal t \<Longrightarrow> bal(tree\<^sub>d(del x t))"
+lemma complete_treeD_del: "complete t \<Longrightarrow> complete(treeD(del x t))"
by(induction x t rule: del.induct)
- (auto simp: bals bal_split_min height_del height_split_min split: prod.splits)
+ (auto simp: completes complete_split_min height_del height_split_min split: prod.splits)
-corollary bal_delete: "bal t \<Longrightarrow> bal(delete x t)"
-by(simp add: delete_def bal_tree\<^sub>d_del)
+corollary complete_delete: "complete t \<Longrightarrow> complete(delete x t)"
+by(simp add: delete_def complete_treeD_del)
subsection \<open>Overall Correctness\<close>
interpretation S: Set_by_Ordered
where empty = empty and isin = isin and insert = insert and delete = delete
-and inorder = inorder and inv = bal
+and inorder = inorder and inv = complete
proof (standard, goal_cases)
case 2 thus ?case by(simp add: isin_set)
next
@@ -387,9 +387,9 @@
next
case 4 thus ?case by(simp add: inorder_delete)
next
- case 6 thus ?case by(simp add: bal_insert)
+ case 6 thus ?case by(simp add: complete_insert)
next
- case 7 thus ?case by(simp add: bal_delete)
+ case 7 thus ?case by(simp add: complete_delete)
qed (simp add: empty_def)+
end
--- a/src/HOL/Data_Structures/Trie_Fun.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Data_Structures/Trie_Fun.thy Sun Jun 09 22:22:36 2019 +0200
@@ -10,6 +10,9 @@
datatype 'a trie = Nd bool "'a \<Rightarrow> 'a trie option"
+definition empty :: "'a trie" where
+[simp]: "empty = Nd False (\<lambda>_. None)"
+
fun isin :: "'a trie \<Rightarrow> 'a list \<Rightarrow> bool" where
"isin (Nd b m) [] = b" |
"isin (Nd b m) (k # xs) = (case m k of None \<Rightarrow> False | Some t \<Rightarrow> isin t xs)"
@@ -17,7 +20,7 @@
fun insert :: "('a::linorder) list \<Rightarrow> 'a trie \<Rightarrow> 'a trie" where
"insert [] (Nd b m) = Nd True m" |
"insert (x#xs) (Nd b m) =
- Nd b (m(x := Some(insert xs (case m x of None \<Rightarrow> Nd False (\<lambda>_. None) | Some t \<Rightarrow> t))))"
+ Nd b (m(x := Some(insert xs (case m x of None \<Rightarrow> empty | Some t \<Rightarrow> t))))"
fun delete :: "('a::linorder) list \<Rightarrow> 'a trie \<Rightarrow> 'a trie" where
"delete [] (Nd b m) = Nd False m" |
@@ -73,7 +76,7 @@
qed
interpretation S: Set
-where empty = "Nd False (\<lambda>_. None)" and isin = isin and insert = insert and delete = delete
+where empty = empty and isin = isin and insert = insert and delete = delete
and set = set and invar = "\<lambda>_. True"
proof (standard, goal_cases)
case 1 show ?case by (simp)
--- a/src/HOL/Data_Structures/Trie_Map.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Data_Structures/Trie_Map.thy Sun Jun 09 22:22:36 2019 +0200
@@ -29,6 +29,10 @@
apply(auto split: if_splits)
done
+
+definition empty :: "'a trie_map" where
+[simp]: "empty = Nd False Leaf"
+
fun isin :: "('a::linorder) trie_map \<Rightarrow> 'a list \<Rightarrow> bool" where
"isin (Nd b m) [] = b" |
"isin (Nd b m) (x # xs) = (case lookup m x of None \<Rightarrow> False | Some t \<Rightarrow> isin t xs)"
@@ -36,7 +40,7 @@
fun insert :: "('a::linorder) list \<Rightarrow> 'a trie_map \<Rightarrow> 'a trie_map" where
"insert [] (Nd b m) = Nd True m" |
"insert (x#xs) (Nd b m) =
- Nd b (update x (insert xs (case lookup m x of None \<Rightarrow> Nd False Leaf | Some t \<Rightarrow> t)) m)"
+ Nd b (update x (insert xs (case lookup m x of None \<Rightarrow> empty | Some t \<Rightarrow> t)) m)"
fun delete :: "('a::linorder) list \<Rightarrow> 'a trie_map \<Rightarrow> 'a trie_map" where
"delete [] (Nd b m) = Nd False m" |
@@ -84,7 +88,7 @@
text \<open>Overall correctness w.r.t. the \<open>Set\<close> ADT:\<close>
interpretation S2: Set
-where empty = "Nd False Leaf" and isin = isin and insert = insert and delete = delete
+where empty = empty and isin = isin and insert = insert and delete = delete
and set = "set o abs" and invar = invar
proof (standard, goal_cases)
case 1 show ?case by (simp)
--- a/src/HOL/Data_Structures/Tries_Binary.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Data_Structures/Tries_Binary.thy Sun Jun 09 22:22:36 2019 +0200
@@ -21,6 +21,9 @@
datatype trie = Lf | Nd bool "trie * trie"
+definition empty :: trie where
+[simp]: "empty = Lf"
+
fun isin :: "trie \<Rightarrow> bool list \<Rightarrow> bool" where
"isin Lf ks = False" |
"isin (Nd b lr) ks =
@@ -34,8 +37,8 @@
"insert (k#ks) Lf = Nd False (mod2 (insert ks) k (Lf,Lf))" |
"insert (k#ks) (Nd b lr) = Nd b (mod2 (insert ks) k lr)"
-lemma isin_insert: "isin (insert as t) bs = (as = bs \<or> isin t bs)"
-apply(induction as t arbitrary: bs rule: insert.induct)
+lemma isin_insert: "isin (insert xs t) ys = (xs = ys \<or> isin t ys)"
+apply(induction xs t arbitrary: ys rule: insert.induct)
apply (auto split: list.splits if_splits)
done
@@ -65,8 +68,8 @@
[] \<Rightarrow> node False lr |
k#ks' \<Rightarrow> node b (mod2 (delete ks') k lr))"
-lemma isin_delete: "isin (delete as t) bs = (as \<noteq> bs \<and> isin t bs)"
-apply(induction as t arbitrary: bs rule: delete.induct)
+lemma isin_delete: "isin (delete xs t) ys = (xs \<noteq> ys \<and> isin t ys)"
+apply(induction xs t arbitrary: ys rule: delete.induct)
apply simp
apply (auto split: list.splits if_splits)
apply (metis isin.simps(1))
@@ -76,28 +79,37 @@
definition set_trie :: "trie \<Rightarrow> bool list set" where
"set_trie t = {xs. isin t xs}"
+lemma set_trie_empty: "set_trie empty = {}"
+by(simp add: set_trie_def)
+
+lemma set_trie_isin: "isin t xs = (xs \<in> set_trie t)"
+by(simp add: set_trie_def)
+
lemma set_trie_insert: "set_trie(insert xs t) = set_trie t \<union> {xs}"
by(auto simp add: isin_insert set_trie_def)
+lemma set_trie_delete: "set_trie(delete xs t) = set_trie t - {xs}"
+by(auto simp add: isin_delete set_trie_def)
+
interpretation S: Set
-where empty = Lf and isin = isin and insert = insert and delete = delete
+where empty = empty and isin = isin and insert = insert and delete = delete
and set = set_trie and invar = "\<lambda>t. True"
proof (standard, goal_cases)
- case 1 show ?case by (simp add: set_trie_def)
+ case 1 show ?case by (rule set_trie_empty)
next
- case 2 thus ?case by(simp add: set_trie_def)
+ case 2 show ?case by(rule set_trie_isin)
next
case 3 thus ?case by(auto simp: set_trie_insert)
next
- case 4 thus ?case by(auto simp: isin_delete set_trie_def)
+ case 4 show ?case by(rule set_trie_delete)
qed (rule TrueI)+
subsection "Patricia Trie"
-datatype ptrie = LfP | NdP "bool list" bool "ptrie * ptrie"
+datatype trieP = LfP | NdP "bool list" bool "trieP * trieP"
-fun isinP :: "ptrie \<Rightarrow> bool list \<Rightarrow> bool" where
+fun isinP :: "trieP \<Rightarrow> bool list \<Rightarrow> bool" where
"isinP LfP ks = False" |
"isinP (NdP ps b lr) ks =
(let n = length ps in
@@ -105,6 +117,9 @@
then case drop n ks of [] \<Rightarrow> b | k#ks' \<Rightarrow> isinP (sel2 k lr) ks'
else False)"
+definition emptyP :: trieP where
+[simp]: "emptyP = LfP"
+
fun split where
"split [] ys = ([],[],ys)" |
"split xs [] = ([],xs,[])" |
@@ -118,7 +133,8 @@
\<Longrightarrow> mod2 f k lr= mod2 f' k' lr'"
by(cases lr, cases lr', auto)
-fun insertP :: "bool list \<Rightarrow> ptrie \<Rightarrow> ptrie" where
+
+fun insertP :: "bool list \<Rightarrow> trieP \<Rightarrow> trieP" where
"insertP ks LfP = NdP ks True (LfP,LfP)" |
"insertP ks (NdP ps b lr) =
(case split ks ps of
@@ -133,10 +149,10 @@
(qs,[],[]) \<Rightarrow> NdP ps True lr)"
-fun nodeP :: "bool list \<Rightarrow> bool \<Rightarrow> ptrie * ptrie \<Rightarrow> ptrie" where
+fun nodeP :: "bool list \<Rightarrow> bool \<Rightarrow> trieP * trieP \<Rightarrow> trieP" where
"nodeP ps b lr = (if \<not> b \<and> lr = (LfP,LfP) then LfP else NdP ps b lr)"
-fun deleteP :: "bool list \<Rightarrow> ptrie \<Rightarrow> ptrie" where
+fun deleteP :: "bool list \<Rightarrow> trieP \<Rightarrow> trieP" where
"deleteP ks LfP = LfP" |
"deleteP ks (NdP ps b lr) =
(case split ks ps of
@@ -147,16 +163,16 @@
subsubsection \<open>Functional Correctness\<close>
-text \<open>First step: @{typ ptrie} implements @{typ trie} via the abstraction function \<open>abs_ptrie\<close>:\<close>
+text \<open>First step: @{typ trieP} implements @{typ trie} via the abstraction function \<open>abs_trieP\<close>:\<close>
fun prefix_trie :: "bool list \<Rightarrow> trie \<Rightarrow> trie" where
"prefix_trie [] t = t" |
"prefix_trie (k#ks) t =
(let t' = prefix_trie ks t in Nd False (if k then (Lf,t') else (t',Lf)))"
-fun abs_ptrie :: "ptrie \<Rightarrow> trie" where
-"abs_ptrie LfP = Lf" |
-"abs_ptrie (NdP ps b (l,r)) = prefix_trie ps (Nd b (abs_ptrie l, abs_ptrie r))"
+fun abs_trieP :: "trieP \<Rightarrow> trie" where
+"abs_trieP LfP = Lf" |
+"abs_trieP (NdP ps b (l,r)) = prefix_trie ps (Nd b (abs_trieP l, abs_trieP r))"
text \<open>Correctness of @{const isinP}:\<close>
@@ -168,9 +184,9 @@
apply(auto split: list.split)
done
-lemma isinP:
- "isinP t ks = isin (abs_ptrie t) ks"
-apply(induction t arbitrary: ks rule: abs_ptrie.induct)
+lemma abs_trieP_isinP:
+ "isinP t ks = isin (abs_trieP t) ks"
+apply(induction t arbitrary: ks rule: abs_trieP.induct)
apply(auto simp: isin_prefix_trie split: list.split)
done
@@ -204,8 +220,8 @@
apply(auto split: prod.splits if_splits)
done
-lemma abs_ptrie_insertP:
- "abs_ptrie (insertP ks t) = insert ks (abs_ptrie t)"
+lemma abs_trieP_insertP:
+ "abs_trieP (insertP ks t) = insert ks (abs_trieP t)"
apply(induction t arbitrary: ks)
apply(auto simp: prefix_trie_Lfs insert_prefix_trie_same insert_append prefix_trie_append
dest!: split_if split: list.split prod.split if_splits)
@@ -217,7 +233,7 @@
lemma prefix_trie_Lf: "prefix_trie xs t = Lf \<longleftrightarrow> xs = [] \<and> t = Lf"
by(cases xs)(auto)
-lemma abs_ptrie_Lf: "abs_ptrie t = Lf \<longleftrightarrow> t = LfP"
+lemma abs_trieP_Lf: "abs_trieP t = Lf \<longleftrightarrow> t = LfP"
by(cases t) (auto simp: prefix_trie_Lf)
lemma delete_prefix_trie:
@@ -230,35 +246,40 @@
= (if delete ys t = Lf then Lf else prefix_trie xs (delete ys t))"
by(induction xs)(auto simp: prefix_trie_Lf)
-lemma delete_abs_ptrie:
- "delete ks (abs_ptrie t) = abs_ptrie (deleteP ks t)"
+lemma delete_abs_trieP:
+ "delete ks (abs_trieP t) = abs_trieP (deleteP ks t)"
apply(induction t arbitrary: ks)
apply(auto simp: delete_prefix_trie delete_append_prefix_trie
- prefix_trie_append prefix_trie_Lf abs_ptrie_Lf
+ prefix_trie_append prefix_trie_Lf abs_trieP_Lf
dest!: split_if split: if_splits list.split prod.split)
done
text \<open>The overall correctness proof. Simply composes correctness lemmas.\<close>
-definition set_ptrie :: "ptrie \<Rightarrow> bool list set" where
-"set_ptrie = set_trie o abs_ptrie"
+definition set_trieP :: "trieP \<Rightarrow> bool list set" where
+"set_trieP = set_trie o abs_trieP"
+
+lemma isinP_set_trieP: "isinP t xs = (xs \<in> set_trieP t)"
+by(simp add: abs_trieP_isinP set_trie_isin set_trieP_def)
-lemma set_ptrie_insertP: "set_ptrie (insertP xs t) = set_ptrie t \<union> {xs}"
-by(simp add: abs_ptrie_insertP set_trie_insert set_ptrie_def)
+lemma set_trieP_insertP: "set_trieP (insertP xs t) = set_trieP t \<union> {xs}"
+by(simp add: abs_trieP_insertP set_trie_insert set_trieP_def)
+
+lemma set_trieP_deleteP: "set_trieP (deleteP xs t) = set_trieP t - {xs}"
+by(auto simp: set_trie_delete set_trieP_def simp flip: delete_abs_trieP)
interpretation SP: Set
-where empty = LfP and isin = isinP and insert = insertP and delete = deleteP
-and set = set_ptrie and invar = "\<lambda>t. True"
+where empty = emptyP and isin = isinP and insert = insertP and delete = deleteP
+and set = set_trieP and invar = "\<lambda>t. True"
proof (standard, goal_cases)
- case 1 show ?case by (simp add: set_ptrie_def set_trie_def)
+ case 1 show ?case by (simp add: set_trieP_def set_trie_def)
next
- case 2 thus ?case by(simp add: isinP set_ptrie_def set_trie_def)
+ case 2 show ?case by(rule isinP_set_trieP)
next
- case 3 thus ?case by (auto simp: set_ptrie_insertP)
+ case 3 thus ?case by (auto simp: set_trieP_insertP)
next
- case 4 thus ?case
- by(auto simp: isin_delete set_ptrie_def set_trie_def simp flip: delete_abs_ptrie)
+ case 4 thus ?case by(auto simp: set_trieP_deleteP)
qed (rule TrueI)+
end
--- a/src/HOL/Decision_Procs/approximation.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Decision_Procs/approximation.ML Sun Jun 09 22:22:36 2019 +0200
@@ -271,7 +271,7 @@
val t = Syntax.read_term ctxt raw_t;
val t' = approx 30 ctxt t;
val ty' = Term.type_of t';
- val ctxt' = Variable.auto_fixes t' ctxt;
+ val ctxt' = Proof_Context.augment t' ctxt;
in
Print_Mode.with_modes modes (fn () =>
Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t'), Pretty.fbrk,
--- a/src/HOL/HOL.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/HOL.thy Sun Jun 09 22:22:36 2019 +0200
@@ -1251,7 +1251,7 @@
else
let (*Norbert Schirmer's case*)
val t = Thm.term_of ct;
- val ([t'], ctxt') = Variable.import_terms false [t] ctxt;
+ val (t', ctxt') = yield_singleton (Variable.import_terms false) t ctxt;
in
Option.map (hd o Variable.export ctxt' ctxt o single)
(case t' of Const (\<^const_name>\<open>Let\<close>,_) $ x $ f => (* x and f are already in normal form *)
--- a/src/HOL/Import/import_rule.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Import/import_rule.ML Sun Jun 09 22:22:36 2019 +0200
@@ -373,7 +373,7 @@
gettm t tstate ||>> (fn ti => getth th ti) |>> (fn (tm, th) => abs tm th) |-> setth
| process (thy, state) (#"M", [s]) =
let
- val ctxt = Variable.set_body false (Proof_Context.init_global thy)
+ val ctxt = Proof_Context.init_global thy
val thm = freezeT thy (Global_Theory.get_thm thy s)
val ((_, [th']), _) = Variable.import true [thm] ctxt
in
--- a/src/HOL/Library/Cancellation/cancel.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Library/Cancellation/cancel.ML Sun Jun 09 22:22:36 2019 +0200
@@ -47,7 +47,7 @@
fun proc ctxt ct =
let
val t = Thm.term_of ct
- val ([t'], ctxt') = Variable.import_terms true [t] ctxt
+ val (t', ctxt') = yield_singleton (Variable.import_terms true) t ctxt
val pre_simplified_ct = Simplifier.full_rewrite (clear_simpset ctxt addsimps
Named_Theorems.get ctxt \<^named_theorems>\<open>cancelation_simproc_pre\<close> addsimprocs
[\<^simproc>\<open>NO_MATCH\<close>]) (Thm.cterm_of ctxt t');
--- a/src/HOL/Library/Extended_Nonnegative_Real.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Library/Extended_Nonnegative_Real.thy Sun Jun 09 22:22:36 2019 +0200
@@ -956,6 +956,15 @@
lemma ennreal_numeral[simp]: "ennreal (numeral n) = numeral n"
using ennreal_of_nat_eq_real_of_nat[of "numeral n"] by simp
+lemma ennreal_less_numeral_iff [simp]: "ennreal n < numeral w \<longleftrightarrow> n < numeral w"
+ by (metis ennreal_less_iff ennreal_numeral less_le not_less zero_less_numeral)
+
+lemma numeral_less_ennreal_iff [simp]: "numeral w < ennreal n \<longleftrightarrow> numeral w < n"
+ using ennreal_less_iff zero_le_numeral by fastforce
+
+lemma numeral_le_ennreal_iff [simp]: "numeral n \<le> ennreal m \<longleftrightarrow> numeral n \<le> m"
+ by (metis not_le ennreal_less_numeral_iff)
+
lemma min_ennreal: "0 \<le> x \<Longrightarrow> 0 \<le> y \<Longrightarrow> min (ennreal x) (ennreal y) = ennreal (min x y)"
by (auto split: split_min)
--- a/src/HOL/Library/Finite_Map.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Library/Finite_Map.thy Sun Jun 09 22:22:36 2019 +0200
@@ -254,6 +254,15 @@
lemma fmdom_fmupd[simp]: "fmdom (fmupd a b m) = finsert a (fmdom m)" by transfer (simp add: map_upd_def)
lemma fmdom'_fmupd[simp]: "fmdom' (fmupd a b m) = insert a (fmdom' m)" by transfer (simp add: map_upd_def)
+lemma fmupd_reorder_neq:
+ assumes "a \<noteq> b"
+ shows "fmupd a x (fmupd b y m) = fmupd b y (fmupd a x m)"
+using assms
+by transfer' (auto simp: map_upd_def)
+
+lemma fmupd_idem[simp]: "fmupd a x (fmupd a y m) = fmupd a x m"
+by transfer' (auto simp: map_upd_def)
+
lift_definition fmfilter :: "('a \<Rightarrow> bool) \<Rightarrow> ('a, 'b) fmap \<Rightarrow> ('a, 'b) fmap"
is map_filter
parametric map_filter_transfer
@@ -357,6 +366,18 @@
lemma fmdom'_restrict_set: "fmdom' (fmrestrict_set A m) \<subseteq> A" unfolding fmfilter_alt_defs by auto
lemma fmdom_restrict_fset: "fmdom (fmrestrict_fset A m) |\<subseteq>| A" unfolding fmfilter_alt_defs by auto
+lemma fmdrop_fmupd: "fmdrop x (fmupd y z m) = (if x = y then fmdrop x m else fmupd y z (fmdrop x m))"
+by transfer' (auto simp: map_drop_def map_filter_def map_upd_def)
+
+lemma fmdrop_idle: "x |\<notin>| fmdom B \<Longrightarrow> fmdrop x B = B"
+by transfer' (auto simp: map_drop_def map_filter_def)
+
+lemma fmdrop_idle': "x \<notin> fmdom' B \<Longrightarrow> fmdrop x B = B"
+by transfer' (auto simp: map_drop_def map_filter_def)
+
+lemma fmdrop_fmupd_same: "fmdrop x (fmupd x y m) = fmdrop x m"
+by transfer' (auto simp: map_drop_def map_filter_def map_upd_def)
+
lemma fmdom'_restrict_set_precise: "fmdom' (fmrestrict_set A m) = fmdom' m \<inter> A"
unfolding fmfilter_alt_defs by auto
@@ -404,6 +425,12 @@
lemma fmdrop_fset_empty[simp]: "fmdrop_fset A fmempty = fmempty"
unfolding fmfilter_alt_defs by simp
+lemma fmdrop_fset_fmdom[simp]: "fmdrop_fset (fmdom A) A = fmempty"
+by transfer' (auto simp: map_drop_set_def map_filter_def)
+
+lemma fmdrop_set_fmdom[simp]: "fmdrop_set (fmdom' A) A = fmempty"
+by transfer' (auto simp: map_drop_set_def map_filter_def)
+
lemma fmrestrict_set_empty[simp]: "fmrestrict_set A fmempty = fmempty"
unfolding fmfilter_alt_defs by simp
@@ -1028,6 +1055,8 @@
including fset.lifting
by transfer' (auto simp: set_of_map_def)
+lemma fmmap_fmupd: "fmmap f (fmupd x y m) = fmupd x (f y) (fmmap f m)"
+by transfer' (auto simp: fun_eq_iff map_upd_def)
subsection \<open>\<^const>\<open>size\<close> setup\<close>
--- a/src/HOL/Library/old_recdef.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Library/old_recdef.ML Sun Jun 09 22:22:36 2019 +0200
@@ -1591,7 +1591,7 @@
fun prove ctxt strict (t, tac) =
let
- val ctxt' = Variable.auto_fixes t ctxt;
+ val ctxt' = Proof_Context.augment t ctxt;
in
if strict
then Goal.prove ctxt' [] [] t (K tac)
--- a/src/HOL/List.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/List.thy Sun Jun 09 22:22:36 2019 +0200
@@ -791,7 +791,7 @@
by (fact measure_induct)
lemma induct_list012:
- "\<lbrakk>P []; \<And>x. P [x]; \<And>x y zs. P (y # zs) \<Longrightarrow> P (x # y # zs)\<rbrakk> \<Longrightarrow> P xs"
+ "\<lbrakk>P []; \<And>x. P [x]; \<And>x y zs. \<lbrakk> P zs; P (y # zs) \<rbrakk> \<Longrightarrow> P (x # y # zs)\<rbrakk> \<Longrightarrow> P xs"
by induction_schema (pat_completeness, lexicographic_order)
lemma list_nonempty_induct [consumes 1, case_names single cons]:
@@ -5486,26 +5486,7 @@
text \<open>Stability of \<^const>\<open>sort_key\<close>:\<close>
lemma sort_key_stable: "filter (\<lambda>y. f y = k) (sort_key f xs) = filter (\<lambda>y. f y = k) xs"
-proof (induction xs)
- case Nil thus ?case by simp
-next
- case (Cons a xs)
- thus ?case
- proof (cases "f a = k")
- case False thus ?thesis
- using Cons.IH by (metis (mono_tags) filter.simps(2) filter_sort)
- next
- case True
- hence ler: "filter (\<lambda>y. f y = k) (a # xs) = a # filter (\<lambda>y. f y = f a) xs" by simp
- have "\<forall>y \<in> set (sort_key f (filter (\<lambda>y. f y = f a) xs)). f y = f a" by simp
- hence "insort_key f a (sort_key f (filter (\<lambda>y. f y = f a) xs))
- = a # (sort_key f (filter (\<lambda>y. f y = f a) xs))"
- by (simp add: insort_is_Cons)
- hence lel: "filter (\<lambda>y. f y = k) (sort_key f (a # xs)) = a # filter (\<lambda>y. f y = f a) (sort_key f xs)"
- by (metis True filter_sort ler sort_key_simps(2))
- from lel ler show ?thesis using Cons.IH True by (auto)
- qed
-qed
+by (induction xs) (auto simp: filter_insort insort_is_Cons filter_insort_triv)
corollary stable_sort_key_sort_key: "stable_sort_key sort_key"
by(simp add: stable_sort_key_def sort_key_stable)
--- a/src/HOL/Nominal/nominal_inductive.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Nominal/nominal_inductive.ML Sun Jun 09 22:22:36 2019 +0200
@@ -162,7 +162,8 @@
commas_quote xs));
val induct_cases = map (fst o fst) (fst (Rule_Cases.get (the
(Induct.lookup_inductP ctxt (hd names)))));
- val ([raw_induct'], ctxt') = Variable.import_terms false [Thm.prop_of raw_induct] ctxt;
+ val (raw_induct', ctxt') = ctxt
+ |> yield_singleton (Variable.import_terms false) (Thm.prop_of raw_induct);
val concls = raw_induct' |> Logic.strip_imp_concl |> HOLogic.dest_Trueprop |>
HOLogic.dest_conj |> map (HOLogic.dest_imp ##> strip_comb);
val ps = map (fst o snd) concls;
--- a/src/HOL/Nominal/nominal_inductive2.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Nominal/nominal_inductive2.ML Sun Jun 09 22:22:36 2019 +0200
@@ -169,7 +169,8 @@
(Induct.lookup_inductP ctxt (hd names)))));
val induct_cases' = if null induct_cases then replicate (length intrs) ""
else induct_cases;
- val ([raw_induct'], ctxt') = Variable.import_terms false [Thm.prop_of raw_induct] ctxt;
+ val (raw_induct', ctxt') = ctxt
+ |> yield_singleton (Variable.import_terms false) (Thm.prop_of raw_induct);
val concls = raw_induct' |> Logic.strip_imp_concl |> HOLogic.dest_Trueprop |>
HOLogic.dest_conj |> map (HOLogic.dest_imp ##> strip_comb);
val ps = map (fst o snd) concls;
--- a/src/HOL/Num.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Num.thy Sun Jun 09 22:22:36 2019 +0200
@@ -606,11 +606,9 @@
end
-subsubsection \<open>Comparisons: class \<open>linordered_semidom\<close>\<close>
+subsubsection \<open>Comparisons: class \<open>linordered_nonzero_semiring\<close>\<close>
-text \<open>Could be perhaps more general than here.\<close>
-
-context linordered_semidom
+context linordered_nonzero_semiring
begin
lemma numeral_le_iff: "numeral m \<le> numeral n \<longleftrightarrow> m \<le> n"
@@ -621,10 +619,10 @@
qed
lemma one_le_numeral: "1 \<le> numeral n"
- using numeral_le_iff [of One n] by (simp add: numeral_One)
+ using numeral_le_iff [of num.One n] by (simp add: numeral_One)
-lemma numeral_le_one_iff: "numeral n \<le> 1 \<longleftrightarrow> n \<le> One"
- using numeral_le_iff [of n One] by (simp add: numeral_One)
+lemma numeral_le_one_iff: "numeral n \<le> 1 \<longleftrightarrow> n \<le> num.One"
+ using numeral_le_iff [of n num.One] by (simp add: numeral_One)
lemma numeral_less_iff: "numeral m < numeral n \<longleftrightarrow> m < n"
proof -
@@ -634,16 +632,16 @@
qed
lemma not_numeral_less_one: "\<not> numeral n < 1"
- using numeral_less_iff [of n One] by (simp add: numeral_One)
+ using numeral_less_iff [of n num.One] by (simp add: numeral_One)
-lemma one_less_numeral_iff: "1 < numeral n \<longleftrightarrow> One < n"
- using numeral_less_iff [of One n] by (simp add: numeral_One)
+lemma one_less_numeral_iff: "1 < numeral n \<longleftrightarrow> num.One < n"
+ using numeral_less_iff [of num.One n] by (simp add: numeral_One)
lemma zero_le_numeral: "0 \<le> numeral n"
- by (induct n) (simp_all add: numeral.simps)
+ using dual_order.trans one_le_numeral zero_le_one by blast
lemma zero_less_numeral: "0 < numeral n"
- by (induct n) (simp_all add: numeral.simps add_pos_pos)
+ using less_linear not_numeral_less_one order.strict_trans zero_less_one by blast
lemma not_numeral_le_zero: "\<not> numeral n \<le> 0"
by (simp add: not_le zero_less_numeral)
--- a/src/HOL/Real.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Real.thy Sun Jun 09 22:22:36 2019 +0200
@@ -1261,7 +1261,7 @@
@{thm of_nat_mult}, @{thm of_int_0}, @{thm of_int_1},
@{thm of_int_add}, @{thm of_int_minus}, @{thm of_int_diff},
@{thm of_int_mult}, @{thm of_int_of_nat_eq},
- @{thm of_nat_numeral}, @{thm of_nat_numeral}, @{thm of_int_neg_numeral}]
+ @{thm of_nat_numeral}, @{thm of_int_numeral}, @{thm of_int_neg_numeral}]
#> Lin_Arith.add_inj_const (\<^const_name>\<open>of_nat\<close>, \<^typ>\<open>nat \<Rightarrow> real\<close>)
#> Lin_Arith.add_inj_const (\<^const_name>\<open>of_int\<close>, \<^typ>\<open>int \<Rightarrow> real\<close>))
\<close>
--- a/src/HOL/Real_Asymp/real_asymp_diag.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Real_Asymp/real_asymp_diag.ML Sun Jun 09 22:22:36 2019 +0200
@@ -122,7 +122,7 @@
let
val t = prep_term ctxt t
val flt = prep_flt ctxt flt
- val ctxt = Variable.auto_fixes t ctxt
+ val ctxt = Proof_Context.augment t ctxt
val t = reduce_to_at_top flt t
val facts = prep_facts ctxt facts
val ectxt = mk_eval_ctxt ctxt |> add_facts facts |> set_verbose true
@@ -165,7 +165,7 @@
let
val t = prep_term ctxt t
val flt = prep_flt ctxt flt
- val ctxt = Variable.auto_fixes t ctxt
+ val ctxt = Proof_Context.augment t ctxt
val t = reduce_to_at_top flt t
val facts = prep_facts ctxt facts
val ectxt = mk_eval_ctxt ctxt |> add_facts facts |> set_verbose true
--- a/src/HOL/Tools/ATP/atp_problem_generate.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/ATP/atp_problem_generate.ML Sun Jun 09 22:22:36 2019 +0200
@@ -737,7 +737,7 @@
| _ =>
if not (exists_subterm (fn Abs _ => true | _ => false) t) then t
else t |> Envir.eta_contract |> do_lambdas ctxt Ts)
- val (t, ctxt') = Variable.import_terms true [t] ctxt |>> the_single
+ val (t, ctxt') = yield_singleton (Variable.import_terms true) t ctxt
in t |> trans [] |> singleton (Variable.export_terms ctxt' ctxt) end
fun do_cheaply_conceal_lambdas Ts (t1 $ t2) =
--- a/src/HOL/Tools/Function/fun_cases.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Function/fun_cases.ML Sun Jun 09 22:22:36 2019 +0200
@@ -33,7 +33,7 @@
fun mk_elim rl =
Thm.implies_intr cprop
(Tactic.rule_by_tactic ctxt (Inductive.mk_cases_tac ctxt) (Thm.assume cprop RS rl))
- |> singleton (Variable.export (Variable.auto_fixes prop ctxt) ctxt);
+ |> singleton (Proof_Context.export (Proof_Context.augment prop ctxt) ctxt);
in
(case get_first (try mk_elim) (flat elims) of
SOME r => r
--- a/src/HOL/Tools/Function/function_common.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Function/function_common.ML Sun Jun 09 22:22:36 2019 +0200
@@ -114,7 +114,7 @@
inducts = Option.map fact inducts, termination = thm termination,
totality = Option.map thm totality, defname = Morphism.binding phi defname,
is_partial = is_partial, cases = fact cases,
- elims = Option.map (map fact) elims, pelims = map fact pelims }
+ pelims = map fact pelims, elims = Option.map (map fact) elims }
end
@@ -320,7 +320,7 @@
(case Item_Net.content (get_functions ctxt) of
[] => NONE
| (t, _) :: _ =>
- let val ([t'], ctxt') = Variable.import_terms true [t] ctxt
+ let val (t', ctxt') = yield_singleton (Variable.import_terms true) t ctxt
in import_function_data t' ctxt' end)
--- a/src/HOL/Tools/Function/partial_function.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Function/partial_function.ML Sun Jun 09 22:22:36 2019 +0200
@@ -164,7 +164,7 @@
(* instantiate generic fixpoint induction and eliminate the canonical assumptions;
curry induction predicate *)
-fun specialize_fixp_induct ctxt args fT fT_uc F curry uncurry mono_thm f_def rule =
+fun specialize_fixp_induct ctxt fT fT_uc curry uncurry mono_thm f_def rule =
let
val ([P], ctxt') = Variable.variant_fixes ["P"] ctxt
val P_inst = Abs ("f", fT_uc, Free (P, fT --> HOLogic.boolT) $ (curry $ Bound 0))
@@ -186,14 +186,14 @@
fun mk_curried_induct args ctxt inst_rule =
let
val cert = Thm.cterm_of ctxt
+ (* FIXME ctxt vs. ctxt' (!?) *)
val ([P], ctxt') = Variable.variant_fixes ["P"] ctxt
val split_paired_all_conv =
Conv.every_conv (replicate (length args - 1) (Conv.rewr_conv @{thm split_paired_all}))
val split_params_conv =
- Conv.params_conv ~1 (fn ctxt' =>
- Conv.implies_conv split_paired_all_conv Conv.all_conv)
+ Conv.params_conv ~1 (fn _ => Conv.implies_conv split_paired_all_conv Conv.all_conv)
val (P_var, x_var) =
Thm.prop_of inst_rule |> Logic.strip_imp_concl |> HOLogic.dest_Trueprop
@@ -280,7 +280,7 @@
|> Tactic.rule_by_tactic lthy' (Simplifier.simp_tac (put_simpset curry_uncurry_ss lthy') 1);
val specialized_fixp_induct =
- specialize_fixp_induct lthy' args fT fT_uc F curry uncurry inst_mono_thm f_def fixp_induct
+ specialize_fixp_induct lthy' fT fT_uc curry uncurry inst_mono_thm f_def fixp_induct
|> Drule.rename_bvars' (map SOME (f_bname :: f_bname :: argnames));
val mk_raw_induct =
--- a/src/HOL/Tools/Lifting/lifting_bnf.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Lifting/lifting_bnf.ML Sun Jun 09 22:22:36 2019 +0200
@@ -48,14 +48,13 @@
fun prove_Quotient_map bnf ctxt =
let
val live = live_of_bnf bnf
- val old_ctxt = ctxt
- val (((As, Bs), Ds), ctxt) = ctxt
+ val (((As, Bs), Ds), ctxt') = ctxt
|> mk_TFrees live
||>> mk_TFrees live
||>> mk_TFrees (dead_of_bnf bnf)
val argTss = map2 (fn a => fn b => [mk_pred2T a a, a --> b, b --> a,mk_pred2T a b]) As Bs
- val ((argss, argss'), ctxt) =
- @{fold_map 2} mk_Frees ["R", "Abs", "Rep", "T"] (transpose argTss) ctxt
+ val ((argss, argss'), ctxt'') = ctxt'
+ |> @{fold_map 2} mk_Frees ["R", "Abs", "Rep", "T"] (transpose argTss)
|>> `transpose
val assms = map (mk_Quotient #> HOLogic.mk_Trueprop) argss
@@ -65,11 +64,12 @@
val T_rel = list_comb (mk_rel_of_bnf Ds As Bs bnf, nth argss' 3)
val concl = mk_Quotient [R_rel, Abs_map, Rep_map, T_rel] |> HOLogic.mk_Trueprop
val goal = Logic.list_implies (assms, concl)
- val thm = Goal.prove_sorry ctxt [] [] goal
- (fn {context = ctxt, prems = _} => Quotient_tac bnf ctxt 1)
- |> Thm.close_derivation
in
- Drule.zero_var_indexes (singleton (Variable.export ctxt old_ctxt) thm)
+ Goal.prove_sorry ctxt'' [] [] goal
+ (fn {context = goal_ctxt, ...} => Quotient_tac bnf goal_ctxt 1)
+ |> Thm.close_derivation
+ |> singleton (Variable.export ctxt'' ctxt)
+ |> Drule.zero_var_indexes
end
--- a/src/HOL/Tools/Lifting/lifting_def.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Lifting/lifting_def.ML Sun Jun 09 22:22:36 2019 +0200
@@ -168,7 +168,7 @@
in
case mono_eq_prover ctxt prop of
SOME thm => SOME (thm RS rule)
- | NONE => NONE
+ | NONE => NONE
end
(*
@@ -181,7 +181,7 @@
using Lifting_Term.merge_transfer_relations
*)
-fun generate_parametric_transfer_rule ctxt transfer_rule parametric_transfer_rule =
+fun generate_parametric_transfer_rule ctxt0 transfer_rule parametric_transfer_rule =
let
fun preprocess ctxt thm =
let
@@ -235,21 +235,21 @@
end
val thm =
- inst_relcomppI ctxt parametric_transfer_rule transfer_rule
+ inst_relcomppI ctxt0 parametric_transfer_rule transfer_rule
OF [parametric_transfer_rule, transfer_rule]
- val preprocessed_thm = preprocess ctxt thm
- val orig_ctxt = ctxt
- val (fixed_thm, ctxt) = yield_singleton (apfst snd oo Variable.import true) preprocessed_thm ctxt
+ val preprocessed_thm = preprocess ctxt0 thm
+ val (fixed_thm, ctxt1) = ctxt0
+ |> yield_singleton (apfst snd oo Variable.import true) preprocessed_thm
val assms = cprems_of fixed_thm
val add_transfer_rule = Thm.attribute_declaration Transfer.transfer_add
- val (prems, ctxt) = fold_map Thm.assume_hyps assms ctxt
- val ctxt = Context.proof_map (fold add_transfer_rule prems) ctxt
+ val (prems, ctxt2) = ctxt1 |> fold_map Thm.assume_hyps assms
+ val ctxt3 = ctxt2 |> Context.proof_map (fold add_transfer_rule prems)
val zipped_thm =
fixed_thm
|> undisch_all
- |> zip_transfer_rules ctxt
+ |> zip_transfer_rules ctxt3
|> implies_intr_list assms
- |> singleton (Variable.export ctxt orig_ctxt)
+ |> singleton (Variable.export ctxt3 ctxt0)
in
zipped_thm
end
@@ -539,19 +539,17 @@
in
if no_no_code lthy (rty, qty) then
let
- val lthy = (snd oo Local_Theory.note)
- ((Binding.empty, [register_code_eq_attrib]), [encoded_code_eq]) lthy
- val opt_code_eq = Data.get (Context.Theory (Proof_Context.theory_of lthy))
+ val lthy' = lthy
+ |> (#2 oo Local_Theory.note) ((Binding.empty, [register_code_eq_attrib]), [encoded_code_eq])
+ val opt_code_eq = Data.get (Context.Theory (Proof_Context.theory_of lthy'))
val code_eq =
if is_some opt_code_eq then the opt_code_eq
else UNKNOWN_EQ (* UNKNOWN_EQ means that we are in a locale and we do not know
which code equation is going to be used. This is going to be resolved at the
point when an interpretation of the locale is executed. *)
- val lthy = Local_Theory.declaration {syntax = false, pervasive = true}
- (K (Data.put NONE)) lthy
- in
- (code_eq, lthy)
- end
+ val lthy'' = lthy'
+ |> Local_Theory.declaration {syntax = false, pervasive = true} (K (Data.put NONE))
+ in (code_eq, lthy'') end
else
(NONE_EQ, lthy)
end
@@ -569,27 +567,27 @@
par_thms - a parametricity theorem for rhs
*)
-fun add_lift_def (config: config) (binding, mx) qty rhs rsp_thm par_thms lthy =
+fun add_lift_def (config: config) (binding, mx) qty rhs rsp_thm par_thms lthy0 =
let
val rty = fastype_of rhs
- val quot_thm = Lifting_Term.prove_quot_thm lthy (rty, qty)
+ val quot_thm = Lifting_Term.prove_quot_thm lthy0 (rty, qty)
val absrep_trm = quot_thm_abs quot_thm
val rty_forced = (domain_type o fastype_of) absrep_trm
- val forced_rhs = force_rty_type lthy rty_forced rhs
+ val forced_rhs = force_rty_type lthy0 rty_forced rhs
val lhs = Free (Binding.name_of binding, qty)
val prop = Logic.mk_equals (lhs, absrep_trm $ forced_rhs)
- val (_, prop') = Local_Defs.cert_def lthy (K []) prop
+ val (_, prop') = Local_Defs.cert_def lthy0 (K []) prop
val (_, newrhs) = Local_Defs.abs_def prop'
val var = ((#notes config = false ? Binding.concealed) binding, mx)
val def_name = Thm.make_def_binding (#notes config) (#1 var)
- val ((lift_const, (_ , def_thm)), lthy) =
- Local_Theory.define (var, ((def_name, []), newrhs)) lthy
+ val ((lift_const, (_ , def_thm)), lthy1) = lthy0
+ |> Local_Theory.define (var, ((def_name, []), newrhs))
- val transfer_rules = generate_transfer_rules lthy quot_thm rsp_thm def_thm par_thms
+ val transfer_rules = generate_transfer_rules lthy1 quot_thm rsp_thm def_thm par_thms
- val abs_eq_thm = generate_abs_eq lthy def_thm rsp_thm quot_thm
- val opt_rep_eq_thm = generate_rep_eq lthy def_thm rsp_thm (rty_forced, qty)
+ val abs_eq_thm = generate_abs_eq lthy1 def_thm rsp_thm quot_thm
+ val opt_rep_eq_thm = generate_rep_eq lthy1 def_thm rsp_thm (rty_forced, qty)
fun notes names =
let
@@ -608,15 +606,16 @@
else map_filter (fn (_, attrs, thms) => if null attrs then NONE
else SOME (Binding.empty_atts, [(thms, attrs)])) notes
end
- val (code_eq, lthy) = register_code_eq abs_eq_thm opt_rep_eq_thm (rty_forced, qty) lthy
+ val (code_eq, lthy2) = lthy1
+ |> register_code_eq abs_eq_thm opt_rep_eq_thm (rty_forced, qty)
val lift_def = mk_lift_def rty_forced qty newrhs lift_const def_thm rsp_thm abs_eq_thm
opt_rep_eq_thm code_eq transfer_rules
in
- lthy
- |> Local_Theory.open_target |> snd
- |> Local_Theory.notes (notes (#notes config)) |> snd
- |> `(fn lthy => morph_lift_def (Local_Theory.target_morphism lthy) lift_def)
- ||> Local_Theory.close_target
+ lthy2
+ |> Local_Theory.open_target |> snd
+ |> Local_Theory.notes (notes (#notes config)) |> snd
+ |> `(fn lthy => morph_lift_def (Local_Theory.target_morphism lthy) lift_def)
+ ||> Local_Theory.close_target
end
(* This is not very cheap way of getting the rules but we have only few active
@@ -632,24 +631,24 @@
Symtab.fold (fn (_, data) => fn l => collect data l) table []
end
-fun prepare_lift_def add_lift_def var qty rhs par_thms lthy =
+fun prepare_lift_def add_lift_def var qty rhs par_thms ctxt =
let
- val rsp_rel = Lifting_Term.equiv_relation lthy (fastype_of rhs, qty)
+ val rsp_rel = Lifting_Term.equiv_relation ctxt (fastype_of rhs, qty)
val rty_forced = (domain_type o fastype_of) rsp_rel;
- val forced_rhs = force_rty_type lthy rty_forced rhs;
+ val forced_rhs = force_rty_type ctxt rty_forced rhs;
val cr_to_pcr_conv = HOLogic.Trueprop_conv (Conv.fun2_conv
- (Raw_Simplifier.rewrite lthy false (get_cr_pcr_eqs lthy)))
+ (Raw_Simplifier.rewrite ctxt false (get_cr_pcr_eqs ctxt)))
val (prsp_tm, rsp_prsp_eq) = HOLogic.mk_Trueprop (rsp_rel $ forced_rhs $ forced_rhs)
- |> Thm.cterm_of lthy
+ |> Thm.cterm_of ctxt
|> cr_to_pcr_conv
|> `Thm.concl_of
|>> Logic.dest_equals
|>> snd
val to_rsp = rsp_prsp_eq RS Drule.equal_elim_rule2
- val opt_proven_rsp_thm = try_prove_reflexivity lthy prsp_tm
+ val opt_proven_rsp_thm = try_prove_reflexivity ctxt prsp_tm
- fun after_qed internal_rsp_thm lthy =
- add_lift_def var qty rhs (internal_rsp_thm RS to_rsp) par_thms lthy
+ fun after_qed internal_rsp_thm =
+ add_lift_def var qty rhs (internal_rsp_thm RS to_rsp) par_thms
in
case opt_proven_rsp_thm of
SOME thm => (NONE, K (after_qed thm))
@@ -663,7 +662,8 @@
case goal of
SOME goal =>
let
- val rsp_thm = Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, ...} => tac ctxt)
+ val rsp_thm =
+ Goal.prove_sorry lthy [] [] goal (tac o #context)
|> Thm.close_derivation
in
after_qed rsp_thm lthy
@@ -671,7 +671,6 @@
| NONE => after_qed Drule.dummy_thm lthy
end
-fun lift_def config var qty rhs tac par_thms lthy = gen_lift_def (add_lift_def config)
- var qty rhs tac par_thms lthy
+val lift_def = gen_lift_def o add_lift_def
end (* structure *)
--- a/src/HOL/Tools/Lifting/lifting_def_code_dt.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Lifting/lifting_def_code_dt.ML Sun Jun 09 22:22:36 2019 +0200
@@ -226,8 +226,8 @@
fun bundle_name_of_bundle_binding binding phi context =
Name_Space.full_name (Name_Space.naming_of context) (Morphism.binding phi binding);
-fun prove_schematic_quot_thm actions ctxt = Lifting_Term.prove_schematic_quot_thm actions
- (Lifting_Info.get_quotients ctxt) ctxt
+fun prove_schematic_quot_thm actions ctxt =
+ Lifting_Term.prove_schematic_quot_thm actions (Lifting_Info.get_quotients ctxt) ctxt
fun prove_code_dt (rty, qty) lthy =
let
@@ -245,24 +245,25 @@
fun R_conv rel_eq_onps = Transfer.top_sweep_rewr_conv @{thms eq_onp_top_eq_eq[symmetric, THEN eq_reflection]}
then_conv Transfer.bottom_rewr_conv rel_eq_onps
- val (ret_lift_def, lthy) = add_lift_def (#lift_config config) var qty rhs rsp_thm par_thms lthy
+ val (ret_lift_def, lthy1) = add_lift_def (#lift_config config) var qty rhs rsp_thm par_thms lthy
in
if (not (#code_dt config) orelse (code_eq_of_lift_def ret_lift_def <> NONE_EQ)
andalso (code_eq_of_lift_def ret_lift_def <> UNKNOWN_EQ))
(* Let us try even in case of UNKNOWN_EQ. If this leads to problems, the user can always
say that they do not want this workaround. *)
- then (ret_lift_def, lthy)
+ then (ret_lift_def, lthy1)
else
let
- val lift_def = inst_of_lift_def lthy qty ret_lift_def
+ val lift_def = inst_of_lift_def lthy1 qty ret_lift_def
val rty = rty_of_lift_def lift_def
val rty_ret = body_type rty
val qty_ret = body_type qty
- val (lthy, rel_eq_onps) = prove_code_dt (rty_ret, qty_ret) lthy
- val code_dt = code_dt_of lthy (rty_ret, qty_ret)
+ val (lthy2, rel_eq_onps) = prove_code_dt (rty_ret, qty_ret) lthy1
+ val code_dt = code_dt_of lthy2 (rty_ret, qty_ret)
in
- if is_none code_dt orelse is_none (rep_isom_data_of_code_dt (the code_dt)) then (ret_lift_def, lthy)
+ if is_none code_dt orelse is_none (rep_isom_data_of_code_dt (the code_dt))
+ then (ret_lift_def, lthy2)
else
let
val code_dt = the code_dt
@@ -270,29 +271,28 @@
val rep_isom_data = code_dt |> rep_isom_data_of_code_dt |> the
val pointer = pointer_of_rep_isom_data rep_isom_data
val quot_active =
- Lifting_Info.lookup_restore_data lthy pointer |> the |> #quotient |> #quot_thm
- |> Lifting_Info.lookup_quot_thm_quotients lthy |> is_some
+ Lifting_Info.lookup_restore_data lthy2 pointer |> the |> #quotient |> #quot_thm
+ |> Lifting_Info.lookup_quot_thm_quotients lthy2 |> is_some
val qty_code_dt_bundle_name = bundle_name_of_rep_isom_data rep_isom_data
val rep_isom = mk_rep_isom_of_code_dt qty_ret code_dt |> the
- val lthy = if quot_active then lthy else Bundle.includes [qty_code_dt_bundle_name] lthy
+ val lthy3 = if quot_active then lthy2 else Bundle.includes [qty_code_dt_bundle_name] lthy2
fun qty_isom_of_rep_isom rep = rep |> dest_Const |> snd |> domain_type
val qty_isom = qty_isom_of_rep_isom rep_isom
val f'_var = (Binding.suffix_name "_aux" (fst var), NoSyn);
val f'_qty = strip_type qty |> fst |> rpair qty_isom |> op --->
- val f'_rsp_rel = Lifting_Term.equiv_relation lthy (rty, f'_qty);
+ val f'_rsp_rel = Lifting_Term.equiv_relation lthy3 (rty, f'_qty);
val rsp = rsp_thm_of_lift_def lift_def
val rel_eq_onps_conv = HOLogic.Trueprop_conv (Conv.fun2_conv (ret_rel_conv (R_conv rel_eq_onps)))
val rsp_norm = Conv.fconv_rule rel_eq_onps_conv rsp
val f'_rsp_goal = HOLogic.mk_Trueprop (f'_rsp_rel $ rhs $ rhs);
- val f'_rsp = Goal.prove_sorry lthy [] [] f'_rsp_goal
+ val f'_rsp = Goal.prove_sorry lthy3 [] [] f'_rsp_goal
(fn {context = ctxt, prems = _} =>
HEADGOAL (CONVERSION (rel_eq_onps_conv) THEN' rtac ctxt rsp_norm))
|> Thm.close_derivation
- val (f'_lift_def, lthy) = add_lift_def ld_no_notes f'_var f'_qty rhs f'_rsp [] lthy
- val f'_lift_def = inst_of_lift_def lthy f'_qty f'_lift_def
+ val (f'_lift_def, lthy4) = add_lift_def ld_no_notes f'_var f'_qty rhs f'_rsp [] lthy3
+ val f'_lift_def = inst_of_lift_def lthy4 f'_qty f'_lift_def
val f'_lift_const = mk_lift_const_of_lift_def f'_qty f'_lift_def
- val args_lthy = lthy
- val (args, lthy) = mk_Frees "x" (binder_types qty) lthy
+ val (args, args_ctxt) = mk_Frees "x" (binder_types qty) lthy4
val f_alt_def_goal_lhs = list_comb (lift_const_of_lift_def lift_def, args);
val f_alt_def_goal_rhs = rep_isom $ list_comb (f'_lift_const, args);
val f_alt_def_goal = HOLogic.mk_Trueprop (HOLogic.mk_eq (f_alt_def_goal_lhs, f_alt_def_goal_rhs));
@@ -300,26 +300,26 @@
EVERY' [Transfer.gen_frees_tac [] ctxt, DETERM o Transfer.transfer_tac true ctxt,
SELECT_GOAL (Local_Defs.unfold0_tac ctxt [id_apply]), rtac ctxt refl] i;
val rep_isom_transfer = transfer_of_rep_isom_data rep_isom_data
- val (_, transfer_lthy) =
- Proof_Context.note_thms ""
- (Binding.empty_atts, [([rep_isom_transfer], [Transfer.transfer_add])]) lthy
- val f_alt_def = Goal.prove_sorry transfer_lthy [] [] f_alt_def_goal
- (fn {context = ctxt, prems = _} => HEADGOAL (f_alt_def_tac ctxt))
+ val (_, transfer_ctxt) = args_ctxt
+ |> Proof_Context.note_thms ""
+ (Binding.empty_atts, [([rep_isom_transfer], [Transfer.transfer_add])])
+ val f_alt_def =
+ Goal.prove_sorry transfer_ctxt [] [] f_alt_def_goal
+ (fn {context = goal_ctxt, ...} => HEADGOAL (f_alt_def_tac goal_ctxt))
|> Thm.close_derivation
- |> singleton (Variable.export lthy args_lthy)
- val lthy = args_lthy
- val lthy = lthy
+ |> singleton (Variable.export transfer_ctxt lthy4)
+ val lthy5 = lthy4
|> Local_Theory.note ((Binding.empty, @{attributes [code]}), [f_alt_def])
|> snd
(* if processing a mutual datatype (there is a cycle!) the corresponding quotient
will be needed later and will be forgotten later *)
|> (if quot_active then I else Lifting_Setup.lifting_forget pointer)
in
- (ret_lift_def, lthy)
+ (ret_lift_def, lthy5)
end
end
end
-and mk_rep_isom qty_isom_bundle (rty, qty, qty_isom) lthy =
+and mk_rep_isom qty_isom_bundle (rty, qty, qty_isom) lthy0 =
let
(* logical definition of qty qty_isom isomorphism *)
val uTname = unique_Tname (rty, qty)
@@ -328,24 +328,23 @@
fun lift_isom_tac ctxt = HEADGOAL (eq_onp_to_top_tac ctxt
THEN' (rtac ctxt @{thm id_transfer}));
- val (rep_isom_lift_def, lthy) =
- lthy
+ val (rep_isom_lift_def, lthy1) = lthy0
|> Local_Theory.open_target |> snd
|> lift_def ld_no_notes (Binding.qualify_name true uTname "Rep_isom", NoSyn)
(qty_isom --> qty) (HOLogic.id_const rty) lift_isom_tac []
- |>> inst_of_lift_def lthy (qty_isom --> qty);
- val (abs_isom, lthy) =
- lthy
+ |>> inst_of_lift_def lthy0 (qty_isom --> qty);
+ val (abs_isom, lthy2) = lthy1
|> lift_def ld_no_notes (Binding.qualify_name true uTname "Abs_isom", NoSyn)
(qty --> qty_isom) (HOLogic.id_const rty) lift_isom_tac []
|>> mk_lift_const_of_lift_def (qty --> qty_isom);
val rep_isom = lift_const_of_lift_def rep_isom_lift_def
- val pointer = Lifting_Setup.pointer_of_bundle_binding lthy qty_isom_bundle
- fun code_dt phi context = code_dt_of lthy (rty, qty) |> the |>
- update_rep_isom rep_isom (transfer_rules_of_lift_def rep_isom_lift_def |> hd)
- (bundle_name_of_bundle_binding qty_isom_bundle phi context) pointer;
- val lthy = lthy
+ val pointer = Lifting_Setup.pointer_of_bundle_binding lthy2 qty_isom_bundle
+ fun code_dt phi context =
+ code_dt_of lthy2 (rty, qty) |> the
+ |> update_rep_isom rep_isom (transfer_rules_of_lift_def rep_isom_lift_def |> hd)
+ (bundle_name_of_bundle_binding qty_isom_bundle phi context) pointer;
+ val lthy3 = lthy2
|> Local_Theory.declaration {syntax = false, pervasive = true}
(fn phi => fn context => Data.map (Item_Net.update (morph_code_dt phi (code_dt phi context))) context)
|> Local_Theory.close_target
@@ -354,7 +353,7 @@
and selectors for qty_isom *)
val (rty_name, typs) = dest_Type rty
val (_, qty_typs) = dest_Type qty
- val fp = BNF_FP_Def_Sugar.fp_sugar_of lthy rty_name
+ val fp = BNF_FP_Def_Sugar.fp_sugar_of lthy3 rty_name
val fp = if is_some fp then the fp
else error ("code_dt: " ^ quote rty_name ^ " is not a datatype.")
val ctr_sugar = fp |> #fp_ctr_sugar |> #ctr_sugar
@@ -365,7 +364,7 @@
val n = length ctrs;
val ks = 1 upto n;
- val (xss, _) = mk_Freess "x" ctr_Tss lthy;
+ val (xss, _) = mk_Freess "x" ctr_Tss lthy3;
fun sel_retT (rty' as Type (s, rtys'), qty' as Type (s', qtys')) =
if (rty', qty') = (rty, qty) then qty_isom else (if s = s'
@@ -377,7 +376,7 @@
fun lazy_prove_code_dt (rty, qty) lthy =
if is_none (code_dt_of lthy (rty, qty)) then prove_code_dt (rty, qty) lthy |> fst else lthy;
- val lthy = fold2 (fold2 (lazy_prove_code_dt oo pair)) ctr_Tss sel_retTs lthy
+ val lthy4 = fold2 (fold2 (lazy_prove_code_dt oo pair)) ctr_Tss sel_retTs lthy3
val sel_argss = @{map 4} (fn k => fn xs => @{map 2} (fn x => fn qty_ret =>
(k, qty_ret, (xs, x)))) ks xss xss sel_retTs;
@@ -398,7 +397,7 @@
(if k = k' then (fold_rev Term.lambda xs x, wits) else gen_undef_wit Ts wits)) ctr_Tss ks []
end;
fun mk_sel_rhs arg =
- let val (sel_rhs, wits) = mk_sel_case_args lthy ctr_Tss ks arg
+ let val (sel_rhs, wits) = mk_sel_case_args lthy4 ctr_Tss ks arg
in (arg |> #2, wits, list_comb (mk_sel_casex arg, sel_rhs)) end;
fun mk_dis_case_args args k = map (fn (k', arg) => (if k = k'
then fold_rev Term.lambda arg \<^const>\<open>True\<close> else fold_rev Term.lambda arg \<^const>\<open>False\<close>)) args;
@@ -407,9 +406,9 @@
val dis_qty = qty_isom --> HOLogic.boolT;
val dis_names = map (fn k => Binding.qualify_name true uTname ("dis" ^ string_of_int k)) ks;
- val (diss, lthy) = @{fold_map 2} (fn b => fn rhs => fn lthy =>
+ val (diss, lthy5) = @{fold_map 2} (fn b => fn rhs => fn lthy =>
lift_def ld_no_notes (b, NoSyn) dis_qty rhs (K all_tac) [] lthy
- |>> mk_lift_const_of_lift_def dis_qty) dis_names dis_rhs lthy
+ |>> mk_lift_const_of_lift_def dis_qty) dis_names dis_rhs lthy4
val unfold_lift_sel_rsp = @{lemma "(\<And>x. P1 x \<Longrightarrow> P2 (f x)) \<Longrightarrow> (rel_fun (eq_onp P1) (eq_onp P2)) f f"
by (simp add: eq_onp_same_args rel_fun_eq_onp_rel)}
@@ -422,16 +421,16 @@
EVERY' [hyp_subst_tac ctxt, (* does not kill wits because = was rewritten to eq_onp top *)
Raw_Simplifier.rewrite_goal_tac ctxt (map safe_mk_meta_eq dt_rules),
REPEAT_DETERM o etac ctxt conjE, assume_tac ctxt])) i
- val pred_simps = Transfer.lookup_pred_data lthy (Tname rty) |> the |> Transfer.pred_simps
+ val pred_simps = Transfer.lookup_pred_data lthy5 (Tname rty) |> the |> Transfer.pred_simps
val sel_tac = lift_sel_tac (#exhaust ctr_sugar) (#case_thms ctr_sugar @ pred_simps)
val sel_names =
map (fn (k, xs) =>
map (fn k' => Binding.qualify_name true uTname ("sel" ^ string_of_int k ^ string_of_int k'))
(1 upto length xs)) (ks ~~ ctr_Tss);
- val (selss, lthy) = @{fold_map 2} (@{fold_map 2} (fn b => fn (qty_ret, wits, rhs) => fn lthy =>
+ val (selss, lthy6) = @{fold_map 2} (@{fold_map 2} (fn b => fn (qty_ret, wits, rhs) => fn lthy =>
lift_def_code_dt { code_dt = true, lift_config = ld_no_notes }
(b, NoSyn) (qty_isom --> qty_ret) rhs (HEADGOAL o sel_tac wits) [] lthy
- |>> mk_lift_const_of_lift_def (qty_isom --> qty_ret))) sel_names sel_rhs lthy
+ |>> mk_lift_const_of_lift_def (qty_isom --> qty_ret))) sel_names sel_rhs lthy5
(* now we can execute the qty qty_isom isomorphism *)
fun mk_type_definition newT oldT RepC AbsC A =
@@ -450,30 +449,31 @@
(map safe_mk_meta_eq @{thms id_apply simp_thms Ball_def}),
rtac ctxt TrueI] i;
- val (_, transfer_lthy) =
+ val (_, transfer_ctxt) =
Proof_Context.note_thms ""
(Binding.empty_atts,
[(@{thms right_total_UNIV_transfer}, [Transfer.transfer_add]),
- (@{thms Domain_eq_top}, [Transfer.transfer_domain_add])]) lthy;
+ (@{thms Domain_eq_top}, [Transfer.transfer_domain_add])]) lthy6;
- val quot_thm_isom = Goal.prove_sorry transfer_lthy [] [] typedef_goal
- (fn {context = ctxt, prems = _} => typ_isom_tac ctxt 1)
+ val quot_thm_isom =
+ Goal.prove_sorry transfer_ctxt [] [] typedef_goal
+ (fn {context = goal_ctxt, ...} => typ_isom_tac goal_ctxt 1)
|> Thm.close_derivation
- |> singleton (Variable.export transfer_lthy lthy)
+ |> singleton (Variable.export transfer_ctxt lthy6)
|> (fn thm => @{thm UNIV_typedef_to_Quotient} OF [thm, @{thm reflexive}])
val qty_isom_name = Tname qty_isom;
val quot_isom_rep =
let
- val (quotients : Lifting_Term.quotients) = Symtab.insert (Lifting_Info.quotient_eq) (qty_isom_name,
- {quot_thm = quot_thm_isom, pcr_info = NONE}) Symtab.empty
+ val (quotients : Lifting_Term.quotients) =
+ Symtab.insert (Lifting_Info.quotient_eq)
+ (qty_isom_name, {quot_thm = quot_thm_isom, pcr_info = NONE}) Symtab.empty
val id_actions = { constr = K I, lift = K I, comp_lift = K I }
in
fn ctxt => fn (rty, qty) => Lifting_Term.prove_schematic_quot_thm id_actions quotients
ctxt (rty, qty) () |> fst |> Lifting_Term.force_qty_type ctxt qty
|> quot_thm_rep
end;
- val x_lthy = lthy
- val (x, lthy) = yield_singleton (mk_Frees "x") qty_isom lthy;
+ val (x, x_ctxt) = yield_singleton (mk_Frees "x") qty_isom lthy6;
fun mk_ctr ctr ctr_Ts sels =
let
@@ -488,7 +488,7 @@
end;
in
@{fold 3} (fn sel => fn ctr_T => fn sel_ret_T => fn ctr =>
- ctr $ rep_isom lthy (sel $ x) (ctr_T, sel_ret_T)) sels ctr_Ts sel_ret_Ts ctr
+ ctr $ rep_isom x_ctxt (sel $ x) (ctr_T, sel_ret_T)) sels ctr_Ts sel_ret_Ts ctr
end;
(* stolen from Metis *)
@@ -535,26 +535,25 @@
the datatype fp. Otherwise rep_isom_code_tac could fail (especially transfer in it) is such
a way that it is not easy to infer the problem with sorts.
*)
- val _ = yield_singleton (mk_Frees "x") (#T fp) lthy |> fst |> force_typ lthy qty
+ val _ = yield_singleton (mk_Frees "x") (#T fp) x_ctxt |> fst |> force_typ x_ctxt qty
- val rep_isom_code = Goal.prove_sorry lthy [] [] rep_isom_code_goal
- (fn {context = ctxt, prems = _} => rep_isom_code_tac ctr_sugar ctxt 1)
+ val rep_isom_code =
+ Goal.prove_sorry x_ctxt [] [] rep_isom_code_goal
+ (fn {context = goal_ctxt, ...} => rep_isom_code_tac ctr_sugar goal_ctxt 1)
|> Thm.close_derivation
- |> singleton(Variable.export lthy x_lthy)
- val lthy = x_lthy
- val lthy =
- lthy
- |> snd o Local_Theory.note ((Binding.empty, @{attributes [code]}), [rep_isom_code])
- |> Lifting_Setup.lifting_forget pointer
+ |> singleton(Variable.export x_ctxt lthy6)
in
- ((selss, diss, rep_isom_code), lthy)
+ lthy6
+ |> snd o Local_Theory.note ((Binding.empty, @{attributes [code]}), [rep_isom_code])
+ |> Lifting_Setup.lifting_forget pointer
+ |> pair (selss, diss, rep_isom_code)
end
-and constr qty (quot_thm, (lthy, rel_eq_onps)) =
+and constr qty (quot_thm, (lthy0, rel_eq_onps)) =
let
- val quot_thm = Lifting_Term.force_qty_type lthy qty quot_thm
+ val quot_thm = Lifting_Term.force_qty_type lthy0 qty quot_thm
val (rty, qty) = quot_thm_rty_qty quot_thm
val rty_name = Tname rty;
- val pred_data = Transfer.lookup_pred_data lthy rty_name
+ val pred_data = Transfer.lookup_pred_data lthy0 rty_name
val pred_data = if is_some pred_data then the pred_data
else error ("code_dt: " ^ quote rty_name ^ " is not a datatype.")
val rel_eq_onp = safe_mk_meta_eq (Transfer.rel_eq_onp pred_data);
@@ -563,12 +562,11 @@
then_conv Conv.rewr_conv rel_eq_onp
val quot_thm = Conv.fconv_rule(HOLogic.Trueprop_conv (Quotient_R_conv R_conv)) quot_thm;
in
- if is_none (code_dt_of lthy (rty, qty)) then
+ if is_none (code_dt_of lthy0 (rty, qty)) then
let
val non_empty_pred = quot_thm RS @{thm type_definition_Quotient_not_empty}
val pred = quot_thm_rel quot_thm |> dest_comb |> snd;
- val (pred, lthy) =
- lthy
+ val (pred, lthy1) = lthy0
|> Local_Theory.open_target |> snd
|> yield_singleton (Variable.import_terms true) pred;
val TFrees = Term.add_tfreesT qty []
@@ -578,29 +576,29 @@
SELECT_GOAL (Local_Defs.unfold0_tac ctxt [mem_Collect_eq]) THEN' assume_tac ctxt) i
val uTname = unique_Tname (rty, qty)
val Tdef_set = HOLogic.mk_Collect ("x", rty, pred $ Free("x", rty));
- val ((_, tcode_dt), lthy) = conceal_naming_result (typedef (Binding.concealed uTname, TFrees, NoSyn)
- Tdef_set NONE (fn lthy => HEADGOAL (non_empty_typedef_tac non_empty_pred lthy))) lthy;
+ val ((_, tcode_dt), lthy2) = lthy1
+ |> conceal_naming_result
+ (typedef (Binding.concealed uTname, TFrees, NoSyn)
+ Tdef_set NONE (fn lthy => HEADGOAL (non_empty_typedef_tac non_empty_pred lthy)));
val type_definition_thm = tcode_dt |> snd |> #type_definition;
val qty_isom = tcode_dt |> fst |> #abs_type;
- val (binding, lthy) =
- lthy
- |> conceal_naming_result (Lifting_Setup.setup_by_typedef_thm
- { notes = false } type_definition_thm)
+ val (binding, lthy3) = lthy2
+ |> conceal_naming_result
+ (Lifting_Setup.setup_by_typedef_thm {notes = false} type_definition_thm)
||> Local_Theory.close_target
val (wit, wit_thm) = mk_witness quot_thm;
val code_dt = mk_code_dt rty qty wit wit_thm NONE;
- val lthy = lthy
+ val lthy4 = lthy3
|> update_code_dt code_dt
|> mk_rep_isom binding (rty, qty, qty_isom) |> snd
in
- (quot_thm, (lthy, rel_eq_onps))
+ (quot_thm, (lthy4, rel_eq_onps))
end
else
- (quot_thm, (lthy, rel_eq_onps))
+ (quot_thm, (lthy0, rel_eq_onps))
end
-and lift_def_code_dt config var qty rhs tac par_thms lthy = gen_lift_def (add_lift_def_code_dt config)
- var qty rhs tac par_thms lthy
+and lift_def_code_dt config = gen_lift_def (add_lift_def_code_dt config)
(** from parsed parameters to the config record **)
@@ -634,9 +632,9 @@
[@{thm pcr_Domainp_total}, @{thm pcr_Domainp_par_left_total}, @{thm pcr_Domainp_par},
@{thm pcr_Domainp}]
in
-fun mk_readable_rsp_thm_eq tm lthy =
+fun mk_readable_rsp_thm_eq tm ctxt =
let
- val ctm = Thm.cterm_of lthy tm
+ val ctm = Thm.cterm_of ctxt tm
fun assms_rewr_conv tactic rule ct =
let
@@ -685,18 +683,18 @@
@{thm rel_fun_def[THEN eq_reflection]}]
fun binop_conv2 cv1 cv2 = Conv.combination_conv (Conv.arg_conv cv1) cv2
val eq_onp_assms_tac_rules = @{thm left_unique_OO} ::
- eq_onp_assms_tac_fixed_rules @ (Transfer.get_transfer_raw lthy)
+ eq_onp_assms_tac_fixed_rules @ (Transfer.get_transfer_raw ctxt)
val intro_top_rule = @{thm eq_onp_top_eq_eq[symmetric, THEN eq_reflection]}
val kill_tops = Transfer.top_sweep_rewr_conv [@{thm eq_onp_top_eq_eq[THEN eq_reflection]}]
val eq_onp_assms_tac = (CONVERSION kill_tops THEN'
- TRY o REPEAT_ALL_NEW (resolve_tac lthy eq_onp_assms_tac_rules)
- THEN_ALL_NEW (DETERM o Transfer.eq_tac lthy)) 1
+ TRY o REPEAT_ALL_NEW (resolve_tac ctxt eq_onp_assms_tac_rules)
+ THEN_ALL_NEW (DETERM o Transfer.eq_tac ctxt)) 1
val relator_eq_onp_conv = Conv.bottom_conv
(K (Conv.try_conv (assms_rewrs_conv eq_onp_assms_tac
- (intro_top_rule :: Lifting_Info.get_relator_eq_onp_rules lthy)))) lthy
+ (intro_top_rule :: Lifting_Info.get_relator_eq_onp_rules ctxt)))) ctxt
then_conv kill_tops
val relator_eq_conv = Conv.bottom_conv
- (K (Conv.try_conv (Conv.rewrs_conv (Transfer.get_relator_eq lthy)))) lthy
+ (K (Conv.try_conv (Conv.rewrs_conv (Transfer.get_relator_eq ctxt)))) ctxt
in
case (Thm.term_of ctm) of
Const (\<^const_name>\<open>rel_fun\<close>, _) $ _ $ _ =>
@@ -705,18 +703,18 @@
end
val unfold_ret_val_invs = Conv.bottom_conv
- (K (Conv.try_conv (Conv.rewr_conv @{thm eq_onp_same_args[THEN eq_reflection]}))) lthy
+ (K (Conv.try_conv (Conv.rewr_conv @{thm eq_onp_same_args[THEN eq_reflection]}))) ctxt
val unfold_inv_conv =
- Conv.top_sweep_conv (K (Conv.rewr_conv @{thm eq_onp_def[THEN eq_reflection]})) lthy
+ Conv.top_sweep_conv (K (Conv.rewr_conv @{thm eq_onp_def[THEN eq_reflection]})) ctxt
val simp_conv = HOLogic.Trueprop_conv (Conv.fun2_conv simp_arrows_conv)
val univq_conv = Conv.rewr_conv @{thm HOL.all_simps(6)[symmetric, THEN eq_reflection]}
- val univq_prenex_conv = Conv.top_conv (K (Conv.try_conv univq_conv)) lthy
+ val univq_prenex_conv = Conv.top_conv (K (Conv.try_conv univq_conv)) ctxt
val beta_conv = Thm.beta_conversion true
val eq_thm =
(simp_conv then_conv univq_prenex_conv then_conv beta_conv then_conv unfold_ret_val_invs
then_conv unfold_inv_conv) ctm
in
- Object_Logic.rulify lthy (eq_thm RS Drule.equal_elim_rule2)
+ Object_Logic.rulify ctxt (eq_thm RS Drule.equal_elim_rule2)
end
end
@@ -772,40 +770,39 @@
error error_msg
end
-fun lift_def_cmd (params, raw_var, rhs_raw, par_xthms) lthy =
+fun lift_def_cmd (params, raw_var, rhs_raw, par_xthms) lthy0 =
let
val config = evaluate_params params
- val ((binding, SOME qty, mx), lthy) = Proof_Context.read_var raw_var lthy
+ val ((binding, SOME qty, mx), lthy1) = Proof_Context.read_var raw_var lthy0
val var = (binding, mx)
- val rhs = Syntax.read_term lthy rhs_raw
- val par_thms = Attrib.eval_thms lthy par_xthms
- val (goal, after_qed) = prepare_lift_def (add_lift_def_code_dt config) var qty rhs par_thms lthy
+ val rhs = Syntax.read_term lthy1 rhs_raw
+ val par_thms = Attrib.eval_thms lthy1 par_xthms
+ val (goal, after_qed) = lthy1
+ |> prepare_lift_def (add_lift_def_code_dt config) var qty rhs par_thms
val (goal, after_qed) =
case goal of
NONE => (goal, K (after_qed Drule.dummy_thm))
| SOME prsp_tm =>
let
- val readable_rsp_thm_eq = mk_readable_rsp_thm_eq prsp_tm lthy
+ val readable_rsp_thm_eq = mk_readable_rsp_thm_eq prsp_tm lthy1
val (readable_rsp_tm, _) = Logic.dest_implies (Thm.prop_of readable_rsp_thm_eq)
- val readable_rsp_tm_tnames = rename_to_tnames lthy readable_rsp_tm
+ val readable_rsp_tm_tnames = rename_to_tnames lthy1 readable_rsp_tm
fun after_qed' [[thm]] lthy =
let
- val internal_rsp_thm = Goal.prove lthy [] [] prsp_tm
- (fn {context = ctxt, ...} =>
- rtac ctxt readable_rsp_thm_eq 1 THEN Proof_Context.fact_tac ctxt [thm] 1)
- in
- after_qed internal_rsp_thm lthy
- end
- in
- (SOME readable_rsp_tm_tnames, after_qed')
- end
+ val internal_rsp_thm =
+ Goal.prove lthy [] [] prsp_tm (fn {context = goal_ctxt, ...} =>
+ rtac goal_ctxt readable_rsp_thm_eq 1 THEN
+ Proof_Context.fact_tac goal_ctxt [thm] 1)
+ in after_qed internal_rsp_thm lthy end
+ in (SOME readable_rsp_tm_tnames, after_qed') end
fun after_qed_with_err_handling thmss ctxt = (after_qed thmss ctxt
- handle Lifting_Term.QUOT_THM (rty, qty, msg) => quot_thm_err lthy (rty, qty) msg)
+ handle Lifting_Term.QUOT_THM (rty, qty, msg) => quot_thm_err lthy1 (rty, qty) msg)
handle Lifting_Term.CHECK_RTY (rty_schematic, rty_forced) =>
- check_rty_err lthy (rty_schematic, rty_forced) (raw_var, rhs_raw);
+ check_rty_err lthy1 (rty_schematic, rty_forced) (raw_var, rhs_raw);
in
- Proof.theorem NONE (snd oo after_qed_with_err_handling) [map (rpair []) (the_list goal)] lthy
+ lthy1
+ |> Proof.theorem NONE (snd oo after_qed_with_err_handling) [map (rpair []) (the_list goal)]
end
fun lift_def_cmd_with_err_handling (params, (raw_var, rhs_raw, par_xthms)) lthy =
--- a/src/HOL/Tools/Lifting/lifting_info.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Lifting/lifting_info.ML Sun Jun 09 22:22:36 2019 +0200
@@ -124,18 +124,18 @@
fun map_restore_data f = map_data I I I I f I
fun map_no_code_types f = map_data I I I I I f
-val get_quot_maps' = #quot_maps o Data.get
-val get_quotients' = #quotients o Data.get
-val get_reflexivity_rules' = #reflexivity_rules o Data.get
-val get_relator_distr_data' = #relator_distr_data o Data.get
-val get_restore_data' = #restore_data o Data.get
-val get_no_code_types' = #no_code_types o Data.get
+val get_quot_maps' = #quot_maps o Data.get
+val get_quotients' = #quotients o Data.get
+val get_reflexivity_rules' = #reflexivity_rules o Data.get
+val get_relator_distr_data' = #relator_distr_data o Data.get
+val get_restore_data' = #restore_data o Data.get
+val get_no_code_types' = #no_code_types o Data.get
-fun get_quot_maps ctxt = get_quot_maps' (Context.Proof ctxt)
-fun get_quotients ctxt = get_quotients' (Context.Proof ctxt)
-fun get_relator_distr_data ctxt = get_relator_distr_data' (Context.Proof ctxt)
-fun get_restore_data ctxt = get_restore_data' (Context.Proof ctxt)
-fun get_no_code_types ctxt = get_no_code_types' (Context.Proof ctxt)
+val get_quot_maps = get_quot_maps' o Context.Proof
+val get_quotients = get_quotients' o Context.Proof
+val get_relator_distr_data = get_relator_distr_data' o Context.Proof
+val get_restore_data = get_restore_data' o Context.Proof
+val get_no_code_types = get_no_code_types' o Context.Proof
(* info about Quotient map theorems *)
@@ -228,7 +228,7 @@
in
case lookup_quotients ctxt qty_full_name of
SOME quotient => if compare_data quotient then SOME quotient else NONE
- | NONE => NONE
+ | NONE => NONE
end
fun update_quotients type_name qinfo context =
@@ -248,14 +248,13 @@
let
fun prt_quot (qty_name, {quot_thm, pcr_info}: quotient) =
Pretty.block (separate (Pretty.brk 2)
- [Pretty.str "type:",
- Pretty.str qty_name,
- Pretty.str "quot. thm:",
- Syntax.pretty_term ctxt (Thm.prop_of quot_thm),
- Pretty.str "pcrel_def thm:",
- option_fold (Pretty.str "-") ((Syntax.pretty_term ctxt) o Thm.prop_of o #pcrel_def) pcr_info,
- Pretty.str "pcr_cr_eq thm:",
- option_fold (Pretty.str "-") ((Syntax.pretty_term ctxt) o Thm.prop_of o #pcr_cr_eq) pcr_info])
+ ([Pretty.str "type:", Pretty.str qty_name,
+ Pretty.str "quot thm:", Thm.pretty_thm ctxt quot_thm] @
+ (case pcr_info of
+ NONE => []
+ | SOME {pcrel_def, pcr_cr_eq, ...} =>
+ [Pretty.str "pcrel_def thm:", Thm.pretty_thm ctxt pcrel_def,
+ Pretty.str "pcr_cr_eq thm:", Thm.pretty_thm ctxt pcr_cr_eq])))
in
map prt_quot (Symtab.dest (get_quotients ctxt))
|> Pretty.big_list "quotients:"
@@ -293,7 +292,7 @@
(* info about reflexivity rules *)
-fun get_reflexivity_rules ctxt = Item_Net.content (get_reflexivity_rules' (Context.Proof ctxt))
+val get_reflexivity_rules = Item_Net.content o get_reflexivity_rules' o Context.Proof
fun add_reflexivity_rule thm = Data.map (map_reflexivity_rules (Item_Net.update thm))
val add_reflexivity_rule_attribute = Thm.declaration_attribute add_reflexivity_rule
--- a/src/HOL/Tools/Lifting/lifting_setup.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML Sun Jun 09 22:22:36 2019 +0200
@@ -44,16 +44,16 @@
val def_term = Abs ("x", rty, Abs ("y", qty, rep_fun_graph))
val qty_name = (Binding.name o Long_Name.base_name o fst o dest_Type) qty
val crel_name = Binding.prefix_name "cr_" qty_name
- val (fixed_def_term, lthy) = yield_singleton (Variable.importT_terms) def_term lthy
- val ((_, (_ , def_thm)), lthy) =
+ val (fixed_def_term, lthy1) = lthy |> yield_singleton (Variable.importT_terms) def_term
+ val ((_, (_ , def_thm)), lthy2) =
if #notes config then
Local_Theory.define
- ((crel_name, NoSyn), ((Thm.def_binding crel_name, []), fixed_def_term)) lthy
+ ((crel_name, NoSyn), ((Thm.def_binding crel_name, []), fixed_def_term)) lthy1
else
Local_Theory.define
- ((Binding.concealed crel_name, NoSyn), (Binding.empty_atts, fixed_def_term)) lthy
+ ((Binding.concealed crel_name, NoSyn), (Binding.empty_atts, fixed_def_term)) lthy1
in
- (def_thm, lthy)
+ (def_thm, lthy2)
end
fun print_define_pcrel_warning msg =
@@ -66,18 +66,18 @@
warning warning_msg
end
-fun define_pcrel (config: config) crel lthy =
+fun define_pcrel (config: config) crel lthy0 =
let
- val (fixed_crel, lthy) = yield_singleton Variable.importT_terms crel lthy
+ val (fixed_crel, lthy1) = yield_singleton Variable.importT_terms crel lthy0
val [rty', qty] = (binder_types o fastype_of) fixed_crel
- val (param_rel, args) = Lifting_Term.generate_parametrized_relator lthy rty'
+ val (param_rel, args) = Lifting_Term.generate_parametrized_relator lthy1 rty'
val rty_raw = (domain_type o range_type o fastype_of) param_rel
- val thy = Proof_Context.theory_of lthy
- val tyenv_match = Sign.typ_match thy (rty_raw, rty') Vartab.empty
+ val tyenv_match = Sign.typ_match (Proof_Context.theory_of lthy1) (rty_raw, rty') Vartab.empty
val param_rel_subst = Envir.subst_term (tyenv_match,Vartab.empty) param_rel
val args_subst = map (Envir.subst_term (tyenv_match,Vartab.empty)) args
- val lthy = Variable.declare_names fixed_crel lthy
- val (instT, lthy) = Variable.importT_inst (param_rel_subst :: args_subst) lthy
+ val (instT, lthy2) = lthy1
+ |> Variable.declare_names fixed_crel
+ |> Variable.importT_inst (param_rel_subst :: args_subst)
val args_fixed = (map (Term_Subst.instantiate (instT, []))) args_subst
val param_rel_fixed = Term_Subst.instantiate (instT, []) param_rel_subst
val rty = (domain_type o fastype_of) param_rel_fixed
@@ -98,18 +98,18 @@
let
val ((_, rhs), prove) =
Local_Defs.derived_def lthy (K []) {conditional = true} definition_term;
- val ((_, (_, raw_th)), lthy) =
+ val ((_, (_, raw_th)), lthy') =
Local_Theory.define
((Binding.concealed pcrel_name, NoSyn), (Binding.empty_atts, rhs)) lthy;
- val th = prove lthy raw_th;
+ val th = prove lthy' raw_th;
in
- (th, lthy)
+ (th, lthy')
end
- val (def_thm, lthy) = if #notes config then note_def lthy else raw_def lthy
+ val (def_thm, lthy3) = if #notes config then note_def lthy2 else raw_def lthy2
in
- (SOME def_thm, lthy)
+ (SOME def_thm, lthy3)
end
- handle Lifting_Term.PARAM_QUOT_THM (_, msg) => (print_define_pcrel_warning msg; (NONE, lthy))
+ handle Lifting_Term.PARAM_QUOT_THM (_, msg) => (print_define_pcrel_warning msg; (NONE, lthy0))
local
@@ -143,13 +143,12 @@
val inst = (#1 (dest_Var var), Thm.cterm_of ctxt' (HOLogic.eq_const (TFree fresh_var)))
in (inst, ctxt') end
- val orig_lthy = lthy
- val (args_inst, lthy) = fold_map make_inst args lthy
+ val (args_inst, args_ctxt) = fold_map make_inst args lthy
val pcr_cr_eq =
pcr_rel_def
- |> infer_instantiate lthy args_inst
+ |> infer_instantiate args_ctxt args_inst
|> Conv.fconv_rule (Conv.arg_conv (Conv.arg1_conv
- (Transfer.bottom_rewr_conv (Transfer.get_relator_eq lthy))))
+ (Transfer.bottom_rewr_conv (Transfer.get_relator_eq args_ctxt))))
in
case (Thm.term_of o Thm.rhs_of) pcr_cr_eq of
Const (\<^const_name>\<open>relcompp\<close>, _) $ Const (\<^const_name>\<open>HOL.eq\<close>, _) $ _ =>
@@ -158,13 +157,14 @@
pcr_cr_eq
|> Conv.fconv_rule (Conv.arg_conv (Conv.rewr_conv eq_OO_meta))
|> HOLogic.mk_obj_eq
- |> singleton (Variable.export lthy orig_lthy)
- val lthy = (#notes config ? (Local_Theory.note
- ((Binding.qualify_name true qty_name "pcr_cr_eq", []), [thm]) #> snd)) lthy
+ |> singleton (Variable.export args_ctxt lthy)
+ val lthy' = lthy
+ |> #notes config ?
+ (Local_Theory.note ((Binding.qualify_name true qty_name "pcr_cr_eq", []), [thm]) #> #2)
in
- (thm, lthy)
+ (thm, lthy')
end
- | Const (\<^const_name>\<open>relcompp\<close>, _) $ t $ _ => print_generate_pcr_cr_eq_error lthy t
+ | Const (\<^const_name>\<open>relcompp\<close>, _) $ t $ _ => print_generate_pcr_cr_eq_error args_ctxt t
| _ => error "generate_pcr_cr_eq: implementation error"
end
end
@@ -234,38 +234,40 @@
fun lifting_bundle qty_full_name qinfo lthy =
let
+ val thy = Proof_Context.theory_of lthy
+
val binding =
Binding.qualify_name true (qty_full_name |> Long_Name.base_name |> Binding.name) "lifting"
val morphed_binding = Morphism.binding (Local_Theory.target_morphism lthy) binding
- val bundle_name = Name_Space.full_name (Name_Space.naming_of
- (Context.Theory (Proof_Context.theory_of lthy))) morphed_binding
+ val bundle_name =
+ Name_Space.full_name (Name_Space.naming_of (Context.Theory thy)) morphed_binding
fun phi_qinfo phi = Lifting_Info.transform_quotient phi qinfo
- val thy = Proof_Context.theory_of lthy
val dummy_thm = Thm.transfer thy Drule.dummy_thm
val restore_lifting_att =
([dummy_thm],
[map (Token.make_string o rpair Position.none)
["Lifting.lifting_restore_internal", bundle_name]])
in
- lthy
- |> Local_Theory.declaration {syntax = false, pervasive = true}
- (fn phi => Lifting_Info.init_restore_data bundle_name (phi_qinfo phi))
- |> Bundle.bundle ((binding, [restore_lifting_att])) []
- |> pair binding
+ lthy
+ |> Local_Theory.declaration {syntax = false, pervasive = true}
+ (fn phi => Lifting_Info.init_restore_data bundle_name (phi_qinfo phi))
+ |> Bundle.bundle ((binding, [restore_lifting_att])) []
+ |> pair binding
end
fun setup_lifting_infr config quot_thm opt_reflp_thm lthy =
let
val _ = quot_thm_sanity_check lthy quot_thm
val (_, qty) = quot_thm_rty_qty quot_thm
- val (pcrel_def, lthy) = define_pcrel config (quot_thm_crel quot_thm) lthy
+ val (pcrel_def, lthy1) = define_pcrel config (quot_thm_crel quot_thm) lthy
(**)
- val pcrel_def = Option.map (Morphism.thm (Local_Theory.target_morphism lthy)) pcrel_def
+ val pcrel_def = Option.map (Morphism.thm (Local_Theory.target_morphism lthy1)) pcrel_def
(**)
- val (pcr_cr_eq, lthy) = case pcrel_def of
- SOME pcrel_def => apfst SOME (define_pcr_cr_eq config lthy pcrel_def)
- | NONE => (NONE, lthy)
+ val (pcr_cr_eq, lthy2) =
+ case pcrel_def of
+ SOME pcrel_def => apfst SOME (define_pcr_cr_eq config lthy1 pcrel_def)
+ | NONE => (NONE, lthy1)
val pcr_info = case pcrel_def of
SOME pcrel_def => SOME { pcrel_def = pcrel_def, pcr_cr_eq = the pcr_cr_eq }
| NONE => NONE
@@ -273,18 +275,19 @@
val qty_full_name = (fst o dest_Type) qty
fun quot_info phi = Lifting_Info.transform_quotient phi quotients
val reflexivity_rule_attr = Attrib.internal (K Lifting_Info.add_reflexivity_rule_attribute)
- val lthy = case opt_reflp_thm of
- SOME reflp_thm => lthy
- |> (snd oo Local_Theory.note) ((Binding.empty, [reflexivity_rule_attr]),
- [reflp_thm RS @{thm reflp_ge_eq}])
- |> define_code_constr quot_thm
- | NONE => lthy
- |> define_abs_type quot_thm
+ val lthy3 =
+ case opt_reflp_thm of
+ SOME reflp_thm =>
+ lthy2
+ |> (#2 oo Local_Theory.note)
+ ((Binding.empty, [reflexivity_rule_attr]), [reflp_thm RS @{thm reflp_ge_eq}])
+ |> define_code_constr quot_thm
+ | NONE => lthy2 |> define_abs_type quot_thm
in
- lthy
- |> Local_Theory.declaration {syntax = false, pervasive = true}
+ lthy3
+ |> Local_Theory.declaration {syntax = false, pervasive = true}
(fn phi => Lifting_Info.update_quotients qty_full_name (quot_info phi))
- |> lifting_bundle qty_full_name quotients
+ |> lifting_bundle qty_full_name quotients
end
local
@@ -321,7 +324,7 @@
val OO_rules = @{thms left_total_OO left_unique_OO right_total_OO right_unique_OO bi_total_OO
bi_unique_OO}
in
- fun parametrize_class_constraint ctxt pcr_def constraint =
+ fun parametrize_class_constraint ctxt0 pcr_def constraint =
let
fun generate_transfer_rule pcr_def constraint goal ctxt =
let
@@ -368,14 +371,14 @@
Pretty.str thm_name,
Pretty.str " transfer rule found:",
Pretty.brk 1] @
- Pretty.commas (map (Syntax.pretty_term ctxt) non_trivial_assms))
+ Pretty.commas (map (Syntax.pretty_term ctxt0) non_trivial_assms))
|> Pretty.string_of
|> warning
end
end
val goal = make_goal pcr_def constraint
- val thm = generate_transfer_rule pcr_def constraint goal ctxt
+ val thm = generate_transfer_rule pcr_def constraint goal ctxt0
val _ = check_assms thm
in
thm
@@ -439,7 +442,7 @@
reduced_assm RS thm
end
in
- fun parametrize_domain dom_thm (pcr_info : Lifting_Info.pcr) ctxt =
+ fun parametrize_domain dom_thm (pcr_info : Lifting_Info.pcr) ctxt0 =
let
fun reduce_first_assm ctxt rules thm =
let
@@ -456,11 +459,11 @@
val pcr_Domainp_par_left_total =
(dom_thm RS @{thm pcr_Domainp_par_left_total})
|> fold_Domainp_pcrel pcrel_def
- |> reduce_first_assm ctxt (Lifting_Info.get_reflexivity_rules ctxt)
+ |> reduce_first_assm ctxt0 (Lifting_Info.get_reflexivity_rules ctxt0)
val pcr_Domainp_par =
(dom_thm RS @{thm pcr_Domainp_par})
|> fold_Domainp_pcrel pcrel_def
- |> reduce_Domainp ctxt (Transfer.get_relator_domain ctxt)
+ |> reduce_Domainp ctxt0 (Transfer.get_relator_domain ctxt0)
val pcr_Domainp =
(dom_thm RS @{thm pcr_Domainp})
|> fold_Domainp_pcrel pcrel_def
@@ -513,10 +516,10 @@
opt_par_thm - a parametricity theorem for R
*)
-fun setup_by_quotient (config: config) quot_thm opt_reflp_thm opt_par_thm lthy =
+fun setup_by_quotient (config: config) quot_thm opt_reflp_thm opt_par_thm lthy0 =
let
(**)
- val quot_thm = Morphism.thm (Local_Theory.target_morphism lthy) quot_thm
+ val quot_thm = Morphism.thm (Local_Theory.target_morphism lthy0) quot_thm
(**)
val (rty, qty) = quot_thm_rty_qty quot_thm
val induct_attr = Attrib.internal (K (Induct.induct_type (fst (dest_Type qty))))
@@ -558,21 +561,18 @@
notes2 @ notes1 @ notes
end
- fun generate_parametric_rel_eq lthy transfer_rule opt_param_thm =
- option_fold transfer_rule (Lifting_Def.generate_parametric_transfer_rule lthy transfer_rule) opt_param_thm
- handle Lifting_Term.MERGE_TRANSFER_REL msg =>
- let
- val error_msg = cat_lines
- ["Generation of a parametric transfer rule for the quotient relation failed.",
- (Pretty.string_of (Pretty.block
- [Pretty.str "Reason:", Pretty.brk 2, msg]))]
- in
- error error_msg
- end
+ fun generate_parametric_rel_eq ctxt transfer_rule opt_param_thm =
+ (case opt_param_thm of
+ NONE => transfer_rule
+ | SOME param_thm =>
+ (Lifting_Def.generate_parametric_transfer_rule ctxt transfer_rule param_thm
+ handle Lifting_Term.MERGE_TRANSFER_REL msg =>
+ error ("Generation of a parametric transfer rule for the quotient relation failed:\n"
+ ^ Pretty.string_of msg)))
- fun setup_transfer_rules_par lthy notes =
+ fun setup_transfer_rules_par ctxt notes =
let
- val pcrel_info = (the (get_pcrel_info lthy qty_full_name))
+ val pcrel_info = the (get_pcrel_info ctxt qty_full_name)
val pcrel_def = #pcrel_def pcrel_info
val notes1 =
case opt_reflp_thm of
@@ -580,12 +580,12 @@
let
val left_total = ([quot_thm, reflp_thm] MRSL @{thm Quotient_left_total})
val bi_total = ([quot_thm, reflp_thm] MRSL @{thm Quotient_bi_total})
- val domain_thms = parametrize_total_domain left_total pcrel_def lthy
- val id_abs_transfer = generate_parametric_id lthy rty
- (Lifting_Term.parametrize_transfer_rule lthy
+ val domain_thms = parametrize_total_domain left_total pcrel_def ctxt
+ val id_abs_transfer = generate_parametric_id ctxt rty
+ (Lifting_Term.parametrize_transfer_rule ctxt
([quot_thm, reflp_thm] MRSL @{thm Quotient_id_abs_transfer}))
- val left_total = parametrize_class_constraint lthy pcrel_def left_total
- val bi_total = parametrize_class_constraint lthy pcrel_def bi_total
+ val left_total = parametrize_class_constraint ctxt pcrel_def left_total
+ val bi_total = parametrize_class_constraint ctxt pcrel_def bi_total
val thms =
[("id_abs_transfer", [id_abs_transfer], @{attributes [transfer_rule]}),
("left_total", [left_total], @{attributes [transfer_rule]}),
@@ -593,19 +593,19 @@
in
map_thms qualify I thms @ map_thms qualify I domain_thms
end
- | NONE =>
+ | NONE =>
let
- val thms = parametrize_domain dom_thm pcrel_info lthy
+ val thms = parametrize_domain dom_thm pcrel_info ctxt
in
map_thms qualify I thms
end
- val rel_eq_transfer = generate_parametric_rel_eq lthy
- (Lifting_Term.parametrize_transfer_rule lthy (quot_thm RS @{thm Quotient_rel_eq_transfer}))
+ val rel_eq_transfer = generate_parametric_rel_eq ctxt
+ (Lifting_Term.parametrize_transfer_rule ctxt (quot_thm RS @{thm Quotient_rel_eq_transfer}))
opt_par_thm
- val right_unique = parametrize_class_constraint lthy pcrel_def
+ val right_unique = parametrize_class_constraint ctxt pcrel_def
(quot_thm RS @{thm Quotient_right_unique})
- val right_total = parametrize_class_constraint lthy pcrel_def
+ val right_total = parametrize_class_constraint ctxt pcrel_def
(quot_thm RS @{thm Quotient_right_total})
val notes2 = map_thms qualify I
[("rel_eq_transfer", [rel_eq_transfer], @{attributes [transfer_rule]}),
@@ -617,15 +617,15 @@
fun setup_rules lthy =
let
- val thms = if is_some (get_pcrel_info lthy qty_full_name)
- then setup_transfer_rules_par lthy notes1 else setup_transfer_rules_nonpar notes1
- in
- notes (#notes config) thms lthy
- end
+ val thms =
+ if is_some (get_pcrel_info lthy qty_full_name)
+ then setup_transfer_rules_par lthy notes1
+ else setup_transfer_rules_nonpar notes1
+ in notes (#notes config) thms lthy end
in
- lthy
- |> setup_lifting_infr config quot_thm opt_reflp_thm
- ||> setup_rules
+ lthy0
+ |> setup_lifting_infr config quot_thm opt_reflp_thm
+ ||> setup_rules
end
(*
@@ -636,12 +636,12 @@
typedef_thm - a typedef theorem (type_definition Rep Abs S)
*)
-fun setup_by_typedef_thm config typedef_thm lthy =
+fun setup_by_typedef_thm config typedef_thm lthy0 =
let
val (_ $ rep_fun $ _ $ typedef_set) = (HOLogic.dest_Trueprop o Thm.prop_of) typedef_thm
- val (T_def, lthy) = define_crel config rep_fun lthy
+ val (T_def, lthy1) = define_crel config rep_fun lthy0
(**)
- val T_def = Morphism.thm (Local_Theory.target_morphism lthy) T_def
+ val T_def = Morphism.thm (Local_Theory.target_morphism lthy1) T_def
(**)
val quot_thm = case typedef_set of
Const (\<^const_name>\<open>top\<close>, _) =>
@@ -686,9 +686,9 @@
map_thms qualify (fn thm => [typedef_thm, T_def] MRSL thm) thms @ notes1 @ notes
end
- fun setup_transfer_rules_par lthy notes =
+ fun setup_transfer_rules_par ctxt notes =
let
- val pcrel_info = (the (get_pcrel_info lthy qty_full_name))
+ val pcrel_info = (the (get_pcrel_info ctxt qty_full_name))
val pcrel_def = #pcrel_def pcrel_info
val notes1 =
@@ -697,11 +697,11 @@
let
val left_total = ([quot_thm, reflp_thm] MRSL @{thm Quotient_left_total})
val bi_total = ([quot_thm, reflp_thm] MRSL @{thm Quotient_bi_total})
- val domain_thms = parametrize_total_domain left_total pcrel_def lthy
- val left_total = parametrize_class_constraint lthy pcrel_def left_total
- val bi_total = parametrize_class_constraint lthy pcrel_def bi_total
- val id_abs_transfer = generate_parametric_id lthy rty
- (Lifting_Term.parametrize_transfer_rule lthy
+ val domain_thms = parametrize_total_domain left_total pcrel_def ctxt
+ val left_total = parametrize_class_constraint ctxt pcrel_def left_total
+ val bi_total = parametrize_class_constraint ctxt pcrel_def bi_total
+ val id_abs_transfer = generate_parametric_id ctxt rty
+ (Lifting_Term.parametrize_transfer_rule ctxt
([quot_thm, reflp_thm] MRSL @{thm Quotient_id_abs_transfer}))
val thms =
[("left_total", [left_total], @{attributes [transfer_rule]}),
@@ -712,17 +712,17 @@
end
| NONE =>
let
- val thms = parametrize_domain dom_thm pcrel_info lthy
+ val thms = parametrize_domain dom_thm pcrel_info ctxt
in
map_thms qualify I thms
end
- val notes2 = map_thms qualify (fn thm => generate_parametric_id lthy rty
- (Lifting_Term.parametrize_transfer_rule lthy ([typedef_thm, T_def] MRSL thm)))
+ val notes2 = map_thms qualify (fn thm => generate_parametric_id ctxt rty
+ (Lifting_Term.parametrize_transfer_rule ctxt ([typedef_thm, T_def] MRSL thm)))
[("rep_transfer", @{thms typedef_rep_transfer}, @{attributes [transfer_rule]})];
val notes3 =
map_thms qualify
- (fn thm => parametrize_class_constraint lthy pcrel_def ([typedef_thm, T_def] MRSL thm))
+ (fn thm => parametrize_class_constraint ctxt pcrel_def ([typedef_thm, T_def] MRSL thm))
[("left_unique", @{thms typedef_left_unique}, @{attributes [transfer_rule]}),
("right_unique", @{thms typedef_right_unique},@{attributes [transfer_rule]}),
("bi_unique", @{thms typedef_bi_unique}, @{attributes [transfer_rule]}),
@@ -735,15 +735,15 @@
fun setup_rules lthy =
let
- val thms = if is_some (get_pcrel_info lthy qty_full_name)
- then setup_transfer_rules_par lthy notes1 else setup_transfer_rules_nonpar notes1
- in
- notes (#notes config) thms lthy
- end
+ val thms =
+ if is_some (get_pcrel_info lthy qty_full_name)
+ then setup_transfer_rules_par lthy notes1
+ else setup_transfer_rules_nonpar notes1
+ in notes (#notes config) thms lthy end
in
- lthy
- |> setup_lifting_infr config quot_thm opt_reflp_thm
- ||> setup_rules
+ lthy1
+ |> setup_lifting_infr config quot_thm opt_reflp_thm
+ ||> setup_rules
end
fun setup_lifting_cmd xthm opt_reflp_xthm opt_par_xthm lthy =
@@ -918,7 +918,7 @@
ctxt
|> lifting_restore (#quotient restore_info)
|> fold_rev Transfer.transfer_raw_add (Item_Net.content (#transfer_rules restore_info))
- | NONE => ctxt
+ | NONE => ctxt
end
val lifting_restore_internal_attribute_setup =
@@ -1034,7 +1034,7 @@
Local_Theory.declaration {syntax = false, pervasive = true}
(fn phi => Lifting_Info.add_transfer_rules_in_restore_data pointer
(new_transfer_rules refresh_data lthy phi)) lthy
- | NONE => error "The lifting bundle refers to non-existent restore data."
+ | NONE => error "The lifting bundle refers to non-existent restore data."
end
fun lifting_update_cmd bundle_name lthy =
--- a/src/HOL/Tools/Lifting/lifting_term.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Lifting/lifting_term.ML Sun Jun 09 22:22:36 2019 +0200
@@ -53,12 +53,8 @@
type quotients = Lifting_Info.quotient Symtab.table
fun match ctxt err ty_pat ty =
- let
- val thy = Proof_Context.theory_of ctxt
- in
- Sign.typ_match thy (ty_pat, ty) Vartab.empty
- handle Type.TYPE_MATCH => err ctxt ty_pat ty
- end
+ Sign.typ_match (Proof_Context.theory_of ctxt) (ty_pat, ty) Vartab.empty
+ handle Type.TYPE_MATCH => err ctxt ty_pat ty
fun equiv_match_err ctxt ty_pat ty =
let
@@ -82,11 +78,7 @@
Pretty.str "found."])
fun get_quot_thm quotients ctxt s =
- let
- val thy = Proof_Context.theory_of ctxt
- in
- Thm.transfer thy (#quot_thm (get_quot_data quotients s))
- end
+ Thm.transfer' ctxt (#quot_thm (get_quot_data quotients s))
fun has_pcrel_info quotients s = is_some (#pcr_info (get_quot_data quotients s))
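The hunks in this file replace explicit Proof_Context.theory_of plumbing with Thm.transfer'. For orientation, a minimal sketch of the presumed relationship between the two operations (illustration only, assuming the usual Isabelle/Pure convention that primed variants take a Proof.context):

    (* assumed reading: Thm.transfer' is the Proof.context form of Thm.transfer *)
    fun transfer'_sketch ctxt = Thm.transfer (Proof_Context.theory_of ctxt);

With that reading, get_quot_thm above and the accessors below keep their behaviour; only the theory argument is no longer spelled out.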
@@ -99,51 +91,34 @@
Pretty.str "found."])
fun get_pcrel_def quotients ctxt s =
- let
- val thy = Proof_Context.theory_of ctxt
- in
- Thm.transfer thy (#pcrel_def (get_pcrel_info quotients s))
- end
+ Thm.transfer' ctxt (#pcrel_def (get_pcrel_info quotients s))
fun get_pcr_cr_eq quotients ctxt s =
- let
- val thy = Proof_Context.theory_of ctxt
- in
- Thm.transfer thy (#pcr_cr_eq (get_pcrel_info quotients s))
- end
+ Thm.transfer' ctxt (#pcr_cr_eq (get_pcrel_info quotients s))
fun get_rel_quot_thm ctxt s =
- let
- val thy = Proof_Context.theory_of ctxt
- in
- (case Lifting_Info.lookup_quot_maps ctxt s of
- SOME map_data => Thm.transfer thy (#rel_quot_thm map_data)
- | NONE => raise QUOT_THM_INTERNAL (Pretty.block
+ (case Lifting_Info.lookup_quot_maps ctxt s of
+ SOME map_data => Thm.transfer' ctxt (#rel_quot_thm map_data)
+ | NONE => raise QUOT_THM_INTERNAL (Pretty.block
[Pretty.str ("No relator for the type " ^ quote s),
Pretty.brk 1,
Pretty.str "found."]))
- end
-
+
fun get_rel_distr_rules ctxt s tm =
- let
- val thy = Proof_Context.theory_of ctxt
- in
- (case Lifting_Info.lookup_relator_distr_data ctxt s of
- SOME rel_distr_thm => (
- case tm of
- Const (\<^const_name>\<open>POS\<close>, _) => map (Thm.transfer thy) (#pos_distr_rules rel_distr_thm)
- | Const (\<^const_name>\<open>NEG\<close>, _) => map (Thm.transfer thy) (#neg_distr_rules rel_distr_thm)
- )
- | NONE => raise QUOT_THM_INTERNAL (Pretty.block
+ (case Lifting_Info.lookup_relator_distr_data ctxt s of
+ SOME rel_distr_thm =>
+ (case tm of
+ Const (\<^const_name>\<open>POS\<close>, _) => map (Thm.transfer' ctxt) (#pos_distr_rules rel_distr_thm)
+ | Const (\<^const_name>\<open>NEG\<close>, _) => map (Thm.transfer' ctxt) (#neg_distr_rules rel_distr_thm))
+ | NONE => raise QUOT_THM_INTERNAL (Pretty.block
[Pretty.str ("No relator distr. data for the type " ^ quote s),
Pretty.brk 1,
Pretty.str "found."]))
- end
fun is_id_quot thm = Thm.eq_thm_prop (thm, @{thm identity_quotient})
-fun zip_Tvars ctxt type_name rty_Tvars qty_Tvars =
- case try (get_rel_quot_thm ctxt) type_name of
+fun zip_Tvars ctxt0 type_name rty_Tvars qty_Tvars =
+ case try (get_rel_quot_thm ctxt0) type_name of
NONE => rty_Tvars ~~ qty_Tvars
| SOME rel_quot_thm =>
let
@@ -181,27 +156,26 @@
val qty = Type (type_name, qty_Tvars)
val rel_quot_thm_concl = (Logic.strip_imp_concl o Thm.prop_of) rel_quot_thm
val schematic_rel_absT = quot_term_absT rel_quot_thm_concl;
- val thy = Proof_Context.theory_of ctxt
val absT = rty --> qty
val schematic_absT =
absT
- |> Logic.type_map (singleton (Variable.polymorphic ctxt))
+ |> Logic.type_map (singleton (Variable.polymorphic ctxt0))
|> Logic.incr_tvar (maxidx_of_typ schematic_rel_absT + 1)
(* because absT can already contain schematic variables from rty patterns *)
val maxidx = Term.maxidx_of_typs [schematic_rel_absT, schematic_absT]
- val _ = Sign.typ_unify thy (schematic_rel_absT, schematic_absT) (Vartab.empty,maxidx)
- handle Type.TUNIFY => equiv_univ_err ctxt schematic_rel_absT schematic_absT
+ val _ =
+ Sign.typ_unify (Proof_Context.theory_of ctxt0) (schematic_rel_absT, schematic_absT)
+ (Vartab.empty, maxidx)
+ handle Type.TUNIFY => equiv_univ_err ctxt0 schematic_rel_absT schematic_absT
val subs = raw_match (schematic_rel_absT, absT) Vartab.empty
val rel_quot_thm_prems = (Logic.strip_imp_prems o Thm.prop_of) rel_quot_thm
in
- map (dest_funT o
- Envir.subst_type subs o
- quot_term_absT)
- rel_quot_thm_prems
+ map (dest_funT o Envir.subst_type subs o quot_term_absT) rel_quot_thm_prems
end
-fun gen_rty_is_TVar quotients ctxt qty = qty |> Tname |> get_quot_thm quotients ctxt |>
- quot_thm_rty_qty |> fst |> is_TVar
+fun gen_rty_is_TVar quotients ctxt qty =
+ qty |> Tname |> get_quot_thm quotients ctxt
+ |> quot_thm_rty_qty |> fst |> is_TVar
fun gen_instantiate_rtys quotients ctxt (rty, (qty as Type (qty_name, _))) =
let
@@ -307,25 +281,19 @@
fun force_qty_type ctxt qty quot_thm =
let
- val thy = Proof_Context.theory_of ctxt
val (_, qty_schematic) = quot_thm_rty_qty quot_thm
- val match_env = Sign.typ_match thy (qty_schematic, qty) Vartab.empty
+ val match_env = Sign.typ_match (Proof_Context.theory_of ctxt) (qty_schematic, qty) Vartab.empty
fun prep_ty (x, (S, ty)) = ((x, S), Thm.ctyp_of ctxt ty)
val ty_inst = Vartab.fold (cons o prep_ty) match_env []
- in
- Thm.instantiate (ty_inst, []) quot_thm
- end
+ in Thm.instantiate (ty_inst, []) quot_thm end
fun check_rty_type ctxt rty quot_thm =
let
- val thy = Proof_Context.theory_of ctxt
val (rty_forced, _) = quot_thm_rty_qty quot_thm
val rty_schematic = Logic.type_map (singleton (Variable.polymorphic ctxt)) rty
- val _ = Sign.typ_match thy (rty_schematic, rty_forced) Vartab.empty
+ val _ = Sign.typ_match (Proof_Context.theory_of ctxt) (rty_schematic, rty_forced) Vartab.empty
handle Type.TYPE_MATCH => raise CHECK_RTY (rty_schematic, rty_forced)
- in
- ()
- end
+ in () end
(*
The function tries to prove that rty and qty form a quotient.
@@ -344,9 +312,7 @@
val (schematic_quot_thm, _) = prove_schematic_quot_thm id_actions quotients ctxt (rty, qty) ()
val quot_thm = force_qty_type ctxt qty schematic_quot_thm
val _ = check_rty_type ctxt rty quot_thm
- in
- quot_thm
- end
+ in quot_thm end
end
(*
@@ -370,26 +336,28 @@
val tfrees_Q_t = rev (Term.add_tfree_names Q_t [])
in
fn ctxt =>
- let
- fun rename_free_var tab (Free (name, typ)) = Free (the_default name (AList.lookup op= tab name),typ)
- | rename_free_var _ t = t
-
- fun rename_free_vars tab = map_aterms (rename_free_var tab)
-
- fun rename_free_tvars tab =
- map_types (map_type_tfree (fn (name, sort) => TFree (the_default name (AList.lookup op= tab name), sort)))
-
- val (new_frees_Q_t, ctxt) = Variable.variant_fixes frees_Q_t ctxt
- val tab_frees = frees_Q_t ~~ new_frees_Q_t
-
- val (new_tfrees_Q_t, ctxt) = Variable.invent_types (replicate (length tfrees_Q_t) []) ctxt
- val tab_tfrees = tfrees_Q_t ~~ (fst o split_list) new_tfrees_Q_t
+ let
+ fun rename_free_var tab (Free (name, typ)) =
+ Free (the_default name (AList.lookup op= tab name), typ)
+ | rename_free_var _ t = t
+
+ fun rename_free_vars tab = map_aterms (rename_free_var tab)
+
+ fun rename_free_tvars tab =
+ map_types (map_type_tfree (fn (name, sort) =>
+ TFree (the_default name (AList.lookup op= tab name), sort)))
- val renamed_Q_t = rename_free_vars tab_frees Q_t
- val renamed_Q_t = rename_free_tvars tab_tfrees renamed_Q_t
- in
- (renamed_Q_t, ctxt)
- end
+ val (new_frees_Q_t, ctxt') = Variable.variant_fixes frees_Q_t ctxt
+ val tab_frees = frees_Q_t ~~ new_frees_Q_t
+
+ val (new_tfrees_Q_t, ctxt'') = Variable.invent_types (replicate (length tfrees_Q_t) []) ctxt'
+ val tab_tfrees = tfrees_Q_t ~~ (fst o split_list) new_tfrees_Q_t
+
+ val renamed_Q_t = rename_free_vars tab_frees Q_t
+ val renamed_Q_t = rename_free_tvars tab_tfrees renamed_Q_t
+ in
+ (renamed_Q_t, ctxt'')
+ end
end
(*
@@ -402,39 +370,29 @@
to the corresponding assumption in the returned theorem.
*)
-fun prove_param_quot_thm ctxt ty =
+fun prove_param_quot_thm ctxt0 ty =
let
- fun generate (ty as Type (s, tys)) (table_ctxt as (table, ctxt)) =
- if null tys
- then
- let
- val instantiated_id_quot_thm =
- Thm.instantiate' [SOME (Thm.ctyp_of ctxt ty)] [] @{thm identity_quotient}
- in
- (instantiated_id_quot_thm, (table, ctxt))
- end
- else
- let
- val (args, table_ctxt) = fold_map generate tys table_ctxt
- in
- (args MRSL (get_rel_quot_thm ctxt s), table_ctxt)
- end
+ fun generate (ty as Type (s, tys)) (table, ctxt) =
+ if null tys then
+ let
+ val instantiated_id_quot_thm =
+ Thm.instantiate' [SOME (Thm.ctyp_of ctxt ty)] [] @{thm identity_quotient}
+ in (instantiated_id_quot_thm, (table, ctxt)) end
+ else
+ let val (args, table_ctxt') = fold_map generate tys (table, ctxt)
+ in (args MRSL (get_rel_quot_thm ctxt s), table_ctxt') end
| generate ty (table, ctxt) =
- if AList.defined (op=) table ty
- then (the (AList.lookup (op=) table ty), (table, ctxt))
- else
- let
- val (Q_t, ctxt') = get_fresh_Q_t ctxt
- val Q_thm = Thm.assume (Thm.cterm_of ctxt' Q_t)
- val table' = (ty, Q_thm) :: table
- in
- (Q_thm, (table', ctxt'))
- end
+ if AList.defined (op =) table ty
+ then (the (AList.lookup (op =) table ty), (table, ctxt))
+ else
+ let
+ val (Q_t, ctxt') = get_fresh_Q_t ctxt
+ val Q_thm = Thm.assume (Thm.cterm_of ctxt' Q_t)
+ val table' = (ty, Q_thm) :: table
+ in (Q_thm, (table', ctxt')) end
- val (param_quot_thm, (table, ctxt)) = generate ty ([], ctxt)
- in
- (param_quot_thm, rev table, ctxt)
- end
+ val (param_quot_thm, (table, ctxt1)) = generate ty ([], ctxt0)
+ in (param_quot_thm, rev table, ctxt1) end
handle QUOT_THM_INTERNAL pretty_msg => raise PARAM_QUOT_THM (ty, pretty_msg)
(*
@@ -446,11 +404,10 @@
fun generate_parametrized_relator ctxt ty =
let
- val orig_ctxt = ctxt
- val (quot_thm, table, ctxt) = prove_param_quot_thm ctxt ty
+ val (quot_thm, table, ctxt') = prove_param_quot_thm ctxt ty
val parametrized_relator = quot_thm_crel quot_thm
val args = map (fn (_, q_thm) => quot_thm_crel q_thm) table
- val exported_terms = Variable.exportT_terms ctxt orig_ctxt (parametrized_relator :: args)
+ val exported_terms = Variable.exportT_terms ctxt' ctxt (parametrized_relator :: args)
in
(hd exported_terms, tl exported_terms)
end
@@ -487,7 +444,7 @@
fun rewrs_imp rules = first_imp (map rewr_imp rules)
in
- fun gen_merge_transfer_relations quotients ctxt ctm =
+ fun gen_merge_transfer_relations quotients ctxt0 ctm =
let
val ctm = Thm.dest_arg ctm
val tm = Thm.term_of ctm
@@ -498,14 +455,15 @@
fun prove_extra_assms ctxt ctm distr_rule =
let
- fun prove_assm assm = try (Goal.prove ctxt [] [] (Thm.term_of assm))
- (fn _ => SOLVED' (REPEAT_ALL_NEW (resolve_tac ctxt (Transfer.get_transfer_raw ctxt))) 1)
+ fun prove_assm assm =
+ try (Goal.prove ctxt [] [] (Thm.term_of assm)) (fn {context = goal_ctxt, ...} =>
+ SOLVED' (REPEAT_ALL_NEW (resolve_tac goal_ctxt (Transfer.get_transfer_raw goal_ctxt))) 1)
fun is_POS_or_NEG ctm =
case (head_of o Thm.term_of o Thm.dest_arg) ctm of
Const (\<^const_name>\<open>POS\<close>, _) => true
- | Const (\<^const_name>\<open>NEG\<close>, _) => true
- | _ => false
+ | Const (\<^const_name>\<open>NEG\<close>, _) => true
+ | _ => false
val inst_distr_rule = rewr_imp distr_rule ctm
val extra_assms = filter_out is_POS_or_NEG (cprems_of inst_distr_rule)
@@ -518,7 +476,7 @@
fun cannot_merge_error_msg () = Pretty.block
[Pretty.str "Rewriting (merging) of this term has failed:",
Pretty.brk 1,
- Syntax.pretty_term ctxt rel]
+ Syntax.pretty_term ctxt0 rel]
in
case get_args 2 rel of
@@ -530,25 +488,25 @@
in
if same_type_constrs (rty', qty) then
let
- val distr_rules = get_rel_distr_rules ctxt ((fst o dest_Type) rty') (head_of tm)
- val distr_rule = get_first (prove_extra_assms ctxt ctm) distr_rules
+ val distr_rules = get_rel_distr_rules ctxt0 ((fst o dest_Type) rty') (head_of tm)
+ val distr_rule = get_first (prove_extra_assms ctxt0 ctm) distr_rules
in
case distr_rule of
NONE => raise MERGE_TRANSFER_REL (cannot_merge_error_msg ())
- | SOME distr_rule => (map (gen_merge_transfer_relations quotients ctxt)
- (cprems_of distr_rule))
- MRSL distr_rule
+ | SOME distr_rule =>
+ map (gen_merge_transfer_relations quotients ctxt0) (cprems_of distr_rule)
+ MRSL distr_rule
end
else
let
- val pcrel_def = get_pcrel_def quotients ctxt ((fst o dest_Type) qty)
+ val pcrel_def = get_pcrel_def quotients ctxt0 ((fst o dest_Type) qty)
val pcrel_const = (head_of o fst o Logic.dest_equals o Thm.prop_of) pcrel_def
in
if same_constants pcrel_const (head_of trans_rel) then
let
val unfolded_ctm = Thm.rhs_of (Conv.arg1_conv (Conv.arg_conv (Conv.rewr_conv pcrel_def)) ctm)
val distr_rule = rewrs_imp @{thms POS_pcr_rule NEG_pcr_rule} unfolded_ctm
- val result = (map (gen_merge_transfer_relations quotients ctxt)
+ val result = (map (gen_merge_transfer_relations quotients ctxt0)
(cprems_of distr_rule)) MRSL distr_rule
val fold_pcr_rel = Conv.rewr_conv (Thm.symmetric pcrel_def)
in
@@ -572,8 +530,8 @@
(e.g., (rel_S R) OO (pcr_T op=) --> pcr_T R using the definition pcr_T R = (rel_S R) OO cr_T).
*)
- fun merge_transfer_relations ctxt ctm = gen_merge_transfer_relations
- (Lifting_Info.get_quotients ctxt) ctxt ctm
+ fun merge_transfer_relations ctxt ctm =
+ gen_merge_transfer_relations (Lifting_Info.get_quotients ctxt) ctxt ctm
end
fun gen_parametrize_transfer_rule quotients ctxt thm =
--- a/src/HOL/Tools/Lifting/lifting_util.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Lifting/lifting_util.ML Sun Jun 09 22:22:36 2019 +0200
@@ -7,7 +7,6 @@
signature LIFTING_UTIL =
sig
val MRSL: thm list * thm -> thm
- val option_fold: 'b -> ('a -> 'b) -> 'a option -> 'b
val dest_Quotient: term -> term * term * term * term
val quot_thm_rel: thm -> term
@@ -41,9 +40,6 @@
fun ants MRSL thm = fold (fn rl => fn thm => rl RS thm) ants thm
-fun option_fold a _ NONE = a
- | option_fold _ f (SOME x) = f x
-
fun dest_Quotient (Const (\<^const_name>\<open>Quotient\<close>, _) $ rel $ abs $ rep $ cr)
= (rel, abs, rep, cr)
| dest_Quotient t = raise TERM ("dest_Quotient", [t])
@@ -128,7 +124,6 @@
end
fun conceal_naming_result f lthy =
- let val old_lthy = lthy
- in lthy |> Proof_Context.concealed |> f ||> Proof_Context.restore_naming old_lthy end;
+ lthy |> Proof_Context.concealed |> f ||> Proof_Context.restore_naming lthy;
end
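The removed option_fold duplicated combinators that the standard library already provides. A minimal sketch of an equivalent spelling (illustration only; the_default and Option.map are assumed from the Isabelle/SML basis libraries):

    (* option_fold a f x  behaved like: *)
    fun option_fold_sketch a f x = the_default a (Option.map f x);
    (* option_fold_sketch 0 String.size NONE = 0
       option_fold_sketch 0 String.size (SOME "abc") = 3 *)

so dropping it loses no expressiveness.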
--- a/src/HOL/Tools/Meson/meson_clausify.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Meson/meson_clausify.ML Sun Jun 09 22:22:36 2019 +0200
@@ -346,8 +346,8 @@
| _ => false) fully_skolemized_t then
let
val (fully_skolemized_ct, ctxt) =
- Variable.import_terms true [fully_skolemized_t] ctxt
- |>> the_single |>> Thm.cterm_of ctxt
+ yield_singleton (Variable.import_terms true) fully_skolemized_t ctxt
+ |>> Thm.cterm_of ctxt
in
(SOME (discharger_th, fully_skolemized_ct),
(Thm.assume fully_skolemized_ct, ctxt))
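The hunk above swaps a singleton-list pattern match for the yield_singleton combinator, a pattern that recurs in several later hunks. Its presumed definition, as a sketch for orientation only (not part of this changeset):

    fun yield_singleton_sketch f x = f [x] #>> the_single;

so yield_singleton (Variable.import_terms true) t ctxt imports the single term t and returns it directly, without the fragile [t'] pattern.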
@@ -363,27 +363,26 @@
(* Convert a theorem to CNF, with additional premises due to skolemization. *)
fun cnf_axiom ctxt0 new_skolem combinators ax_no th =
let
- val thy = Proof_Context.theory_of ctxt0
- val choice_ths = choice_theorems thy
- val (opt, (nnf_th, ctxt)) =
+ val choice_ths = choice_theorems (Proof_Context.theory_of ctxt0)
+ val (opt, (nnf_th, ctxt1)) =
nnf_axiom choice_ths new_skolem ax_no th ctxt0
fun clausify th =
make_cnf
(if new_skolem orelse null choice_ths then []
- else map (old_skolem_theorem_of_def ctxt) (old_skolem_defs th))
- th ctxt
- val (cnf_ths, ctxt) = clausify nnf_th
+ else map (old_skolem_theorem_of_def ctxt1) (old_skolem_defs th))
+ th ctxt1
+ val (cnf_ths, ctxt2) = clausify nnf_th
fun intr_imp ct th =
- Thm.instantiate ([], [((("i", 0), \<^typ>\<open>nat\<close>), Thm.cterm_of ctxt (HOLogic.mk_nat ax_no))])
+ Thm.instantiate ([], [((("i", 0), \<^typ>\<open>nat\<close>), Thm.cterm_of ctxt2 (HOLogic.mk_nat ax_no))])
(zero_var_indexes @{thm skolem_COMBK_D})
RS Thm.implies_intr ct th
in
- (opt |> Option.map (I #>> singleton (Variable.export ctxt ctxt0)
+ (opt |> Option.map (I #>> singleton (Variable.export ctxt2 ctxt0)
##> (Thm.term_of #> HOLogic.dest_Trueprop
- #> singleton (Variable.export_terms ctxt ctxt0))),
- cnf_ths |> map (combinators ? introduce_combinators_in_theorem ctxt
+ #> singleton (Variable.export_terms ctxt2 ctxt0))),
+ cnf_ths |> map (combinators ? introduce_combinators_in_theorem ctxt2
#> (case opt of SOME (_, ct) => intr_imp ct | NONE => I))
- |> Variable.export ctxt ctxt0
+ |> Variable.export ctxt2 ctxt0
|> finish_cnf
|> map Thm.close_derivation)
end
--- a/src/HOL/Tools/Predicate_Compile/code_prolog.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Predicate_Compile/code_prolog.ML Sun Jun 09 22:22:36 2019 +0200
@@ -1018,7 +1018,7 @@
val t = Syntax.read_term ctxt raw_t
val t' = values ctxt soln t
val ty' = Term.type_of t'
- val ctxt' = Variable.auto_fixes t' ctxt
+ val ctxt' = Proof_Context.augment t' ctxt
val _ = tracing "Printing terms..."
in
Print_Mode.with_modes print_modes (fn () =>
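In this and several of the following files, Variable.auto_fixes is replaced by Proof_Context.augment. The usage pattern stays the same; a minimal sketch (illustration only, assuming augment keeps the term -> Proof.context -> Proof.context shape and, like auto_fixes, declares the term and fixes its remaining frees):

    fun print_augmented ctxt t =
      let val ctxt' = Proof_Context.augment t ctxt   (* declare t, fix its frees *)
      in Pretty.writeln (Syntax.pretty_term ctxt' t) end;

which matches how the augmented context ctxt' is used for printing in the hunk above.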
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile_aux.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile_aux.ML Sun Jun 09 22:22:36 2019 +0200
@@ -1048,7 +1048,7 @@
fun import_intros inp_pred [] ctxt =
let
- val ([outp_pred], ctxt') = Variable.import_terms true [inp_pred] ctxt
+ val (outp_pred, ctxt') = yield_singleton (Variable.import_terms true) inp_pred ctxt
val T = fastype_of outp_pred
val paramTs = ho_argsT_of_typ (binder_types T)
val (param_names, _) = Variable.variant_fixes
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile_core.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile_core.ML Sun Jun 09 22:22:36 2019 +0200
@@ -1648,7 +1648,7 @@
binds = [], cases = []}) preds cases_rules
val case_env = map2 (fn p => fn c => (Long_Name.base_name p, SOME c)) preds cases
val lthy'' = lthy'
- |> fold Variable.auto_fixes cases_rules
+ |> fold Proof_Context.augment cases_rules
|> Proof_Context.update_cases case_env
fun after_qed thms goal_ctxt =
let
@@ -1983,7 +1983,7 @@
val t = Syntax.read_term ctxt raw_t
val ((t', stats), ctxt') = values param_user_modes options k t ctxt
val ty' = Term.type_of t'
- val ctxt'' = Variable.auto_fixes t' ctxt'
+ val ctxt'' = Proof_Context.augment t' ctxt'
val pretty_stat =
(case stats of
NONE => []
--- a/src/HOL/Tools/Quickcheck/exhaustive_generators.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Quickcheck/exhaustive_generators.ML Sun Jun 09 22:22:36 2019 +0200
@@ -318,7 +318,7 @@
fun mk_fast_generator_expr ctxt (t, eval_terms) =
let
- val ctxt' = Variable.auto_fixes t ctxt
+ val ctxt' = Proof_Context.augment t ctxt
val names = Term.add_free_names t []
val frees = map Free (Term.add_frees t [])
fun lookup v = the (AList.lookup (op =) (names ~~ frees) v)
@@ -351,7 +351,7 @@
fun mk_generator_expr ctxt (t, eval_terms) =
let
- val ctxt' = Variable.auto_fixes t ctxt
+ val ctxt' = Proof_Context.augment t ctxt
val names = Term.add_free_names t []
val frees = map Free (Term.add_frees t [])
fun lookup v = the (AList.lookup (op =) (names ~~ frees) v)
@@ -374,7 +374,7 @@
fun mk_full_generator_expr ctxt (t, eval_terms) =
let
val thy = Proof_Context.theory_of ctxt
- val ctxt' = Variable.auto_fixes t ctxt
+ val ctxt' = Proof_Context.augment t ctxt
val names = Term.add_free_names t []
val frees = map Free (Term.add_frees t [])
val ([depth_name, genuine_only_name], ctxt'') =
@@ -415,7 +415,7 @@
fun mk_validator_expr ctxt t =
let
fun bounded_forallT T = (T --> \<^typ>\<open>bool\<close>) --> \<^typ>\<open>natural\<close> --> \<^typ>\<open>bool\<close>
- val ctxt' = Variable.auto_fixes t ctxt
+ val ctxt' = Proof_Context.augment t ctxt
val names = Term.add_free_names t []
val frees = map Free (Term.add_frees t [])
fun lookup v = the (AList.lookup (op =) (names ~~ frees) v)
--- a/src/HOL/Tools/SMT/smt_replay.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/SMT/smt_replay.ML Sun Jun 09 22:22:36 2019 +0200
@@ -186,14 +186,14 @@
|> map (fn ithm as (_, thm) => (ithm, Thm.cprop_of thm aconvc ct))
in
-fun add_asserted tab_update tab_empty p_extract cond outer_ctxt rewrite_rules assms steps ctxt =
+fun add_asserted tab_update tab_empty p_extract cond outer_ctxt rewrite_rules assms steps ctxt0 =
let
- val eqs = map (rewrite ctxt [rewrite_true_rule]) rewrite_rules
+ val eqs = map (rewrite ctxt0 [rewrite_true_rule]) rewrite_rules
val eqs' = union Thm.eq_thm eqs prep_rules
val assms_net =
assms
- |> map (apsnd (rewrite ctxt eqs'))
+ |> map (apsnd (rewrite ctxt0 eqs'))
|> map (apsnd (Conv.fconv_rule Thm.eta_conversion))
|> thm_net_of snd
@@ -229,7 +229,7 @@
else
cx
end
- in fold add steps (([], []), (ctxt, tab_empty)) end
+ in fold add steps (([], []), (ctxt0, tab_empty)) end
end
--- a/src/HOL/Tools/SMT/smt_solver.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/SMT/smt_solver.ML Sun Jun 09 22:22:36 2019 +0200
@@ -7,7 +7,7 @@
signature SMT_SOLVER =
sig
(*configuration*)
- datatype outcome = Unsat | Sat | Unknown
+ datatype outcome = Unsat | Sat | Unknown | Time_Out
type parsed_proof =
{outcome: SMT_Failure.failure option,
@@ -138,7 +138,7 @@
(* configuration *)
-datatype outcome = Unsat | Sat | Unknown
+datatype outcome = Unsat | Sat | Unknown | Time_Out
type parsed_proof =
{outcome: SMT_Failure.failure option,
@@ -198,6 +198,7 @@
(case parse_proof0 of
SOME pp => pp outer_ctxt replay_data xfacts prems concl lines
| NONE => {outcome = NONE, fact_ids = NONE, atp_proof = K []})
+ | (Time_Out, _) => raise SMT_Failure.SMT (SMT_Failure.Time_Out)
| (result, _) => raise SMT_Failure.SMT (SMT_Failure.Counterexample (result = Sat)))
fun replay outcome replay0 oracle outer_ctxt
@@ -211,6 +212,7 @@
SOME replay => replay outer_ctxt replay_data lines
| NONE => error "No proof reconstruction for solver -- \
\declare [[smt_oracle]] to allow oracle")
+ | (Time_Out, _) => raise SMT_Failure.SMT (SMT_Failure.Time_Out)
| (result, _) => raise SMT_Failure.SMT (SMT_Failure.Counterexample (result = Sat)))
val cfalse = Thm.cterm_of \<^context> \<^prop>\<open>False\<close>
--- a/src/HOL/Tools/SMT/smt_systems.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/SMT/smt_systems.ML Sun Jun 09 22:22:36 2019 +0200
@@ -19,10 +19,11 @@
fun make_command name () = [getenv (name ^ "_SOLVER")]
-fun outcome_of unsat sat unknown solver_name line =
+fun outcome_of unsat sat unknown timeout solver_name line =
if String.isPrefix unsat line then SMT_Solver.Unsat
else if String.isPrefix sat line then SMT_Solver.Sat
else if String.isPrefix unknown line then SMT_Solver.Unknown
+ else if String.isPrefix timeout line then SMT_Solver.Time_Out
else raise SMT_Failure.SMT (SMT_Failure.Other_Failure ("Solver " ^ quote solver_name ^
" failed -- enable tracing using the " ^ quote (Config.name_of SMT_Config.trace) ^
" option for details"))
@@ -56,7 +57,7 @@
options = cvc3_options,
smt_options = [],
default_max_relevant = 400 (* FUDGE *),
- outcome = on_first_line (outcome_of "unsat" "sat" "unknown"),
+ outcome = on_first_line (outcome_of "unsat" "sat" "unknown" "timeout"),
parse_proof = NONE,
replay = NONE }
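With the extra timeout prefix, outcome_of now yields a fourth result. A rough sketch of the expected classification (illustration only; classify is a hypothetical local binding using the prefixes configured for the solvers in this file):

    val classify = outcome_of "unsat" "sat" "unknown" "timeout" "some_solver";
    (* classify "unsat ..."   = SMT_Solver.Unsat
       classify "sat ..."     = SMT_Solver.Sat
       classify "unknown ..." = SMT_Solver.Unknown
       classify "timeout ..." = SMT_Solver.Time_Out
       anything else raises SMT_Failure.SMT (Other_Failure ...) *)

The Time_Out case is then mapped to SMT_Failure.Time_Out in smt_solver.ML above.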
@@ -96,7 +97,7 @@
options = cvc4_options,
smt_options = [(":produce-unsat-cores", "true")],
default_max_relevant = 400 (* FUDGE *),
- outcome = on_first_line (outcome_of "unsat" "sat" "unknown"),
+ outcome = on_first_line (outcome_of "unsat" "sat" "unknown" "timeout"),
parse_proof = SOME (K CVC4_Proof_Parse.parse_proof),
replay = NONE }
@@ -127,7 +128,7 @@
"--max-time=" ^ string_of_int (Real.ceil (Config.get ctxt SMT_Config.timeout))]),
smt_options = [(":produce-proofs", "true")],
default_max_relevant = 200 (* FUDGE *),
- outcome = on_first_non_unsupported_line (outcome_of "unsat" "sat" "unknown"),
+ outcome = on_first_non_unsupported_line (outcome_of "unsat" "sat" "unknown" "timeout"),
parse_proof = SOME (K VeriT_Proof_Parse.parse_proof),
replay = SOME Verit_Replay.replay }
@@ -157,7 +158,7 @@
options = z3_options,
smt_options = [(":produce-proofs", "true")],
default_max_relevant = 350 (* FUDGE *),
- outcome = on_first_line (outcome_of "unsat" "sat" "unknown"),
+ outcome = on_first_line (outcome_of "unsat" "sat" "unknown" "timeout"),
parse_proof = SOME Z3_Replay.parse_proof,
replay = SOME Z3_Replay.replay }
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar_proof.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar_proof.ML Sun Jun 09 22:22:36 2019 +0200
@@ -303,7 +303,7 @@
|> Print_Mode.setmp [] (Syntax.unparse_term ctxt #> Pretty.string_of)
|> simplify_spaces
|> maybe_quote keywords),
- ctxt |> perhaps (try (Variable.auto_fixes term)))
+ ctxt |> perhaps (try (Proof_Context.augment term)))
fun using_facts [] [] = ""
| using_facts ls ss = enclose "using " " " (space_implode " " (map string_of_label ls @ ss))
@@ -323,7 +323,7 @@
maybe_quote keywords (simplify_spaces (Print_Mode.setmp [] (Syntax.string_of_typ ctxt) T))
fun add_frees xs (s, ctxt) =
- (s ^ space_implode " and " (map of_free xs), ctxt |> fold Variable.auto_fixes (map Free xs))
+ (s ^ space_implode " and " (map of_free xs), ctxt |> fold Proof_Context.augment (map Free xs))
fun add_fix _ [] = I
| add_fix ind xs = add_str (of_indent ind ^ "fix ") #> add_frees xs #> add_str "\n"
--- a/src/HOL/Tools/Transfer/transfer.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Transfer/transfer.ML Sun Jun 09 22:22:36 2019 +0200
@@ -124,36 +124,27 @@
pred_data = Symtab.merge (K true) (pd1, pd2) }
)
-fun get_transfer_raw ctxt = ctxt
- |> (Item_Net.content o #transfer_raw o Data.get o Context.Proof)
+val get_transfer_raw = Item_Net.content o #transfer_raw o Data.get o Context.Proof
-fun get_known_frees ctxt = ctxt
- |> (#known_frees o Data.get o Context.Proof)
+val get_known_frees = #known_frees o Data.get o Context.Proof
-fun get_compound_lhs ctxt = ctxt
- |> (#compound_lhs o Data.get o Context.Proof)
+val get_compound_lhs = #compound_lhs o Data.get o Context.Proof
-fun get_compound_rhs ctxt = ctxt
- |> (#compound_rhs o Data.get o Context.Proof)
+val get_compound_rhs = #compound_rhs o Data.get o Context.Proof
-fun get_relator_eq_item_net ctxt = (#relator_eq o Data.get o Context.Proof) ctxt
+val get_relator_eq_item_net = #relator_eq o Data.get o Context.Proof
-fun get_relator_eq ctxt = ctxt
- |> (Item_Net.content o #relator_eq o Data.get o Context.Proof)
- |> map safe_mk_meta_eq
+val get_relator_eq =
+ map safe_mk_meta_eq o Item_Net.content o #relator_eq o Data.get o Context.Proof
-fun get_sym_relator_eq ctxt = ctxt
- |> (Item_Net.content o #relator_eq o Data.get o Context.Proof)
- |> map (Thm.symmetric o safe_mk_meta_eq)
+val get_sym_relator_eq =
+ map (Thm.symmetric o safe_mk_meta_eq) o Item_Net.content o #relator_eq o Data.get o Context.Proof
-fun get_relator_eq_raw ctxt = ctxt
- |> (Item_Net.content o #relator_eq_raw o Data.get o Context.Proof)
+val get_relator_eq_raw = Item_Net.content o #relator_eq_raw o Data.get o Context.Proof
-fun get_relator_domain ctxt = ctxt
- |> (Item_Net.content o #relator_domain o Data.get o Context.Proof)
+val get_relator_domain = Item_Net.content o #relator_domain o Data.get o Context.Proof
-fun get_pred_data ctxt = ctxt
- |> (#pred_data o Data.get o Context.Proof)
+val get_pred_data = #pred_data o Data.get o Context.Proof
fun map_data f1 f2 f3 f4 f5 f6 f7 f8
{ transfer_raw, known_frees, compound_lhs, compound_rhs,
@@ -358,7 +349,7 @@
in
forall_elim (thm COMP (equal_thm COMP @{thm equal_elim_rule2}))
end
- handle TERM _ => thm
+ handle TERM _ => thm
fun abstract_domains_transfer ctxt thm =
let
@@ -404,14 +395,14 @@
(** Adding transfer domain rules **)
-fun prep_transfer_domain_thm ctxt thm =
- (abstract_equalities_domain ctxt o detect_transfer_rules) thm
+fun prep_transfer_domain_thm ctxt =
+ abstract_equalities_domain ctxt o detect_transfer_rules
-fun add_transfer_domain_thm thm ctxt = (add_transfer_thm o
- prep_transfer_domain_thm (Context.proof_of ctxt)) thm ctxt
+fun add_transfer_domain_thm thm ctxt =
+ (add_transfer_thm o prep_transfer_domain_thm (Context.proof_of ctxt)) thm ctxt
-fun del_transfer_domain_thm thm ctxt = (del_transfer_thm o
- prep_transfer_domain_thm (Context.proof_of ctxt)) thm ctxt
+fun del_transfer_domain_thm thm ctxt =
+ (del_transfer_thm o prep_transfer_domain_thm (Context.proof_of ctxt)) thm ctxt
(** Transfer proof method **)
@@ -495,32 +486,26 @@
end
(* create a lambda term of the same shape as the given term *)
-fun skeleton (is_atom : term -> bool) ctxt t =
+fun skeleton is_atom =
let
fun dummy ctxt =
- let
- val (c, ctxt) = yield_singleton Variable.variant_fixes "a" ctxt
- in
- (Free (c, dummyT), ctxt)
- end
- fun go (Bound i) ctxt = (Bound i, ctxt)
- | go (Abs (x, _, t)) ctxt =
- let
- val (t', ctxt) = go t ctxt
- in
- (Abs (x, dummyT, t'), ctxt)
- end
- | go (tu as (t $ u)) ctxt =
- if is_atom tu andalso not (Term.is_open tu) then dummy ctxt else
- let
- val (t', ctxt) = go t ctxt
- val (u', ctxt) = go u ctxt
- in
- (t' $ u', ctxt)
- end
- | go _ ctxt = dummy ctxt
+ let val (c, ctxt') = yield_singleton Variable.variant_fixes "a" ctxt
+ in (Free (c, dummyT), ctxt') end
+ fun skel (Bound i) ctxt = (Bound i, ctxt)
+ | skel (Abs (x, _, t)) ctxt =
+ let val (t', ctxt) = skel t ctxt
+ in (Abs (x, dummyT, t'), ctxt) end
+ | skel (tu as t $ u) ctxt =
+ if is_atom tu andalso not (Term.is_open tu) then dummy ctxt
+ else
+ let
+ val (t', ctxt) = skel t ctxt
+ val (u', ctxt) = skel u ctxt
+ in (t' $ u', ctxt) end
+ | skel _ ctxt = dummy ctxt
in
- go t ctxt |> fst |> Syntax.check_term ctxt
+ fn ctxt => fn t =>
+ fst (skel t ctxt) |> Syntax.check_term ctxt (* FIXME avoid syntax operation!? *)
end
(** Monotonicity analysis **)
@@ -580,7 +565,7 @@
fun matches_list ctxt term =
is_some o find_first (fn pat => Pattern.matches (Proof_Context.theory_of ctxt) (pat, term))
-fun transfer_rule_of_term ctxt equiv t : thm =
+fun transfer_rule_of_term ctxt equiv t =
let
val compound_rhs = get_compound_rhs ctxt
fun is_rhs t = compound_rhs |> retrieve_terms t |> matches_list ctxt t
@@ -599,11 +584,11 @@
((Name.clean_index (prep a, idx), \<^typ>\<open>bool \<Rightarrow> bool \<Rightarrow> bool\<close>), Thm.cterm_of ctxt' t)
in
thm
- |> Thm.generalize (tfrees, rnames @ frees) idx
- |> Thm.instantiate (map tinst binsts, map inst binsts)
+ |> Thm.generalize (tfrees, rnames @ frees) idx
+ |> Thm.instantiate (map tinst binsts, map inst binsts)
end
-fun transfer_rule_of_lhs ctxt t : thm =
+fun transfer_rule_of_lhs ctxt t =
let
val compound_lhs = get_compound_lhs ctxt
fun is_lhs t = compound_lhs |> retrieve_terms t |> matches_list ctxt t
@@ -622,8 +607,8 @@
((Name.clean_index (prep a, idx), \<^typ>\<open>bool \<Rightarrow> bool \<Rightarrow> bool\<close>), Thm.cterm_of ctxt' t)
in
thm
- |> Thm.generalize (tfrees, rnames @ frees) idx
- |> Thm.instantiate (map tinst binsts, map inst binsts)
+ |> Thm.generalize (tfrees, rnames @ frees) idx
+ |> Thm.instantiate (map tinst binsts, map inst binsts)
end
fun eq_rules_tac ctxt eq_rules =
@@ -719,11 +704,8 @@
fun transferred ctxt extra_rules thm =
let
- val start_rule = @{thm transfer_start}
- val start_rule' = @{thm transfer_start'}
val rules = extra_rules @ get_transfer_raw ctxt
val eq_rules = get_relator_eq_raw ctxt
- val err_msg = "Transfer failed to convert goal to an object-logic formula"
val pre_simps = @{thms transfer_forall_eq transfer_implies_eq}
val thm1 = Drule.forall_intr_vars thm
val instT =
@@ -733,24 +715,22 @@
|> Thm.instantiate (instT, [])
|> Raw_Simplifier.rewrite_rule ctxt pre_simps
val ctxt' = Variable.declare_names (Thm.full_prop_of thm2) ctxt
- val t = HOLogic.dest_Trueprop (Thm.concl_of thm2)
- val rule = transfer_rule_of_lhs ctxt' t
- val tac =
- resolve_tac ctxt' [thm2 RS start_rule', thm2 RS start_rule] 1 THEN
- (resolve_tac ctxt' [rule]
- THEN_ALL_NEW
- (SOLVED' (REPEAT_ALL_NEW (resolve_tac ctxt' rules)
- THEN_ALL_NEW (DETERM o eq_rules_tac ctxt' eq_rules)))) 1
- handle TERM (_, ts) => raise TERM (err_msg, ts)
- val goal = Thm.cterm_of ctxt' (HOLogic.mk_Trueprop (Var (("P", 0), \<^typ>\<open>bool\<close>)))
- val thm3 = Goal.prove_internal ctxt' [] goal (K tac)
- val tnames = map (fst o dest_TFree o Thm.typ_of o snd) instT
+ val rule = transfer_rule_of_lhs ctxt' (HOLogic.dest_Trueprop (Thm.concl_of thm2))
in
- thm3
- |> Raw_Simplifier.rewrite_rule ctxt' post_simps
- |> Simplifier.norm_hhf ctxt'
- |> Drule.generalize (tnames, [])
- |> Drule.zero_var_indexes
+ Goal.prove_internal ctxt' []
+ (Thm.cterm_of ctxt' (HOLogic.mk_Trueprop (Var (("P", 0), \<^typ>\<open>bool\<close>))))
+ (fn _ =>
+ resolve_tac ctxt' [thm2 RS @{thm transfer_start'}, thm2 RS @{thm transfer_start}] 1 THEN
+ (resolve_tac ctxt' [rule]
+ THEN_ALL_NEW
+ (SOLVED' (REPEAT_ALL_NEW (resolve_tac ctxt' rules)
+ THEN_ALL_NEW (DETERM o eq_rules_tac ctxt' eq_rules)))) 1
+ handle TERM (_, ts) =>
+ raise TERM ("Transfer failed to convert goal to an object-logic formula", ts))
+ |> Raw_Simplifier.rewrite_rule ctxt' post_simps
+ |> Simplifier.norm_hhf ctxt'
+ |> Drule.generalize (map (fst o dest_TFree o Thm.typ_of o snd) instT, [])
+ |> Drule.zero_var_indexes
end
(*
handle THM _ => thm
@@ -758,10 +738,8 @@
fun untransferred ctxt extra_rules thm =
let
- val start_rule = @{thm untransfer_start}
val rules = extra_rules @ get_transfer_raw ctxt
val eq_rules = get_relator_eq_raw ctxt
- val err_msg = "Transfer failed to convert goal to an object-logic formula"
val pre_simps = @{thms transfer_forall_eq transfer_implies_eq}
val thm1 = Drule.forall_intr_vars thm
val instT =
@@ -773,22 +751,21 @@
val ctxt' = Variable.declare_names (Thm.full_prop_of thm2) ctxt
val t = HOLogic.dest_Trueprop (Thm.concl_of thm2)
val rule = transfer_rule_of_term ctxt' true t
- val tac =
- resolve_tac ctxt' [thm2 RS start_rule] 1 THEN
- (resolve_tac ctxt' [rule]
- THEN_ALL_NEW
- (SOLVED' (REPEAT_ALL_NEW (resolve_tac ctxt' rules)
- THEN_ALL_NEW (DETERM o eq_rules_tac ctxt' eq_rules)))) 1
- handle TERM (_, ts) => raise TERM (err_msg, ts)
- val goal = Thm.cterm_of ctxt' (HOLogic.mk_Trueprop (Var (("P", 0), \<^typ>\<open>bool\<close>)))
- val thm3 = Goal.prove_internal ctxt' [] goal (K tac)
- val tnames = map (fst o dest_TFree o Thm.typ_of o snd) instT
in
- thm3
- |> Raw_Simplifier.rewrite_rule ctxt' post_simps
- |> Simplifier.norm_hhf ctxt'
- |> Drule.generalize (tnames, [])
- |> Drule.zero_var_indexes
+ Goal.prove_internal ctxt' []
+ (Thm.cterm_of ctxt' (HOLogic.mk_Trueprop (Var (("P", 0), \<^typ>\<open>bool\<close>))))
+ (fn _ =>
+ resolve_tac ctxt' [thm2 RS @{thm untransfer_start}] 1 THEN
+ (resolve_tac ctxt' [rule]
+ THEN_ALL_NEW
+ (SOLVED' (REPEAT_ALL_NEW (resolve_tac ctxt' rules)
+ THEN_ALL_NEW (DETERM o eq_rules_tac ctxt' eq_rules)))) 1
+ handle TERM (_, ts) =>
+ raise TERM ("Transfer failed to convert goal to an object-logic formula", ts))
+ |> Raw_Simplifier.rewrite_rule ctxt' post_simps
+ |> Simplifier.norm_hhf ctxt'
+ |> Drule.generalize (map (fst o dest_TFree o Thm.typ_of o snd) instT, [])
+ |> Drule.zero_var_indexes
end
(** Methods and attributes **)
@@ -863,7 +840,8 @@
map_pred_data' morph_thm morph_thm (map morph_thm)
end
-fun lookup_pred_data ctxt type_name = Symtab.lookup (get_pred_data ctxt) type_name
+fun lookup_pred_data ctxt type_name =
+ Symtab.lookup (get_pred_data ctxt) type_name
|> Option.map (morph_pred_data (Morphism.transfer_morphism' ctxt))
fun update_pred_data type_name qinfo ctxt =
--- a/src/HOL/Tools/Transfer/transfer_bnf.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/Transfer/transfer_bnf.ML Sun Jun 09 22:22:36 2019 +0200
@@ -72,55 +72,56 @@
constraint_def |> Thm.prop_of |> HOLogic.dest_Trueprop |> fst o HOLogic.dest_eq
|> head_of |> fst o dest_Const
val live = live_of_bnf bnf
- val (((As, Bs), Ds), ctxt) = ctxt
+ val (((As, Bs), Ds), ctxt') = ctxt
|> mk_TFrees live
||>> mk_TFrees live
||>> mk_TFrees' (map Type.sort_of_atyp (deads_of_bnf bnf))
val relator = mk_rel_of_bnf Ds As Bs bnf
val relsT = map2 mk_pred2T As Bs
- val (args, ctxt) = Ctr_Sugar_Util.mk_Frees "R" relsT ctxt
+ val (args, ctxt'') = Ctr_Sugar_Util.mk_Frees "R" relsT ctxt'
val concl = HOLogic.mk_Trueprop (mk_relation_constraint constr_name (list_comb (relator, args)))
val assms = map (HOLogic.mk_Trueprop o (mk_relation_constraint constr_name)) args
val goal = Logic.list_implies (assms, concl)
in
- (goal, ctxt)
+ (goal, ctxt'')
end
fun prove_relation_side_constraint ctxt bnf constraint_def =
let
- val old_ctxt = ctxt
- val (goal, ctxt) = generate_relation_constraint_goal ctxt bnf constraint_def
- val thm = Goal.prove_sorry ctxt [] [] goal
- (fn {context = ctxt, prems = _} => side_constraint_tac bnf [constraint_def] ctxt 1)
- |> Thm.close_derivation
+ val (goal, ctxt') = generate_relation_constraint_goal ctxt bnf constraint_def
in
- Drule.zero_var_indexes (singleton (Variable.export ctxt old_ctxt) thm)
+ Goal.prove_sorry ctxt' [] [] goal (fn {context = goal_ctxt, ...} =>
+ side_constraint_tac bnf [constraint_def] goal_ctxt 1)
+ |> Thm.close_derivation
+ |> singleton (Variable.export ctxt' ctxt)
+ |> Drule.zero_var_indexes
end
fun prove_relation_bi_constraint ctxt bnf constraint_def side_constraints =
let
- val old_ctxt = ctxt
- val (goal, ctxt) = generate_relation_constraint_goal ctxt bnf constraint_def
- val thm = Goal.prove_sorry ctxt [] [] goal
- (fn {context = ctxt, prems = _} => bi_constraint_tac constraint_def side_constraints ctxt 1)
- |> Thm.close_derivation
+ val (goal, ctxt') = generate_relation_constraint_goal ctxt bnf constraint_def
in
- Drule.zero_var_indexes (singleton (Variable.export ctxt old_ctxt) thm)
+ Goal.prove_sorry ctxt' [] [] goal (fn {context = goal_ctxt, ...} =>
+ bi_constraint_tac constraint_def side_constraints goal_ctxt 1)
+ |> Thm.close_derivation
+ |> singleton (Variable.export ctxt' ctxt)
+ |> Drule.zero_var_indexes
end
-val defs = [("left_total_rel", @{thm left_total_alt_def}), ("right_total_rel", @{thm right_total_alt_def}),
+val defs =
+ [("left_total_rel", @{thm left_total_alt_def}), ("right_total_rel", @{thm right_total_alt_def}),
("left_unique_rel", @{thm left_unique_alt_def}), ("right_unique_rel", @{thm right_unique_alt_def})]
-fun prove_relation_constraints bnf lthy =
+fun prove_relation_constraints bnf ctxt =
let
val transfer_attr = @{attributes [transfer_rule]}
val Tname = base_name_of_bnf bnf
- val defs = map (apsnd (prove_relation_side_constraint lthy bnf)) defs
- val bi_total = prove_relation_bi_constraint lthy bnf @{thm bi_total_alt_def}
+ val defs = map (apsnd (prove_relation_side_constraint ctxt bnf)) defs
+ val bi_total = prove_relation_bi_constraint ctxt bnf @{thm bi_total_alt_def}
[snd (nth defs 0), snd (nth defs 1)]
- val bi_unique = prove_relation_bi_constraint lthy bnf @{thm bi_unique_alt_def}
+ val bi_unique = prove_relation_bi_constraint ctxt bnf @{thm bi_unique_alt_def}
[snd (nth defs 2), snd (nth defs 3)]
val defs = ("bi_total_rel", bi_total) :: ("bi_unique_rel", bi_unique) :: defs
in
@@ -174,23 +175,23 @@
fun prove_Domainp_rel ctxt bnf pred_def =
let
val live = live_of_bnf bnf
- val old_ctxt = ctxt
- val (((As, Bs), Ds), ctxt) = ctxt
+ val (((As, Bs), Ds), ctxt') = ctxt
|> mk_TFrees live
||>> mk_TFrees live
||>> mk_TFrees' (map Type.sort_of_atyp (deads_of_bnf bnf))
val relator = mk_rel_of_bnf Ds As Bs bnf
val relsT = map2 mk_pred2T As Bs
- val (args, ctxt) = Ctr_Sugar_Util.mk_Frees "R" relsT ctxt
+ val (args, ctxt'') = Ctr_Sugar_Util.mk_Frees "R" relsT ctxt'
val lhs = mk_Domainp (list_comb (relator, args))
val rhs = Term.list_comb (mk_pred_of_bnf Ds As bnf, map mk_Domainp args)
val goal = HOLogic.mk_eq (lhs, rhs) |> HOLogic.mk_Trueprop
- val thm = Goal.prove_sorry ctxt [] [] goal
- (fn {context = ctxt, prems = _} => Domainp_tac bnf pred_def ctxt 1)
- |> Thm.close_derivation
in
- Drule.zero_var_indexes (singleton (Variable.export ctxt old_ctxt) thm)
+ Goal.prove_sorry ctxt'' [] [] goal (fn {context = goal_ctxt, ...} =>
+ Domainp_tac bnf pred_def goal_ctxt 1)
+ |> Thm.close_derivation
+ |> singleton (Variable.export ctxt'' ctxt)
+ |> Drule.zero_var_indexes
end
fun predicator bnf lthy =
@@ -232,8 +233,8 @@
(* simplification rules for the predicator *)
-fun lookup_defined_pred_data lthy name =
- case Transfer.lookup_pred_data lthy name of
+fun lookup_defined_pred_data ctxt name =
+ case Transfer.lookup_pred_data ctxt name of
SOME data => data
| NONE => raise NO_PRED_DATA ()
--- a/src/HOL/Tools/functor.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/functor.ML Sun Jun 09 22:22:36 2019 +0200
@@ -187,7 +187,7 @@
else error ("Illegal additional type variable(s) in term: " ^ Syntax.string_of_term ctxt input_mapper);
val _ =
if null (Term.add_vars (singleton
- (Variable.export_terms (Variable.auto_fixes input_mapper ctxt) ctxt) input_mapper) [])
+ (Variable.export_terms (Proof_Context.augment input_mapper ctxt) ctxt) input_mapper) [])
then ()
else error ("Illegal locally free variable(s) in term: "
^ Syntax.string_of_term ctxt input_mapper);
--- a/src/HOL/Tools/inductive.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/inductive.ML Sun Jun 09 22:22:36 2019 +0200
@@ -594,7 +594,7 @@
fun mk_elim rl =
Thm.implies_intr cprop
(Tactic.rule_by_tactic ctxt (mk_cases_tac ctxt) (Thm.assume cprop RS rl))
- |> singleton (Variable.export (Variable.auto_fixes prop ctxt) ctxt);
+ |> singleton (Proof_Context.export (Proof_Context.augment prop ctxt) ctxt);
in
(case get_first (try mk_elim) elims of
SOME r => r
@@ -642,7 +642,7 @@
fun mk_simp_eq ctxt prop =
let
val thy = Proof_Context.theory_of ctxt;
- val ctxt' = Variable.auto_fixes prop ctxt;
+ val ctxt' = Proof_Context.augment prop ctxt;
val lhs_of = fst o HOLogic.dest_eq o HOLogic.dest_Trueprop o Thm.prop_of;
val substs =
retrieve_equations ctxt (HOLogic.dest_Trueprop prop)
@@ -661,7 +661,7 @@
in
infer_instantiate ctxt' inst eq
|> Conv.fconv_rule (Conv.arg_conv (Conv.arg_conv (Simplifier.full_rewrite ctxt')))
- |> singleton (Variable.export ctxt' ctxt)
+ |> singleton (Proof_Context.export ctxt' ctxt)
end
--- a/src/HOL/Tools/legacy_transfer.ML Sun Jun 09 20:24:16 2019 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,261 +0,0 @@
-(* Title: HOL/Tools/legacy_transfer.ML
- Author: Amine Chaieb, University of Cambridge, 2009
- Author: Jeremy Avigad, Carnegie Mellon University
- Author: Florian Haftmann, TU Muenchen
-
-Simple transfer principle on theorems.
-*)
-
-signature LEGACY_TRANSFER =
-sig
- datatype selection = Direction of term * term | Hints of string list | Prop
- val transfer: Context.generic -> selection -> string list -> thm -> thm list
- type entry
- val add: thm -> bool -> entry -> Context.generic -> Context.generic
- val del: thm -> entry -> Context.generic -> Context.generic
- val drop: thm -> Context.generic -> Context.generic
-end;
-
-structure Legacy_Transfer : LEGACY_TRANSFER =
-struct
-
-(* data administration *)
-
-val direction_of = Thm.dest_binop o Thm.dest_arg o Thm.cprop_of;
-
-val transfer_morphism_key = Drule.strip_imp_concl (Thm.cprop_of @{thm transfer_morphismI});
-
-fun check_morphism_key ctxt key =
- let
- val _ = Thm.match (transfer_morphism_key, Thm.cprop_of key)
- handle Pattern.MATCH => error ("Transfer: expected theorem of the form "
- ^ quote (Syntax.string_of_term ctxt (Thm.term_of transfer_morphism_key)));
- in direction_of key end;
-
-type entry = { inj : thm list, embed : thm list, return : thm list, cong : thm list,
- hints : string list };
-
-val empty_entry = { inj = [], embed = [], return = [], cong = [], hints = [] };
-fun merge_entry ({ inj = inj1, embed = embed1, return = return1, cong = cong1, hints = hints1 } : entry,
- { inj = inj2, embed = embed2, return = return2, cong = cong2, hints = hints2 } : entry) =
- { inj = merge Thm.eq_thm (inj1, inj2), embed = merge Thm.eq_thm (embed1, embed2),
- return = merge Thm.eq_thm (return1, return2), cong = merge Thm.eq_thm (cong1, cong2),
- hints = merge (op =) (hints1, hints2) };
-
-structure Data = Generic_Data
-(
- type T = (thm * entry) list;
- val empty = [];
- val extend = I;
- val merge = AList.join Thm.eq_thm (K merge_entry);
-);
-
-
-(* data lookup *)
-
-fun transfer_rules_of ({ inj, embed, return, cong, ... } : entry) =
- (inj, embed, return, cong);
-
-fun get_by_direction context (a, D) =
- let
- val ctxt = Context.proof_of context;
- val a0 = Thm.cterm_of ctxt a;
- val D0 = Thm.cterm_of ctxt D;
- fun eq_direction ((a, D), thm') =
- let
- val (a', D') = direction_of thm';
- in a aconvc a' andalso D aconvc D' end;
- in case AList.lookup eq_direction (Data.get context) (a0, D0) of
- SOME e => ((a0, D0), transfer_rules_of e)
- | NONE => error ("Transfer: no such instance: ("
- ^ Syntax.string_of_term ctxt a ^ ", " ^ Syntax.string_of_term ctxt D ^ ")")
- end;
-
-fun get_by_hints context hints =
- let
- val insts = map_filter (fn (k, e) => if exists (member (op =) (#hints e)) hints
- then SOME (direction_of k, transfer_rules_of e) else NONE) (Data.get context);
- val _ = if null insts then error ("Transfer: no such labels: " ^ commas_quote hints) else ();
- in insts end;
-
-fun splits P [] = []
- | splits P (xs as (x :: _)) =
- let
- val (pss, qss) = List.partition (P x) xs;
- in if null pss then [qss] else if null qss then [pss] else pss :: splits P qss end;
-
-fun get_by_prop context t =
- let
- val tys = map snd (Term.add_vars t []);
- val _ = if null tys then error "Transfer: unable to guess instance" else ();
- val tyss = splits (curry Type.could_unify) tys;
- val get_ty = Thm.typ_of_cterm o fst o direction_of;
- val insts = map_filter (fn tys => get_first (fn (k, e) =>
- if Type.could_unify (hd tys, range_type (get_ty k))
- then SOME (direction_of k, transfer_rules_of e)
- else NONE) (Data.get context)) tyss;
- val _ = if null insts then
- error "Transfer: no instances, provide direction or hints explicitly" else ();
- in insts end;
-
-
-(* applying transfer data *)
-
-fun transfer_thm ((raw_a, raw_D), (inj, embed, return, cong)) leave ctxt1 thm =
- let
- (* identify morphism function *)
- val ([a, D], ctxt2) = ctxt1
- |> Variable.import true (map Drule.mk_term [raw_a, raw_D])
- |>> map Drule.dest_term o snd;
- val transform = Thm.apply @{cterm "Trueprop"} o Thm.apply D;
- val T = Thm.typ_of_cterm a;
- val (aT, bT) = (Term.range_type T, Term.domain_type T);
-
- (* determine variables to transfer *)
- val ctxt3 = ctxt2
- |> Variable.declare_thm thm
- |> Variable.declare_term (Thm.term_of a)
- |> Variable.declare_term (Thm.term_of D);
- val vars = filter (fn ((v, _), T) => Type.could_unify (T, aT) andalso
- not (member (op =) leave v)) (Term.add_vars (Thm.prop_of thm) []);
- val c_vars = map (Thm.cterm_of ctxt3 o Var) vars;
- val (vs', ctxt4) = Variable.variant_fixes (map (fst o fst) vars) ctxt3;
- val c_vars' = map (Thm.cterm_of ctxt3 o (fn v => Free (v, bT))) vs';
- val c_exprs' = map (Thm.apply a) c_vars';
-
- (* transfer *)
- val (hyps, ctxt5) = ctxt4
- |> Assumption.add_assumes (map transform c_vars');
- val simpset =
- put_simpset HOL_ss ctxt5 addsimps (inj @ embed @ return)
- |> fold Simplifier.add_cong cong;
- val thm' = thm
- |> infer_instantiate ctxt5 (map (#1 o dest_Var o Thm.term_of) c_vars ~~ c_exprs')
- |> fold_rev Thm.implies_intr (map Thm.cprop_of hyps)
- |> Simplifier.asm_full_simplify simpset
- in singleton (Variable.export ctxt5 ctxt1) thm' end;
-
-fun transfer_thm_multiple insts leave ctxt thm =
- map (fn inst => transfer_thm inst leave ctxt thm) insts;
-
-datatype selection = Direction of term * term | Hints of string list | Prop;
-
-fun insts_for context thm (Direction direction) = [get_by_direction context direction]
- | insts_for context thm (Hints hints) = get_by_hints context hints
- | insts_for context thm Prop = get_by_prop context (Thm.prop_of thm);
-
-fun transfer context selection leave thm =
- transfer_thm_multiple (insts_for context thm selection) leave (Context.proof_of context) thm;
-
-
-(* maintaining transfer data *)
-
-fun extend_entry ctxt (a, D) guess
- { inj = inj1, embed = embed1, return = return1, cong = cong1, hints = hints1 }
- { inj = inj2, embed = embed2, return = return2, cong = cong2, hints = hints2 } =
- let
- val guessed = if guess
- then map (fn thm => transfer_thm
- ((a, D), (if null inj1 then inj2 else inj1, [], [], cong1)) [] ctxt thm RS sym) embed1
- else [];
- in
- { inj = union Thm.eq_thm inj1 inj2, embed = union Thm.eq_thm embed1 embed2,
- return = union Thm.eq_thm guessed (union Thm.eq_thm return1 return2),
- cong = union Thm.eq_thm cong1 cong2, hints = union (op =) hints1 hints2 }
- end;
-
-fun diminish_entry
- { inj = inj0, embed = embed0, return = return0, cong = cong0, hints = hints0 }
- { inj = inj2, embed = embed2, return = return2, cong = cong2, hints = hints2 } =
- { inj = subtract Thm.eq_thm inj0 inj2, embed = subtract Thm.eq_thm embed0 embed2,
- return = subtract Thm.eq_thm return0 return2, cong = subtract Thm.eq_thm cong0 cong2,
- hints = subtract (op =) hints0 hints2 };
-
-fun add key guess entry context =
- let
- val ctxt = Context.proof_of context;
- val a_D = check_morphism_key ctxt key;
- in
- context
- |> Data.map (AList.map_default Thm.eq_thm
- (key, empty_entry) (extend_entry ctxt a_D guess entry))
- end;
-
-fun del key entry = Data.map (AList.map_entry Thm.eq_thm key (diminish_entry entry));
-
-fun drop key = Data.map (AList.delete Thm.eq_thm key);
-
-
-(* syntax *)
-
-local
-
-fun these scan = Scan.optional scan [];
-fun these_pair scan = Scan.optional scan ([], []);
-
-fun keyword k = Scan.lift (Args.$$$ k) >> K ();
-fun keyword_colon k = Scan.lift (Args.$$$ k -- Args.colon) >> K ();
-
-val addN = "add";
-val delN = "del";
-val keyN = "key";
-val modeN = "mode";
-val automaticN = "automatic";
-val manualN = "manual";
-val injN = "inj";
-val embedN = "embed";
-val returnN = "return";
-val congN = "cong";
-val labelsN = "labels";
-
-val leavingN = "leaving";
-val directionN = "direction";
-
-val any_keyword = keyword_colon addN || keyword_colon delN || keyword_colon keyN
- || keyword_colon modeN || keyword_colon injN || keyword_colon embedN || keyword_colon returnN
- || keyword_colon congN || keyword_colon labelsN
- || keyword_colon leavingN || keyword_colon directionN;
-
-val thms = Scan.repeats (Scan.unless any_keyword Attrib.multi_thm);
-val names = Scan.repeat (Scan.unless any_keyword (Scan.lift Args.name))
-
-val mode = keyword_colon modeN |-- ((Scan.lift (Args.$$$ manualN) >> K false)
- || (Scan.lift (Args.$$$ automaticN) >> K true));
-val inj = (keyword_colon injN |-- thms) -- these (keyword_colon delN |-- thms);
-val embed = (keyword_colon embedN |-- thms) -- these (keyword_colon delN |-- thms);
-val return = (keyword_colon returnN |-- thms) -- these (keyword_colon delN |-- thms);
-val cong = (keyword_colon congN |-- thms) -- these (keyword_colon delN |-- thms);
-val labels = (keyword_colon labelsN |-- names) -- these (keyword_colon delN |-- names);
-
-val entry_pair = these_pair inj -- these_pair embed
- -- these_pair return -- these_pair cong -- these_pair labels
- >> (fn (((((inja, injd), (embeda, embedd)), (returna, returnd)), (conga, congd)),
- (hintsa, hintsd)) =>
- ({ inj = inja, embed = embeda, return = returna, cong = conga, hints = hintsa },
- { inj = injd, embed = embedd, return = returnd, cong = congd, hints = hintsd }));
-
-val selection = (keyword_colon directionN |-- (Args.term -- Args.term) >> Direction)
- || these names >> (fn hints => if null hints then Prop else Hints hints);
-
-in
-
-val _ =
- Theory.setup
- (Attrib.setup @{binding transfer}
- (keyword delN >> K (Thm.declaration_attribute drop)
- || keyword addN |-- Scan.optional mode true -- entry_pair
- >> (fn (guess, (entry_add, entry_del)) =>
- Thm.declaration_attribute (fn thm => add thm guess entry_add #> del thm entry_del))
- || keyword_colon keyN |-- Attrib.thm
- >> (fn key => Thm.declaration_attribute (fn thm =>
- add key false { inj = [], embed = [], return = [thm], cong = [], hints = [] })))
- "install transfer data" #>
- Attrib.setup @{binding transferred}
- (selection -- these (keyword_colon leavingN |-- names)
- >> (fn (selection, leave) => Thm.rule_attribute [] (fn context =>
- Conjunction.intr_balanced o transfer context selection leave)))
- "transfer theorems");
-
-end;
-
-end;
--- a/src/HOL/Tools/semiring_normalizer.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/semiring_normalizer.ML Sun Jun 09 22:22:36 2019 +0200
@@ -169,7 +169,7 @@
{semiring = raw_semiring0, ring = raw_ring0, field = raw_field0, idom = raw_idom, ideal = raw_ideal}
lthy =
let
- val ctxt' = fold Variable.auto_fixes (fst raw_semiring0 @ fst raw_ring0 @ fst raw_field0) lthy;
+ val ctxt' = fold Proof_Context.augment (fst raw_semiring0 @ fst raw_ring0 @ fst raw_field0) lthy;
val prepare_ops = apfst (Variable.export_terms ctxt' lthy #> map (Thm.cterm_of lthy));
val raw_semiring = prepare_ops raw_semiring0;
val raw_ring = prepare_ops raw_ring0;
--- a/src/HOL/Tools/set_comprehension_pointfree.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/set_comprehension_pointfree.ML Sun Jun 09 22:22:36 2019 +0200
@@ -466,7 +466,7 @@
fun conv ctxt t =
let
- val ([t'], ctxt') = Variable.import_terms true [t] (Variable.declare_term t ctxt)
+ val (t', ctxt') = yield_singleton (Variable.import_terms true) t (Variable.declare_term t ctxt)
val ct = Thm.cterm_of ctxt' t'
fun unfold_conv thms =
Raw_Simplifier.rewrite_cterm (false, false, false) (K (K NONE))
--- a/src/HOL/Tools/value_command.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Tools/value_command.ML Sun Jun 09 22:22:36 2019 +0200
@@ -55,7 +55,7 @@
val t = Syntax.read_term ctxt raw_t;
val t' = value_select name ctxt t;
val ty' = Term.type_of t';
- val ctxt' = Variable.auto_fixes t' ctxt;
+ val ctxt' = Proof_Context.augment t' ctxt;
val p = Print_Mode.with_modes modes (fn () =>
Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t'), Pretty.fbrk,
Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' ty')]) ();
--- a/src/HOL/Transcendental.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/Transcendental.thy Sun Jun 09 22:22:36 2019 +0200
@@ -2771,8 +2771,8 @@
using powr_realpow[of x "nat n"] powr_realpow[of x "nat (-n)"]
by (auto simp: field_simps powr_minus)
-lemma powr_numeral [simp]: "0 < x \<Longrightarrow> x powr (numeral n :: real) = x ^ (numeral n)"
- by (metis of_nat_numeral powr_realpow)
+lemma powr_numeral [simp]: "0 \<le> x \<Longrightarrow> x powr (numeral n :: real) = x ^ (numeral n)"
+ by (metis less_le power_zero_numeral powr_0 of_nat_numeral powr_realpow)
lemma powr_int:
assumes "x > 0"
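The hypothesis of powr_numeral is weakened from 0 < x to 0 <= x, so the simp rule now also covers x = 0, where both sides collapse to 0 because numeral n >= 1. A small illustrative instance (a sketch only, not part of the changeset; the one-line proof is a plausible guess given the new [simp] declaration):

    lemma "(0::real) powr 2 = 0 ^ 2"
      by simp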
--- a/src/HOL/ex/Commands.thy Sun Jun 09 20:24:16 2019 +0200
+++ b/src/HOL/ex/Commands.thy Sun Jun 09 22:22:36 2019 +0200
@@ -20,7 +20,7 @@
let
val ctxt = Toplevel.context_of st;
val t = Syntax.read_term ctxt s;
- val ctxt' = Variable.auto_fixes t ctxt;
+ val ctxt' = Proof_Context.augment t ctxt;
in Pretty.writeln (Syntax.pretty_term ctxt' t) end)));
\<close>
--- a/src/Provers/Arith/cancel_numeral_factor.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Provers/Arith/cancel_numeral_factor.ML Sun Jun 09 22:22:36 2019 +0200
@@ -45,9 +45,7 @@
let
val prems = Simplifier.prems_of ctxt;
val t = Thm.term_of ct;
- val ([t'], ctxt') = Variable.import_terms true [t] ctxt
- val export = singleton (Variable.export ctxt' ctxt)
- (* FIXME ctxt vs. ctxt' *)
+ val (t', ctxt') = yield_singleton (Variable.import_terms true) t ctxt
val (t1,t2) = Data.dest_bal t'
val (m1, t1') = Data.dest_coeff t1
@@ -65,13 +63,13 @@
else Data.mk_bal (Data.mk_coeff(n1,t1'), Data.mk_coeff(n2,t2'))
val reshape = (*Move d to the front and put the rest into standard form
i * #m * j == #d * (#n * (j * k)) *)
- Data.prove_conv [Data.norm_tac ctxt] ctxt prems
+ Data.prove_conv [Data.norm_tac ctxt'] ctxt' prems
(t', Data.mk_bal (newshape(n1,t1'), newshape(n2,t2')))
in
- Option.map (export o Data.simplify_meta_eq ctxt)
+ Option.map (singleton (Variable.export ctxt' ctxt) o Data.simplify_meta_eq ctxt')
(Data.prove_conv
- [Data.trans_tac ctxt reshape, resolve_tac ctxt [Data.cancel] 1,
- Data.numeral_simp_tac ctxt] ctxt prems (t', rhs))
+ [Data.trans_tac ctxt' reshape, resolve_tac ctxt' [Data.cancel] 1,
+ Data.numeral_simp_tac ctxt'] ctxt' prems (t', rhs))
end
(* FIXME avoid handling of generic exceptions *)
handle TERM _ => NONE
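The hunks above (and the analogous ones in cancel_numerals.ML, combine_numerals.ML and extract_common_term.ML below) all apply the same refactoring: the ad-hoc pattern match on Variable.import_terms is replaced by yield_singleton, and the subsequent proof steps uniformly use the imported context ctxt' before the result is exported back into ctxt. A minimal Isabelle/ML sketch of that idiom, where prove_eq is a hypothetical stand-in for the Data.prove_conv machinery:

```
(* Sketch only: prove_eq is a hypothetical placeholder for the actual
   Data.prove_conv / Data.simplify_meta_eq machinery of these simprocs. *)
fun simproc_sketch prove_eq ctxt ct =
  let
    val t = Thm.term_of ct;
    (* import the variables of t as fresh fixes, enriching the context *)
    val (t', ctxt') = yield_singleton (Variable.import_terms true) t ctxt;
    (* prove within the imported context ctxt' ... *)
    val some_thm = prove_eq ctxt' t';
  in
    (* ... and generalize the result back into the original context *)
    Option.map (singleton (Variable.export ctxt' ctxt)) some_thm
  end;
```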
--- a/src/Provers/Arith/cancel_numerals.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Provers/Arith/cancel_numerals.ML Sun Jun 09 22:22:36 2019 +0200
@@ -69,9 +69,7 @@
let
val prems = Simplifier.prems_of ctxt
val t = Thm.term_of ct
- val ([t'], ctxt') = Variable.import_terms true [t] ctxt
- val export = singleton (Variable.export ctxt' ctxt)
- (* FIXME ctxt vs. ctxt' (!?) *)
+ val (t', ctxt') = yield_singleton (Variable.import_terms true) t ctxt
val (t1,t2) = Data.dest_bal t'
val terms1 = Data.dest_sum t1
@@ -87,19 +85,19 @@
i + #m + j + k == #m + i + (j + k) *)
if n1=0 orelse n2=0 then (*trivial, so do nothing*)
raise TERM("cancel_numerals", [])
- else Data.prove_conv [Data.norm_tac ctxt] ctxt prems
+ else Data.prove_conv [Data.norm_tac ctxt'] ctxt' prems
(t', Data.mk_bal (newshape(n1,terms1'), newshape(n2,terms2')))
in
- Option.map (export o Data.simplify_meta_eq ctxt)
+ Option.map (singleton (Variable.export ctxt' ctxt) o Data.simplify_meta_eq ctxt')
(if n2 <= n1 then
Data.prove_conv
- [Data.trans_tac ctxt reshape, resolve_tac ctxt [Data.bal_add1] 1,
- Data.numeral_simp_tac ctxt] ctxt prems
+ [Data.trans_tac ctxt' reshape, resolve_tac ctxt' [Data.bal_add1] 1,
+ Data.numeral_simp_tac ctxt'] ctxt' prems
(t', Data.mk_bal (newshape(n1-n2,terms1'), Data.mk_sum T terms2'))
else
Data.prove_conv
- [Data.trans_tac ctxt reshape, resolve_tac ctxt [Data.bal_add2] 1,
- Data.numeral_simp_tac ctxt] ctxt prems
+ [Data.trans_tac ctxt' reshape, resolve_tac ctxt' [Data.bal_add2] 1,
+ Data.numeral_simp_tac ctxt'] ctxt' prems
(t', Data.mk_bal (Data.mk_sum T terms1', newshape(n2-n1,terms2'))))
end
(* FIXME avoid handling of generic exceptions *)
--- a/src/Provers/Arith/combine_numerals.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Provers/Arith/combine_numerals.ML Sun Jun 09 22:22:36 2019 +0200
@@ -68,9 +68,7 @@
fun proc ctxt ct =
let
val t = Thm.term_of ct
- val ([t'], ctxt') = Variable.import_terms true [t] ctxt
- val export = singleton (Variable.export ctxt' ctxt)
- (* FIXME ctxt vs. ctxt' (!?) *)
+ val (t', ctxt') = yield_singleton (Variable.import_terms true) t ctxt
val (u,m,n,terms) = find_repeated (Termtab.empty, [], Data.dest_sum t')
val T = Term.fastype_of u
@@ -79,13 +77,13 @@
i + #m + j + k == #m + i + (j + k) *)
if Data.iszero m orelse Data.iszero n then (*trivial, so do nothing*)
raise TERM("combine_numerals", [])
- else Data.prove_conv [Data.norm_tac ctxt] ctxt
+ else Data.prove_conv [Data.norm_tac ctxt'] ctxt'
(t', Data.mk_sum T ([Data.mk_coeff (m, u), Data.mk_coeff (n, u)] @ terms))
in
- Option.map (export o Data.simplify_meta_eq ctxt)
+ Option.map (singleton (Variable.export ctxt' ctxt) o Data.simplify_meta_eq ctxt')
(Data.prove_conv
- [Data.trans_tac ctxt reshape, resolve_tac ctxt [Data.left_distrib] 1,
- Data.numeral_simp_tac ctxt] ctxt
+ [Data.trans_tac ctxt' reshape, resolve_tac ctxt' [Data.left_distrib] 1,
+ Data.numeral_simp_tac ctxt'] ctxt'
(t', Data.mk_sum T (Data.mk_coeff(Data.add(m,n), u) :: terms)))
end
(* FIXME avoid handling of generic exceptions *)
--- a/src/Provers/Arith/extract_common_term.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Provers/Arith/extract_common_term.ML Sun Jun 09 22:22:36 2019 +0200
@@ -49,9 +49,7 @@
fun proc ctxt t =
let
val prems = Simplifier.prems_of ctxt;
- val ([t'], ctxt') = Variable.import_terms true [t] ctxt
- val export = singleton (Variable.export ctxt' ctxt)
- (* FIXME ctxt vs. ctxt' (!?) *)
+ val (t', ctxt') = yield_singleton (Variable.import_terms true) t ctxt
val (t1,t2) = Data.dest_bal t'
val terms1 = Data.dest_sum t1
@@ -59,7 +57,7 @@
val u = find_common (terms1,terms2)
val simp_th =
- case Data.simp_conv ctxt u of NONE => raise TERM("no simp", [])
+ case Data.simp_conv ctxt' u of NONE => raise TERM("no simp", [])
| SOME th => th
val terms1' = Data.find_first u terms1
and terms2' = Data.find_first u terms2
@@ -67,10 +65,10 @@
val t'' = Data.mk_bal (Data.mk_sum T (u::terms1'), Data.mk_sum T (u::terms2'))
val reshape =
- Goal.prove ctxt [] [] (Data.mk_eq (t', t'')) (K (Data.norm_tac ctxt))
+ Goal.prove ctxt' [] [] (Data.mk_eq (t', t'')) (K (Data.norm_tac ctxt))
in
- SOME (export (Data.simplify_meta_eq ctxt simp_th reshape))
+ SOME (singleton (Variable.export ctxt' ctxt) (Data.simplify_meta_eq ctxt' simp_th reshape))
end
(* FIXME avoid handling of generic exceptions *)
handle TERM _ => NONE
--- a/src/Pure/Isar/attrib.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Isar/attrib.ML Sun Jun 09 22:22:36 2019 +0200
@@ -557,11 +557,7 @@
(Scan.succeed (Thm.rule_attribute [] (K Tactic.make_elim)))
"destruct rule turned into elimination rule format" #>
setup \<^binding>\<open>no_vars\<close>
- (Scan.succeed (Thm.rule_attribute [] (fn context => fn th =>
- let
- val ctxt = Variable.set_body false (Context.proof_of context);
- val ((_, [th']), _) = Variable.import true [th] ctxt;
- in th' end)))
+ (Scan.succeed (Thm.rule_attribute [] (Variable.import_vars o Context.proof_of)))
"imported schematic variables" #>
setup \<^binding>\<open>atomize\<close>
(Scan.succeed Object_Logic.declare_atomize) "declaration of atomize rule" #>
--- a/src/Pure/Isar/class_declaration.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Isar/class_declaration.ML Sun Jun 09 22:22:36 2019 +0200
@@ -27,7 +27,7 @@
fun calculate thy class sups base_sort param_map assm_axiom =
let
- val empty_ctxt = Proof_Context.init_global thy;
+ val thy_ctxt = Proof_Context.init_global thy;
val certT = Thm.trim_context_ctyp o Thm.global_ctyp_of thy;
val cert = Thm.trim_context_cterm o Thm.global_cterm_of thy;
@@ -42,7 +42,7 @@
Element.instantiate_normalize_morphism ([], param_map_inst);
val typ_morph =
Element.instantiate_normalize_morphism ([(a_tfree, certT (TFree (Name.aT, [class])))], [])
- val (([raw_props], _, [(_, raw_inst_morph)], _, export_morph), _) = empty_ctxt
+ val (([raw_props], _, [(_, raw_inst_morph)], _, export_morph), _) = thy_ctxt
|> Expression.cert_goal_expression ([(class, (("", false),
(Expression.Named param_map_const, [])))], []);
val (props, inst_morph) =
@@ -60,11 +60,11 @@
val loc_intro_tac =
(case Locale.intros_of thy class of
(_, NONE) => all_tac
- | (_, SOME intro) => ALLGOALS (resolve_tac empty_ctxt [intro]));
+ | (_, SOME intro) => ALLGOALS (resolve_tac thy_ctxt [intro]));
val tac = loc_intro_tac
- THEN ALLGOALS (Proof_Context.fact_tac empty_ctxt (sup_axioms @ the_list assm_axiom));
- in Element.prove_witness empty_ctxt prop tac end) some_prop;
- val some_axiom = Option.map (Element.conclude_witness empty_ctxt) some_witn;
+ THEN ALLGOALS (Proof_Context.fact_tac thy_ctxt (sup_axioms @ the_list assm_axiom));
+ in Element.prove_witness thy_ctxt prop tac end) some_prop;
+ val some_axiom = Option.map (Element.conclude_witness thy_ctxt) some_witn;
(* canonical interpretation *)
val base_morph = inst_morph
@@ -75,7 +75,7 @@
(* assm_intro *)
fun prove_assm_intro thm =
let
- val ((_, [thm']), _) = Variable.import true [thm] empty_ctxt;
+ val ((_, [thm']), _) = Variable.import true [thm] thy_ctxt;
val const_eq_morph =
(case eq_morph of
SOME eq_morph => const_morph $> eq_morph
--- a/src/Pure/Isar/element.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Isar/element.ML Sun Jun 09 22:22:36 2019 +0200
@@ -426,7 +426,7 @@
let
val asms' = Attrib.map_specs (map (Attrib.attribute ctxt)) asms;
val (_, ctxt') = ctxt
- |> fold Variable.auto_fixes (maps (map #1 o #2) asms')
+ |> fold Proof_Context.augment (maps (map #1 o #2) asms')
|> Proof_Context.add_assms Assumption.assume_export asms';
in ctxt' end)
| init (Defines defs) = Context.map_proof (fn ctxt =>
@@ -436,7 +436,7 @@
let val (_, t') = Local_Defs.cert_def ctxt (K []) t (* FIXME adapt ps? *)
in (t', (b, [(t', ps)])) end);
val (_, ctxt') = ctxt
- |> fold Variable.auto_fixes (map #1 asms)
+ |> fold Proof_Context.augment (map #1 asms)
|> Proof_Context.add_assms Local_Defs.def_export (map #2 asms);
in ctxt' end)
| init (Notes (kind, facts)) = Attrib.generic_notes kind facts #> #2
--- a/src/Pure/Isar/expression.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Isar/expression.ML Sun Jun 09 22:22:36 2019 +0200
@@ -184,7 +184,7 @@
val (type_parms'', insts'') = chop (length type_parms') checked;
(* context *)
- val ctxt' = fold Variable.auto_fixes checked ctxt;
+ val ctxt' = fold Proof_Context.augment checked ctxt;
val certT = Thm.trim_context_ctyp o Thm.ctyp_of ctxt';
val cert = Thm.trim_context_cterm o Thm.cterm_of ctxt';
@@ -266,7 +266,7 @@
| restore_elem (elem as Lazy_Notes _, _) = elem;
fun prep (_, pats) (ctxt, t :: ts) =
- let val ctxt' = Variable.auto_fixes t ctxt
+ let val ctxt' = Proof_Context.augment t ctxt
in
((t, Syntax.check_props (Proof_Context.set_mode Proof_Context.mode_pattern ctxt') pats),
(ctxt', ts))
@@ -547,9 +547,9 @@
val goal_ctxt = ctxt
|> fix_params fixed
- |> (fold o fold) Variable.auto_fixes (propss @ eq_propss);
+ |> (fold o fold) Proof_Context.augment (propss @ eq_propss);
- val export = Variable.export_morphism goal_ctxt ctxt;
+ val export = Proof_Context.export_morphism goal_ctxt ctxt;
val exp_fact = Drule.zero_var_indexes_list o map Thm.strip_shyps o Morphism.fact export;
val exp_term = Term_Subst.zero_var_indexes o Morphism.term export;
val exp_typ = Logic.type_map exp_term;
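This hunk switches the export of goal results from Variable.export_morphism to Proof_Context.export_morphism, in line with building goal_ctxt via Proof_Context.augment instead of Variable.auto_fixes. A hedged sketch of applying such an export morphism to facts (export_results is a hypothetical helper; the assumption is that Proof_Context.export_morphism handles both variable and assumption export between the two contexts):

```
(* Hypothetical helper: map facts from a goal context built with
   Proof_Context.augment back into the original context. *)
fun export_results goal_ctxt ctxt =
  Morphism.fact (Proof_Context.export_morphism goal_ctxt ctxt);
```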
--- a/src/Pure/Isar/interpretation.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Isar/interpretation.ML Sun Jun 09 22:22:36 2019 +0200
@@ -70,7 +70,7 @@
val ((propss, eq_propss, deps, eqnss, export), expr_ctxt) = prep_expr expression initial_ctxt;
val (def_eqns, def_ctxt) =
augment_with_defs prep_term raw_defs deps expr_ctxt;
- val export' = Variable.export_morphism def_ctxt expr_ctxt;
+ val export' = Proof_Context.export_morphism def_ctxt expr_ctxt;
in (((propss, eq_propss, deps, eqnss, export, export'), def_eqns), def_ctxt) end;
in
--- a/src/Pure/Isar/isar_cmd.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Isar/isar_cmd.ML Sun Jun 09 22:22:36 2019 +0200
@@ -255,14 +255,14 @@
fun string_of_prop ctxt s =
let
val prop = Syntax.read_prop ctxt s;
- val ctxt' = Variable.auto_fixes prop ctxt;
+ val ctxt' = Proof_Context.augment prop ctxt;
in Pretty.string_of (Pretty.quote (Syntax.pretty_term ctxt' prop)) end;
fun string_of_term ctxt s =
let
val t = Syntax.read_term ctxt s;
val T = Term.type_of t;
- val ctxt' = Variable.auto_fixes t ctxt;
+ val ctxt' = Proof_Context.augment t ctxt;
in
Pretty.string_of
(Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t), Pretty.fbrk,
--- a/src/Pure/Isar/local_defs.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Isar/local_defs.ML Sun Jun 09 22:22:36 2019 +0200
@@ -249,15 +249,22 @@
|> (snd o Logic.dest_equals o Thm.prop_of)
|> conditional ? Logic.strip_imp_concl
|> (abs_def o #2 o cert_def ctxt get_pos);
- fun prove ctxt' def =
- Goal.prove ctxt'
- ((not (Variable.is_body ctxt') ? Variable.add_free_names ctxt' prop) []) [] prop
- (fn {context = ctxt'', ...} =>
- ALLGOALS
- (CONVERSION (meta_rewrite_conv ctxt'') THEN'
- rewrite_goal_tac ctxt'' [def] THEN'
- resolve_tac ctxt'' [Drule.reflexive_thm]))
- handle ERROR msg => cat_error msg "Failed to prove definitional specification";
+ fun prove def_ctxt0 def =
+ let
+ val def_ctxt = Proof_Context.augment prop def_ctxt0;
+ val def_thm =
+ Goal.prove def_ctxt [] [] prop
+ (fn {context = goal_ctxt, ...} =>
+ ALLGOALS
+ (CONVERSION (meta_rewrite_conv goal_ctxt) THEN'
+ rewrite_goal_tac goal_ctxt [def] THEN'
+ resolve_tac goal_ctxt [Drule.reflexive_thm]))
+ handle ERROR msg => cat_error msg "Failed to prove definitional specification";
+ in
+ def_thm
+ |> singleton (Proof_Context.export def_ctxt def_ctxt0)
+ |> Drule.zero_var_indexes
+ end;
in (((c, T), rhs), prove) end;
end;
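In the reworked prove, the free variables of prop are no longer collected by hand via Variable.add_free_names; instead the context is augmented with prop, the goal is proved there, and the theorem is exported back with its schematic variables renumbered. The export step in isolation, as a sketch (export_def is a hypothetical name):

```
(* Sketch of the final export step: turn the fixes of def_ctxt back into
   schematic variables of def_ctxt0 and normalize their indexes. *)
fun export_def def_ctxt def_ctxt0 def_thm =
  def_thm
  |> singleton (Proof_Context.export def_ctxt def_ctxt0)
  |> Drule.zero_var_indexes;
```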
--- a/src/Pure/Isar/proof.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Isar/proof.ML Sun Jun 09 22:22:36 2019 +0200
@@ -1093,7 +1093,7 @@
val (propss, binds) =
prep_propp (Proof_Context.set_mode Proof_Context.mode_schematic ctxt) propp;
val goal_ctxt = ctxt
- |> (fold o fold) Variable.auto_fixes propss
+ |> (fold o fold) Proof_Context.augment propss
|> fold Variable.bind_term binds;
fun after_qed' (result_ctxt, results) ctxt' = ctxt'
|> Proof_Context.restore_naming ctxt
--- a/src/Pure/Isar/proof_context.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Isar/proof_context.ML Sun Jun 09 22:22:36 2019 +0200
@@ -62,6 +62,7 @@
val facts_of: Proof.context -> Facts.T
val facts_of_fact: Proof.context -> string -> Facts.T
val markup_extern_fact: Proof.context -> string -> Markup.T list * xstring
+ val augment: term -> Proof.context -> Proof.context
val print_name: Proof.context -> string -> string
val pretty_name: Proof.context -> string -> Pretty.T
val pretty_term_abbrev: Proof.context -> term -> Pretty.T
@@ -224,6 +225,7 @@
val mode_abbrev = make_mode (false, false, true);
+
(** Isar proof context information **)
type cases = Rule_Cases.T Name_Space.table;
@@ -413,6 +415,14 @@
in (markups, xname) end;
+(* augment context by implicit term declarations *)
+
+fun augment t ctxt = ctxt
+ |> not (Variable.is_body ctxt) ?
+ Variable.add_fixes_direct (rev (Variable.add_free_names ctxt t []))
+ |> Variable.declare_term t;
+
+
(** pretty printing **)
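Proof_Context.augment takes over the role of the removed Variable.auto_fixes (see variable.ML below): outside a body context it fixes the free variables of the given term, and in any case declares the term, so that subsequent printing and proving treats those variables as fixed. Typical usage, mirroring the changes to value_command.ML and Commands.thy above (print_input_term is a hypothetical name):

```
(* Hypothetical example: read a term and print it with its frees fixed. *)
fun print_input_term ctxt s =
  let
    val t = Syntax.read_term ctxt s;
    val ctxt' = Proof_Context.augment t ctxt;
  in Pretty.writeln (Syntax.pretty_term ctxt' t) end;
```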
--- a/src/Pure/Isar/specification.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Isar/specification.ML Sun Jun 09 22:22:36 2019 +0200
@@ -390,7 +390,7 @@
let
val (stmt, elems_ctxt) = prep_stmt raw_elems raw_stmt ctxt;
val prems = Assumption.local_prems_of elems_ctxt ctxt;
- val stmt_ctxt = fold (fold (Variable.auto_fixes o fst) o snd) stmt elems_ctxt;
+ val stmt_ctxt = fold (fold (Proof_Context.augment o fst) o snd) stmt elems_ctxt;
in
(case raw_stmt of
Element.Shows _ =>
--- a/src/Pure/PIDE/prover.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/PIDE/prover.scala Sun Jun 09 22:22:36 2019 +0200
@@ -231,7 +231,7 @@
}
if (result.length > 0) {
output(markup, Nil, List(XML.Text(Symbol.decode(result.toString))))
- result.length = 0
+ result.clear
}
else {
reader.close
--- a/src/Pure/System/tty_loop.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/System/tty_loop.scala Sun Jun 09 22:22:36 2019 +0200
@@ -29,7 +29,7 @@
if (result.length > 0) {
System.out.print(result.toString)
System.out.flush()
- result.length = 0
+ result.clear
}
else {
reader.close()
--- a/src/Pure/Thy/document_antiquotations.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Thy/document_antiquotations.ML Sun Jun 09 22:22:36 2019 +0200
@@ -30,7 +30,7 @@
let
val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c)
handle TYPE (msg, _, _) => error msg;
- val ([t'], _) = Variable.import_terms true [t] ctxt;
+ val (t', _) = yield_singleton (Variable.import_terms true) t ctxt;
in Thy_Output.pretty_term ctxt t' end;
fun pretty_abbrev ctxt s =
@@ -43,7 +43,7 @@
handle TYPE _ => err ();
val t' = Term.betapplys (Envir.expand_atom T (U, u), args);
val eq = Logic.mk_equals (t, t');
- val ctxt' = Variable.auto_fixes eq ctxt;
+ val ctxt' = Proof_Context.augment eq ctxt;
in Proof_Context.pretty_term_abbrev ctxt' eq end;
fun pretty_locale ctxt (name, pos) =
--- a/src/Pure/Thy/thy_output.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/Thy/thy_output.ML Sun Jun 09 22:22:36 2019 +0200
@@ -466,7 +466,7 @@
(* pretty printing *)
fun pretty_term ctxt t =
- Syntax.pretty_term (Variable.auto_fixes t ctxt) t;
+ Syntax.pretty_term (Proof_Context.augment t ctxt) t;
fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;
--- a/src/Pure/assumption.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/assumption.ML Sun Jun 09 22:22:36 2019 +0200
@@ -108,10 +108,14 @@
(* export *)
+fun normalize ctxt0 th0 =
+ let val (ctxt, th) = Thm.join_transfer_context (ctxt0, th0)
+ in Raw_Simplifier.norm_hhf_protect ctxt th end;
+
fun export is_goal inner outer =
- Raw_Simplifier.norm_hhf_protect inner #>
+ normalize inner #>
fold_rev (fn (e, As) => #1 (e is_goal As)) (local_assumptions_of inner outer) #>
- Raw_Simplifier.norm_hhf_protect outer;
+ normalize outer;
fun export_term inner outer =
fold_rev (fn (e, As) => #2 (e false As)) (local_assumptions_of inner outer);
@@ -122,6 +126,8 @@
val term = export_term inner outer;
val typ = Logic.type_map term;
in
+ Morphism.transfer_morphism' inner $>
+ Morphism.transfer_morphism' outer $>
Morphism.morphism "Assumption.export"
{binding = [], typ = [typ], term = [term], fact = [map thm]}
end;
--- a/src/Pure/morphism.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/morphism.ML Sun Jun 09 22:22:36 2019 +0200
@@ -125,7 +125,7 @@
fun fact_morphism a fact = morphism a {binding = [], typ = [], term = [], fact = [fact]};
fun thm_morphism a thm = morphism a {binding = [], typ = [], term = [], fact = [map thm]};
-val transfer_morphism = thm_morphism "transfer" o Thm.transfer;
+val transfer_morphism = thm_morphism "transfer" o Thm.join_transfer;
val transfer_morphism' = transfer_morphism o Proof_Context.theory_of;
val transfer_morphism'' = transfer_morphism o Context.theory_of;
--- a/src/Pure/simplifier.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/simplifier.ML Sun Jun 09 22:22:36 2019 +0200
@@ -126,7 +126,7 @@
fun make_simproc ctxt name {lhss, proc} =
let
- val ctxt' = fold Variable.auto_fixes lhss ctxt;
+ val ctxt' = fold Proof_Context.augment lhss ctxt;
val lhss' = Variable.export_terms ctxt' ctxt lhss;
in
cert_simproc (Proof_Context.theory_of ctxt) name {lhss = lhss', proc = proc}
--- a/src/Pure/thm.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/thm.ML Sun Jun 09 22:22:36 2019 +0200
@@ -89,6 +89,8 @@
val transfer: theory -> thm -> thm
val transfer': Proof.context -> thm -> thm
val transfer'': Context.generic -> thm -> thm
+ val join_transfer: theory -> thm -> thm
+ val join_transfer_context: Proof.context * thm -> Proof.context * thm
val renamed_prop: term -> thm -> thm
val weaken: cterm -> thm -> thm
val weaken_sorts: sort list -> cterm -> cterm
@@ -530,6 +532,15 @@
val transfer' = transfer o Proof_Context.theory_of;
val transfer'' = transfer o Context.theory_of;
+fun join_transfer thy th =
+ if Context.subthy_id (Context.theory_id thy, theory_id th) then th
+ else transfer thy th;
+
+fun join_transfer_context (ctxt, th) =
+ if Context.subthy_id (Context.theory_id (Proof_Context.theory_of ctxt), theory_id th) then
+ (Context.raw_transfer (theory_of_thm th) ctxt, th)
+ else (ctxt, transfer' ctxt th);
+
(* matching *)
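Thm.join_transfer transfers a theorem into the given theory only when it does not already belong to a super-theory, and Thm.join_transfer_context promotes whichever of the two sides (context or theorem) lags behind, as used by Assumption.normalize above. A small sketch of the first operation (current_rule is a hypothetical helper):

```
(* Hypothetical helper: ensure a stored rule lives in (at least) the current
   background theory; a no-op if it already does. *)
fun current_rule ctxt th =
  Thm.join_transfer (Proof_Context.theory_of ctxt) th;
```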
--- a/src/Pure/variable.ML Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Pure/variable.ML Sun Jun 09 22:22:36 2019 +0200
@@ -58,7 +58,6 @@
val add_fixes_binding: binding list -> Proof.context -> string list * Proof.context
val add_fixes: string list -> Proof.context -> string list * Proof.context
val add_fixes_direct: string list -> Proof.context -> Proof.context
- val auto_fixes: term -> Proof.context -> Proof.context
val fix_dummy_patterns: term -> Proof.context -> term * Proof.context
val variant_fixes: string list -> Proof.context -> string list * Proof.context
val gen_all: Proof.context -> thm -> thm
@@ -79,6 +78,7 @@
val import_prf: bool -> Proofterm.proof -> Proof.context -> Proofterm.proof * Proof.context
val import: bool -> thm list -> Proof.context ->
((((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list) * thm list) * Proof.context
+ val import_vars: Proof.context -> thm -> thm
val tradeT: (Proof.context -> thm list -> thm list) -> Proof.context -> thm list -> thm list
val trade: (Proof.context -> thm list -> thm list) -> Proof.context -> thm list -> thm list
val is_bound_focus: Proof.context -> bool
@@ -475,10 +475,6 @@
|> (snd o add_fixes xs)
|> restore_body ctxt;
-fun auto_fixes t ctxt = ctxt
- |> not (is_body ctxt) ? add_fixes_direct (rev (add_free_names ctxt t []))
- |> declare_term t;
-
(* dummy patterns *)
@@ -569,6 +565,8 @@
val term = singleton (export_terms inner outer);
val typ = Logic.type_map term;
in
+ Morphism.transfer_morphism' inner $>
+ Morphism.transfer_morphism' outer $>
Morphism.morphism "Variable.export" {binding = [], typ = [typ], term = [term], fact = [fact]}
end;
@@ -627,6 +625,10 @@
val ths' = map (Thm.instantiate insts') ths;
in ((insts', ths'), ctxt') end;
+fun import_vars ctxt th =
+ let val ((_, [th']), _) = ctxt |> set_body false |> import true [th];
+ in th' end;
+
(* import/export *)
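Variable.import_vars packages the former no_vars boiler-plate (cf. the no_vars attribute in attrib.ML above): it imports the schematic variables of a theorem as fixed frees relative to the given context. For example, to print a theorem without ?-variables (pretty_no_vars is a hypothetical name):

```
(* Hypothetical example: print a theorem with its schematic variables
   displayed as fixed frees. *)
fun pretty_no_vars ctxt th =
  Pretty.writeln (Thm.pretty_thm ctxt (Variable.import_vars ctxt th));
```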
--- a/src/Tools/VSCode/extension/README.md Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/extension/README.md Sun Jun 09 22:22:36 2019 +0200
@@ -1,15 +1,14 @@
# Isabelle Prover IDE support
This extension connects VSCode to the Isabelle Prover IDE infrastructure: it
-requires Isabelle2019.
+requires a repository version of Isabelle.
The implementation is centered around the VSCode Language Server protocol, but
with many add-ons that are specific to VSCode and Isabelle/PIDE.
See also:
- * <https://isabelle.in.tum.de/website-Isabelle2019>
- * <https://isabelle.in.tum.de/repos/isabelle/file/Isabelle2019/src/Tools/VSCode>
+ * <https://isabelle.in.tum.de/repos/isabelle/file/tip/src/Tools/VSCode>
* <https://github.com/Microsoft/language-server-protocol>
@@ -59,8 +58,8 @@
### Isabelle/VSCode Installation
- * Download Isabelle2019 from <https://isabelle.in.tum.de/website-Isabelle2019>
- or any of its mirror sites.
+ * Download a recent Isabelle development snapshot from
+ <https://isabelle.in.tum.de/devel/release_snapshot>
* Unpack and run the main Isabelle/jEdit application as usual, to ensure that
the logic image is built properly and Isabelle works as expected.
@@ -69,7 +68,7 @@
* Open the VSCode *Extensions* view and install the following:
- + *Isabelle2019* (needs to fit to the underlying Isabelle release).
+ + *Isabelle* (needs to fit to the underlying Isabelle release).
+ *Prettify Symbols Mode* (important for display of Isabelle symbols).
@@ -90,17 +89,17 @@
+ Linux:
```
- "isabelle.home": "/home/makarius/Isabelle2019"
+ "isabelle.home": "/home/makarius/Isabelle"
```
+ Mac OS X:
```
- "isabelle.home": "/Users/makarius/Isabelle.app/Isabelle2019"
+ "isabelle.home": "/Users/makarius/Isabelle.app/Isabelle"
```
+ Windows:
```
- "isabelle.home": "C:\\Users\\makarius\\Isabelle2019"
+ "isabelle.home": "C:\\Users\\makarius\\Isabelle"
```
* Restart the VSCode application to ensure that all extensions are properly
--- a/src/Tools/VSCode/extension/package.json Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/extension/package.json Sun Jun 09 22:22:36 2019 +0200
@@ -1,6 +1,6 @@
{
- "name": "Isabelle2019",
- "displayName": "Isabelle2019",
+ "name": "Isabelle",
+ "displayName": "Isabelle",
"description": "Isabelle Prover IDE",
"keywords": [
"theorem prover",
--- a/src/Tools/VSCode/src/channel.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/src/channel.scala Sun Jun 09 22:22:36 2019 +0200
@@ -8,6 +8,7 @@
import isabelle._
+import isabelle.vscode.Protocol
import java.io.{InputStream, OutputStream, FileOutputStream, ByteArrayOutputStream, File => JFile}
--- a/src/Tools/VSCode/src/document_model.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/src/document_model.scala Sun Jun 09 22:22:36 2019 +0200
@@ -8,6 +8,7 @@
import isabelle._
+import isabelle.vscode.{Protocol, Server}
import java.io.{File => JFile}
--- a/src/Tools/VSCode/src/dynamic_output.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/src/dynamic_output.scala Sun Jun 09 22:22:36 2019 +0200
@@ -8,6 +8,7 @@
import isabelle._
+import isabelle.vscode.{Protocol, Server}
object Dynamic_Output
--- a/src/Tools/VSCode/src/preview_panel.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/src/preview_panel.scala Sun Jun 09 22:22:36 2019 +0200
@@ -8,6 +8,7 @@
import isabelle._
+import isabelle.vscode.Protocol
import java.io.{File => JFile}
--- a/src/Tools/VSCode/src/protocol.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/src/protocol.scala Sun Jun 09 22:22:36 2019 +0200
@@ -9,6 +9,7 @@
import isabelle._
+import isabelle.vscode.Server
import java.io.{File => JFile}
--- a/src/Tools/VSCode/src/server.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/src/server.scala Sun Jun 09 22:22:36 2019 +0200
@@ -12,6 +12,7 @@
import isabelle._
+import isabelle.vscode.{Protocol, Server}
import java.io.{PrintStream, OutputStream, File => JFile}
--- a/src/Tools/VSCode/src/state_panel.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/src/state_panel.scala Sun Jun 09 22:22:36 2019 +0200
@@ -8,6 +8,7 @@
import isabelle._
+import isabelle.vscode.{Protocol, Server}
object State_Panel
--- a/src/Tools/VSCode/src/vscode_rendering.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/src/vscode_rendering.scala Sun Jun 09 22:22:36 2019 +0200
@@ -9,6 +9,7 @@
import isabelle._
+import isabelle.vscode.Protocol
import java.io.{File => JFile}
--- a/src/Tools/VSCode/src/vscode_resources.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/src/vscode_resources.scala Sun Jun 09 22:22:36 2019 +0200
@@ -8,6 +8,7 @@
import isabelle._
+import isabelle.vscode.{Protocol, Server}
import java.io.{File => JFile}
--- a/src/Tools/VSCode/src/vscode_spell_checker.scala Sun Jun 09 20:24:16 2019 +0200
+++ b/src/Tools/VSCode/src/vscode_spell_checker.scala Sun Jun 09 22:22:36 2019 +0200
@@ -8,6 +8,7 @@
import isabelle._
+import isabelle.vscode.Protocol
object VSCode_Spell_Checker