# HG changeset patch # User Andreas Lochbihler # Date 1546363999 -3600 # Node ID 2d88bf80c84f3ec47106332b04f4ea5412d3b8ae # Parent a59f7d07bf1748638c22533801b27430788c489d# Parent de09a7261120557937026a4b401c4834264afd1c merged diff -r de09a7261120 -r 2d88bf80c84f NEWS --- a/NEWS Tue Jan 01 17:04:53 2019 +0100 +++ b/NEWS Tue Jan 01 18:33:19 2019 +0100 @@ -84,7 +84,8 @@ INCOMPATIBILITY. * Strong congruence rules (with =simp=> in the premises) for constant f -are now uniformly called f_cong_strong. +are now uniformly called f_cong_simp, in accordance with congruence +rules produced for mappers by the datatype package. INCOMPATIBILITY. * Sledgehammer: The URL for SystemOnTPTP, which is used by remote provers, has been updated. diff -r de09a7261120 -r 2d88bf80c84f src/Doc/Isar_Ref/Outer_Syntax.thy --- a/src/Doc/Isar_Ref/Outer_Syntax.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Doc/Isar_Ref/Outer_Syntax.thy Tue Jan 01 18:33:19 2019 +0100 @@ -189,7 +189,9 @@ \} A @{syntax_def system_name} is like @{syntax name}, but it excludes - white-space characters and needs to conform to file-name notation. + white-space characters and needs to conform to file-name notation. Name + components that are special on Windows (e.g.\ \<^verbatim>\CON\, \<^verbatim>\PRN\, \<^verbatim>\AUX\) are + excluded on all platforms. \ diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Analysis/Bochner_Integration.thy --- a/src/HOL/Analysis/Bochner_Integration.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Analysis/Bochner_Integration.thy Tue Jan 01 18:33:19 2019 +0100 @@ -524,7 +524,7 @@ assumes "M = N" "\x. x \ space N \ f x = g x" "x = y" shows "has_bochner_integral M f x \ has_bochner_integral N g y" unfolding has_bochner_integral.simps assms(1,3) - using assms(2) by (simp cong: measurable_cong_strong nn_integral_cong_strong) + using assms(2) by (simp cong: measurable_cong_simp nn_integral_cong_simp) lemma%unimportant has_bochner_integral_cong_AE: "f \ borel_measurable M \ g \ borel_measurable M \ (AE x in M. f x = g x) \ diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Analysis/Caratheodory.thy --- a/src/HOL/Analysis/Caratheodory.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Analysis/Caratheodory.thy Tue Jan 01 18:33:19 2019 +0100 @@ -737,7 +737,7 @@ using f C Ai unfolding bij_betw_def by auto then have "\range f = A i" using f C Ai unfolding bij_betw_def - by (auto simp add: f_def cong del: SUP_cong_strong) + by (auto simp add: f_def cong del: SUP_cong_simp) moreover { have "(\j. \_r (f j)) = (\j. if j \ {..< card C} then \_r (f j) else 0)" using volume_empty[OF V(1)] by (auto intro!: arg_cong[where f=suminf] simp: f_def) diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Analysis/Derivative.thy --- a/src/HOL/Analysis/Derivative.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Analysis/Derivative.thy Tue Jan 01 18:33:19 2019 +0100 @@ -2193,7 +2193,7 @@ using assms(2-3) vector_derivative_works by auto -subsection\The notion of being field differentiable\ +subsection \Field differentiability\ definition%important field_differentiable :: "['a \ 'a::real_normed_field, 'a filter] \ bool" (infixr "(field'_differentiable)" 50) diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Analysis/Elementary_Topology.thy --- a/src/HOL/Analysis/Elementary_Topology.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Analysis/Elementary_Topology.thy Tue Jan 01 18:33:19 2019 +0100 @@ -687,7 +687,8 @@ subsection \Interior of a Set\ -definition%important "interior S = \{T. 
open T \ T \ S}" +definition%important interior :: "('a::topological_space) set \ 'a set" where +"interior S = \{T. open T \ T \ S}" lemma interiorI [intro?]: assumes "open T" and "x \ T" and "T \ S" @@ -851,7 +852,8 @@ subsection \Closure of a Set\ -definition%important "closure S = S \ {x | x. x islimpt S}" +definition%important closure :: "('a::topological_space) set \ 'a set" where +"closure S = S \ {x . x islimpt S}" lemma interior_closure: "interior S = - (closure (- S))" by (auto simp: interior_def closure_def islimpt_def) @@ -980,7 +982,8 @@ subsection \Frontier (also known as boundary)\ -definition%important "frontier S = closure S - interior S" +definition%important frontier :: "('a::topological_space) set \ 'a set" where +"frontier S = closure S - interior S" lemma frontier_closed [iff]: "closed (frontier S)" by (simp add: frontier_def closed_Diff) @@ -1630,8 +1633,10 @@ with \U \ \A = {}\ show False by auto qed -definition%important "countably_compact U \ - (\A. countable A \ (\a\A. open a) \ U \ \A \ (\T\A. finite T \ U \ \T))" +definition%important countably_compact :: "('a::topological_space) set \ bool" where +"countably_compact U \ + (\A. countable A \ (\a\A. open a) \ U \ \A + \ (\T\A. finite T \ U \ \T))" lemma countably_compactE: assumes "countably_compact s" and "\t\C. open t" and "s \ \C" "countable C" @@ -1698,9 +1703,10 @@ subsubsection\Sequential compactness\ -definition%important seq_compact :: "'a::topological_space set \ bool" - where "seq_compact S \ - (\f. (\n. f n \ S) \ (\l\S. \r::nat\nat. strict_mono r \ ((f \ r) \ l) sequentially))" +definition%important seq_compact :: "'a::topological_space set \ bool" where +"seq_compact S \ + (\f. (\n. f n \ S) + \ (\l\S. \r::nat\nat. strict_mono r \ ((f \ r) \ l) sequentially))" lemma seq_compactI: assumes "\f. \n. f n \ S \ \l\S. \r::nat\nat. strict_mono r \ ((f \ r) \ l) sequentially" diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Analysis/L2_Norm.thy --- a/src/HOL/Analysis/L2_Norm.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Analysis/L2_Norm.thy Tue Jan 01 18:33:19 2019 +0100 @@ -14,7 +14,7 @@ "\A = B; \x. x \ B \ f x = g x\ \ L2_set f A = L2_set g B" unfolding L2_set_def by simp -lemma L2_set_cong_strong: +lemma L2_set_cong_simp: "\A = B; \x. x \ B =simp=> f x = g x\ \ L2_set f A = L2_set g B" unfolding L2_set_def simp_implies_def by simp diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Analysis/Lebesgue_Measure.thy --- a/src/HOL/Analysis/Lebesgue_Measure.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Analysis/Lebesgue_Measure.thy Tue Jan 01 18:33:19 2019 +0100 @@ -407,7 +407,7 @@ lemma measurable_lebesgue_cong: assumes "\x. x \ S \ f x = g x" shows "f \ measurable (lebesgue_on S) M \ g \ measurable (lebesgue_on S) M" - by (metis (mono_tags, lifting) IntD1 assms measurable_cong_strong space_restrict_space) + by (metis (mono_tags, lifting) IntD1 assms measurable_cong_simp space_restrict_space) text%unimportant \Measurability of continuous functions\ diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Analysis/Measure_Space.thy --- a/src/HOL/Analysis/Measure_Space.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Analysis/Measure_Space.thy Tue Jan 01 18:33:19 2019 +0100 @@ -1143,7 +1143,7 @@ "(\x. x \ space M \ P x \ Q x) \ (AE x in M. P x) \ (AE x in M. Q x)" by auto -lemma AE_cong_strong: "M = N \ (\x. x \ space N =simp=> P x = Q x) \ (AE x in M. P x) \ (AE x in N. Q x)" +lemma AE_cong_simp: "M = N \ (\x. x \ space N =simp=> P x = Q x) \ (AE x in M. P x) \ (AE x in N. 
Q x)" by (auto simp: simp_implies_def) lemma AE_all_countable: @@ -1388,7 +1388,8 @@ subsection \Measure space induced by distribution of @{const measurable}-functions\ definition%important distr :: "'a measure \ 'b measure \ ('a \ 'b) \ 'b measure" where - "distr M N f = measure_of (space N) (sets N) (\A. emeasure M (f -` A \ space M))" +"distr M N f = + measure_of (space N) (sets N) (\A. emeasure M (f -` A \ space M))" lemma shows sets_distr[simp, measurable_cong]: "sets (distr M N f) = sets N" @@ -1708,9 +1709,8 @@ subsection \Set of measurable sets with finite measure\ -definition%important fmeasurable :: "'a measure \ 'a set set" -where - "fmeasurable M = {A\sets M. emeasure M A < \}" +definition%important fmeasurable :: "'a measure \ 'a set set" where +"fmeasurable M = {A\sets M. emeasure M A < \}" lemma fmeasurableD[dest, measurable_dest]: "A \ fmeasurable M \ A \ sets M" by (auto simp: fmeasurable_def) @@ -2589,7 +2589,7 @@ using E \ by (subst (1 2) emeasure_restrict_space) (auto simp: sets_eq sets_eq[THEN sets_eq_imp_space_eq]) next show "range (from_nat_into A) \ E" "(\i. from_nat_into A i) = \" - using A by (auto cong del: SUP_cong_strong) + using A by (auto cong del: SUP_cong_simp) next fix i have "emeasure (restrict_space M \) (from_nat_into A i) = emeasure M (from_nat_into A i)" @@ -2605,7 +2605,8 @@ subsection%unimportant \Null measure\ -definition "null_measure M = sigma (space M) (sets M)" +definition null_measure :: "'a measure \ 'a measure" where +"null_measure M = sigma (space M) (sets M)" lemma space_null_measure[simp]: "space (null_measure M) = space M" by (simp add: null_measure_def) @@ -2626,9 +2627,8 @@ subsection \Scaling a measure\ -definition%important scale_measure :: "ennreal \ 'a measure \ 'a measure" -where - "scale_measure r M = measure_of (space M) (sets M) (\A. r * emeasure M A)" +definition%important scale_measure :: "ennreal \ 'a measure \ 'a measure" where +"scale_measure r M = measure_of (space M) (sets M) (\A. r * emeasure M A)" lemma space_scale_measure: "space (scale_measure r M) = space M" by (simp add: scale_measure_def) @@ -2874,9 +2874,10 @@ by (cases "X \ sets M") (auto simp: emeasure_notin_sets) done -definition%important sup_measure' :: "'a measure \ 'a measure \ 'a measure" -where - "sup_measure' A B = measure_of (space A) (sets A) (\X. SUP Y\sets A. emeasure A (X \ Y) + emeasure B (X \ - Y))" +definition%important sup_measure' :: "'a measure \ 'a measure \ 'a measure" where +"sup_measure' A B = + measure_of (space A) (sets A) + (\X. SUP Y\sets A. 
emeasure A (X \ Y) + emeasure B (X \ - Y))" lemma assumes [simp]: "sets B = sets A" shows space_sup_measure'[simp]: "space (sup_measure' A B) = space A" @@ -3002,10 +3003,11 @@ qed qed simp_all -definition%important sup_lexord :: "'a \ 'a \ ('a \ 'b::order) \ 'a \ 'a \ 'a" -where - "sup_lexord A B k s c = - (if k A = k B then c else if \ k A \ k B \ \ k B \ k A then s else if k B \ k A then A else B)" +definition%important sup_lexord :: "'a \ 'a \ ('a \ 'b::order) \ 'a \ 'a \ 'a" where +"sup_lexord A B k s c = + (if k A = k B then c else + if \ k A \ k B \ \ k B \ k A then s else + if k B \ k A then A else B)" lemma sup_lexord: "(k A < k B \ P B) \ (k B < k A \ P A) \ (k A = k B \ P c) \ @@ -3031,8 +3033,7 @@ instantiation measure :: (type) semilattice_sup begin -definition%important sup_measure :: "'a measure \ 'a measure \ 'a measure" -where +definition%important sup_measure :: "'a measure \ 'a measure \ 'a measure" where "sup_measure A B = sup_lexord A B space (sigma (space A \ space B) {}) (sup_lexord A B sets (sigma (space A) (sets A \ sets B)) (sup_measure' A B))" @@ -3135,9 +3136,12 @@ lemma UN_space_closed: "\(sets ` S) \ Pow (\(space ` S))" using sets.space_closed by auto -definition Sup_lexord :: "('a \ 'b::complete_lattice) \ ('a set \ 'a) \ ('a set \ 'a) \ 'a set \ 'a" +definition%important + Sup_lexord :: "('a \ 'b::complete_lattice) \ ('a set \ 'a) \ ('a set \ 'a) \ 'a set \ 'a" where - "Sup_lexord k c s A = (let U = (SUP a\A. k a) in if \a\A. k a = U then c {a\A. k a = U} else s A)" + "Sup_lexord k c s A = + (let U = (SUP a\A. k a) + in if \a\A. k a = U then c {a\A. k a = U} else s A)" lemma Sup_lexord: "(\a S. a \ A \ k a = (SUP a\A. k a) \ S = {a'\A. k a' = k a} \ P (c S)) \ ((\a. a \ A \ k a \ (SUP a\A. k a)) \ P (s A)) \ @@ -3194,9 +3198,9 @@ "finite I \ I \ {} \ (\i. i \ I \ sets i = sets M) \ sets (sup_measure.F id I) = sets M" by (induction I rule: finite_ne_induct) (simp_all add: sets_sup) -definition%important Sup_measure' :: "'a measure set \ 'a measure" -where - "Sup_measure' M = measure_of (\a\M. space a) (\a\M. sets a) +definition%important Sup_measure' :: "'a measure set \ 'a measure" where +"Sup_measure' M = + measure_of (\a\M. space a) (\a\M. sets a) (\X. (SUP P\{P. finite P \ P \ M }. sup_measure.F id P X))" lemma space_Sup_measure'2: "space (Sup_measure' M) = (\m\M. space m)" @@ -3265,21 +3269,20 @@ using assms * by auto qed (rule UN_space_closed) -definition%important Sup_measure :: "'a measure set \ 'a measure" -where - "Sup_measure = Sup_lexord space (Sup_lexord sets Sup_measure' - (\U. sigma (\u\U. space u) (\u\U. sets u))) (\U. sigma (\u\U. space u) {})" - -definition%important Inf_measure :: "'a measure set \ 'a measure" -where +definition%important Sup_measure :: "'a measure set \ 'a measure" where +"Sup_measure = + Sup_lexord space + (Sup_lexord sets Sup_measure' + (\U. sigma (\u\U. space u) (\u\U. sets u))) + (\U. sigma (\u\U. space u) {})" + +definition%important Inf_measure :: "'a measure set \ 'a measure" where "Inf_measure A = Sup {x. \a\A. 
x \ a}" -definition%important inf_measure :: "'a measure \ 'a measure \ 'a measure" -where +definition%important inf_measure :: "'a measure \ 'a measure \ 'a measure" where "inf_measure a b = Inf {a, b}" -definition%important top_measure :: "'a measure" -where +definition%important top_measure :: "'a measure" where "top_measure = Inf {}" instance diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Analysis/Nonnegative_Lebesgue_Integration.thy --- a/src/HOL/Analysis/Nonnegative_Lebesgue_Integration.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Analysis/Nonnegative_Lebesgue_Integration.thy Tue Jan 01 18:33:19 2019 +0100 @@ -2058,7 +2058,7 @@ by (subst s_eq) (auto simp: image_subset_iff le_fun_def split: split_indicator split_indicator_asm) qed then show ?thesis - unfolding nn_integral_def_finite by (simp cong del: SUP_cong_strong) + unfolding nn_integral_def_finite by (simp cong del: SUP_cong_simp) qed lemma nn_integral_count_space_indicator: diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Analysis/Sigma_Algebra.thy --- a/src/HOL/Analysis/Sigma_Algebra.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Analysis/Sigma_Algebra.thy Tue Jan 01 18:33:19 2019 +0100 @@ -436,10 +436,10 @@ by (auto simp add: binary_def) lemma Un_range_binary: "a \ b = (\i::nat. binary a b i)" - by (simp add: range_binary_eq cong del: SUP_cong_strong) + by (simp add: range_binary_eq cong del: SUP_cong_simp) lemma Int_range_binary: "a \ b = (\i::nat. binary a b i)" - by (simp add: range_binary_eq cong del: INF_cong_strong) + by (simp add: range_binary_eq cong del: INF_cong_simp) lemma sigma_algebra_iff2: "sigma_algebra \ M \ @@ -802,7 +802,7 @@ subsubsection%unimportant \Ring generated by a semiring\ -definition (in semiring_of_sets) +definition (in semiring_of_sets) generated_ring :: "'a set set" where "generated_ring = { \C | C. C \ M \ finite C \ disjoint C }" lemma (in semiring_of_sets) generated_ringE[elim?]: @@ -943,11 +943,11 @@ done lemma UN_binaryset_eq: "(\i. binaryset A B i) = A \ B" - by (simp add: range_binaryset_eq cong del: SUP_cong_strong) + by (simp add: range_binaryset_eq cong del: SUP_cong_simp) subsubsection \Closed CDI\ -definition%important closed_cdi where +definition%important closed_cdi :: "'a set \ 'a set set \ bool" where "closed_cdi \ M \ M \ Pow \ & (\s \ M. \ - s \ M) & @@ -1181,16 +1181,16 @@ subsubsection \Dynkin systems\ -locale%important dynkin_system = subset_class + +locale%important Dynkin_system = subset_class + assumes space: "\ \ M" and compl[intro!]: "\A. A \ M \ \ - A \ M" and UN[intro!]: "\A. disjoint_family A \ range A \ M \ (\i::nat. A i) \ M" -lemma (in dynkin_system) empty[intro, simp]: "{} \ M" +lemma (in Dynkin_system) empty[intro, simp]: "{} \ M" using space compl[of "\"] by simp -lemma (in dynkin_system) diff: +lemma (in Dynkin_system) diff: assumes sets: "D \ M" "E \ M" and "D \ E" shows "E - D \ M" proof - @@ -1209,41 +1209,42 @@ finally show ?thesis . qed -lemma dynkin_systemI: +lemma Dynkin_systemI: assumes "\ A. A \ M \ A \ \" "\ \ M" assumes "\ A. A \ M \ \ - A \ M" assumes "\ A. disjoint_family A \ range A \ M \ (\i::nat. A i) \ M" - shows "dynkin_system \ M" - using assms by (auto simp: dynkin_system_def dynkin_system_axioms_def subset_class_def) + shows "Dynkin_system \ M" + using assms by (auto simp: Dynkin_system_def Dynkin_system_axioms_def subset_class_def) -lemma dynkin_systemI': +lemma Dynkin_systemI': assumes 1: "\ A. A \ M \ A \ \" assumes empty: "{} \ M" assumes Diff: "\ A. A \ M \ \ - A \ M" assumes 2: "\ A. disjoint_family A \ range A \ M \ (\i::nat. 
A i) \ M" - shows "dynkin_system \ M" + shows "Dynkin_system \ M" proof - from Diff[OF empty] have "\ \ M" by auto from 1 this Diff 2 show ?thesis - by (intro dynkin_systemI) auto + by (intro Dynkin_systemI) auto qed -lemma dynkin_system_trivial: - shows "dynkin_system A (Pow A)" - by (rule dynkin_systemI) auto +lemma Dynkin_system_trivial: + shows "Dynkin_system A (Pow A)" + by (rule Dynkin_systemI) auto -lemma sigma_algebra_imp_dynkin_system: - assumes "sigma_algebra \ M" shows "dynkin_system \ M" +lemma sigma_algebra_imp_Dynkin_system: + assumes "sigma_algebra \ M" shows "Dynkin_system \ M" proof - interpret sigma_algebra \ M by fact - show ?thesis using sets_into_space by (fastforce intro!: dynkin_systemI) + show ?thesis using sets_into_space by (fastforce intro!: Dynkin_systemI) qed subsubsection "Intersection sets systems" -definition%important "Int_stable M \ (\ a \ M. \ b \ M. a \ b \ M)" +definition%important Int_stable :: "'a set set \ bool" where +"Int_stable M \ (\ a \ M. \ b \ M. a \ b \ M)" lemma (in algebra) Int_stable: "Int_stable M" unfolding Int_stable_def by auto @@ -1260,7 +1261,7 @@ "Int_stable M \ a \ M \ b \ M \ a \ b \ M" unfolding Int_stable_def by auto -lemma (in dynkin_system) sigma_algebra_eq_Int_stable: +lemma (in Dynkin_system) sigma_algebra_eq_Int_stable: "sigma_algebra \ M \ Int_stable M" proof assume "sigma_algebra \ M" then show "Int_stable M" @@ -1283,48 +1284,48 @@ subsubsection "Smallest Dynkin systems" -definition%important dynkin where - "dynkin \ M = (\{D. dynkin_system \ D \ M \ D})" +definition%important Dynkin :: "'a set \ 'a set set \ 'a set set" where + "Dynkin \ M = (\{D. Dynkin_system \ D \ M \ D})" -lemma dynkin_system_dynkin: +lemma Dynkin_system_Dynkin: assumes "M \ Pow (\)" - shows "dynkin_system \ (dynkin \ M)" -proof (rule dynkin_systemI) - fix A assume "A \ dynkin \ M" + shows "Dynkin_system \ (Dynkin \ M)" +proof (rule Dynkin_systemI) + fix A assume "A \ Dynkin \ M" moreover - { fix D assume "A \ D" and d: "dynkin_system \ D" - then have "A \ \" by (auto simp: dynkin_system_def subset_class_def) } - moreover have "{D. dynkin_system \ D \ M \ D} \ {}" - using assms dynkin_system_trivial by fastforce + { fix D assume "A \ D" and d: "Dynkin_system \ D" + then have "A \ \" by (auto simp: Dynkin_system_def subset_class_def) } + moreover have "{D. Dynkin_system \ D \ M \ D} \ {}" + using assms Dynkin_system_trivial by fastforce ultimately show "A \ \" - unfolding dynkin_def using assms + unfolding Dynkin_def using assms by auto next - show "\ \ dynkin \ M" - unfolding dynkin_def using dynkin_system.space by fastforce + show "\ \ Dynkin \ M" + unfolding Dynkin_def using Dynkin_system.space by fastforce next - fix A assume "A \ dynkin \ M" - then show "\ - A \ dynkin \ M" - unfolding dynkin_def using dynkin_system.compl by force + fix A assume "A \ Dynkin \ M" + then show "\ - A \ Dynkin \ M" + unfolding Dynkin_def using Dynkin_system.compl by force next fix A :: "nat \ 'a set" - assume A: "disjoint_family A" "range A \ dynkin \ M" - show "(\i. A i) \ dynkin \ M" unfolding dynkin_def + assume A: "disjoint_family A" "range A \ Dynkin \ M" + show "(\i. A i) \ Dynkin \ M" unfolding Dynkin_def proof (simp, safe) - fix D assume "dynkin_system \ D" "M \ D" + fix D assume "Dynkin_system \ D" "M \ D" with A have "(\i. A i) \ D" - by (intro dynkin_system.UN) (auto simp: dynkin_def) + by (intro Dynkin_system.UN) (auto simp: Dynkin_def) then show "(\i. 
A i) \ D" by auto qed qed -lemma dynkin_Basic[intro]: "A \ M \ A \ dynkin \ M" - unfolding dynkin_def by auto +lemma Dynkin_Basic[intro]: "A \ M \ A \ Dynkin \ M" + unfolding Dynkin_def by auto -lemma (in dynkin_system) restricted_dynkin_system: +lemma (in Dynkin_system) restricted_Dynkin_system: assumes "D \ M" - shows "dynkin_system \ {Q. Q \ \ \ Q \ D \ M}" -proof (rule dynkin_systemI, simp_all) + shows "Dynkin_system \ {Q. Q \ \ \ Q \ D \ M}" +proof (rule Dynkin_systemI, simp_all) have "\ \ D = D" using \D \ M\ sets_into_space by auto then show "\ \ D \ M" @@ -1345,87 +1346,87 @@ by (auto simp del: UN_simps) qed -lemma (in dynkin_system) dynkin_subset: +lemma (in Dynkin_system) Dynkin_subset: assumes "N \ M" - shows "dynkin \ N \ M" + shows "Dynkin \ N \ M" proof - - have "dynkin_system \ M" .. - then have "dynkin_system \ M" - using assms unfolding dynkin_system_def dynkin_system_axioms_def subset_class_def by simp - with \N \ M\ show ?thesis by (auto simp add: dynkin_def) + have "Dynkin_system \ M" .. + then have "Dynkin_system \ M" + using assms unfolding Dynkin_system_def Dynkin_system_axioms_def subset_class_def by simp + with \N \ M\ show ?thesis by (auto simp add: Dynkin_def) qed -lemma sigma_eq_dynkin: +lemma sigma_eq_Dynkin: assumes sets: "M \ Pow \" assumes "Int_stable M" - shows "sigma_sets \ M = dynkin \ M" + shows "sigma_sets \ M = Dynkin \ M" proof - - have "dynkin \ M \ sigma_sets (\) (M)" - using sigma_algebra_imp_dynkin_system - unfolding dynkin_def sigma_sets_least_sigma_algebra[OF sets] by auto + have "Dynkin \ M \ sigma_sets (\) (M)" + using sigma_algebra_imp_Dynkin_system + unfolding Dynkin_def sigma_sets_least_sigma_algebra[OF sets] by auto moreover - interpret dynkin_system \ "dynkin \ M" - using dynkin_system_dynkin[OF sets] . - have "sigma_algebra \ (dynkin \ M)" + interpret Dynkin_system \ "Dynkin \ M" + using Dynkin_system_Dynkin[OF sets] . + have "sigma_algebra \ (Dynkin \ M)" unfolding sigma_algebra_eq_Int_stable Int_stable_def proof (intro ballI) - fix A B assume "A \ dynkin \ M" "B \ dynkin \ M" - let ?D = "\E. {Q. Q \ \ \ Q \ E \ dynkin \ M}" + fix A B assume "A \ Dynkin \ M" "B \ Dynkin \ M" + let ?D = "\E. {Q. 
Q \ \ \ Q \ E \ Dynkin \ M}" have "M \ ?D B" proof fix E assume "E \ M" - then have "M \ ?D E" "E \ dynkin \ M" + then have "M \ ?D E" "E \ Dynkin \ M" using sets_into_space \Int_stable M\ by (auto simp: Int_stable_def) - then have "dynkin \ M \ ?D E" - using restricted_dynkin_system \E \ dynkin \ M\ - by (intro dynkin_system.dynkin_subset) simp_all + then have "Dynkin \ M \ ?D E" + using restricted_Dynkin_system \E \ Dynkin \ M\ + by (intro Dynkin_system.Dynkin_subset) simp_all then have "B \ ?D E" - using \B \ dynkin \ M\ by auto - then have "E \ B \ dynkin \ M" + using \B \ Dynkin \ M\ by auto + then have "E \ B \ Dynkin \ M" by (subst Int_commute) simp then show "E \ ?D B" using sets \E \ M\ by auto qed - then have "dynkin \ M \ ?D B" - using restricted_dynkin_system \B \ dynkin \ M\ - by (intro dynkin_system.dynkin_subset) simp_all - then show "A \ B \ dynkin \ M" - using \A \ dynkin \ M\ sets_into_space by auto + then have "Dynkin \ M \ ?D B" + using restricted_Dynkin_system \B \ Dynkin \ M\ + by (intro Dynkin_system.Dynkin_subset) simp_all + then show "A \ B \ Dynkin \ M" + using \A \ Dynkin \ M\ sets_into_space by auto qed from sigma_algebra.sigma_sets_subset[OF this, of "M"] - have "sigma_sets (\) (M) \ dynkin \ M" by auto - ultimately have "sigma_sets (\) (M) = dynkin \ M" by auto + have "sigma_sets (\) (M) \ Dynkin \ M" by auto + ultimately have "sigma_sets (\) (M) = Dynkin \ M" by auto then show ?thesis - by (auto simp: dynkin_def) + by (auto simp: Dynkin_def) qed -lemma (in dynkin_system) dynkin_idem: - "dynkin \ M = M" +lemma (in Dynkin_system) Dynkin_idem: + "Dynkin \ M = M" proof - - have "dynkin \ M = M" + have "Dynkin \ M = M" proof - show "M \ dynkin \ M" - using dynkin_Basic by auto - show "dynkin \ M \ M" - by (intro dynkin_subset) auto + show "M \ Dynkin \ M" + using Dynkin_Basic by auto + show "Dynkin \ M \ M" + by (intro Dynkin_subset) auto qed then show ?thesis - by (auto simp: dynkin_def) + by (auto simp: Dynkin_def) qed -lemma (in dynkin_system) dynkin_lemma: +lemma (in Dynkin_system) Dynkin_lemma: assumes "Int_stable E" and E: "E \ M" "M \ sigma_sets \ E" shows "sigma_sets \ E = M" proof - have "E \ Pow \" using E sets_into_space by force - then have *: "sigma_sets \ E = dynkin \ E" - using \Int_stable E\ by (rule sigma_eq_dynkin) - then have "dynkin \ E = M" - using assms dynkin_subset[OF E(1)] by simp + then have *: "sigma_sets \ E = Dynkin \ E" + using \Int_stable E\ by (rule sigma_eq_Dynkin) + then have "Dynkin \ E = M" + using assms Dynkin_subset[OF E(1)] by simp with * show ?thesis - using assms by (auto simp: dynkin_def) + using assms by (auto simp: Dynkin_def) qed subsubsection \Induction rule for intersection-stable generators\ @@ -1447,10 +1448,10 @@ interpret sigma_algebra \ "sigma_sets \ G" using closed by (rule sigma_algebra_sigma_sets) from compl[OF _ empty] closed have space: "P \" by simp - interpret dynkin_system \ ?D + interpret Dynkin_system \ ?D by standard (auto dest: sets_into_space intro!: space compl union) have "sigma_sets \ G = ?D" - by (rule dynkin_lemma) (auto simp: basic \Int_stable G\) + by (rule Dynkin_lemma) (auto simp: basic \Int_stable G\) with A show ?thesis by auto qed @@ -1460,13 +1461,16 @@ "positive M \ \ \ {} = 0" definition%important countably_additive :: "'a set set \ ('a set \ ennreal) \ bool" where - "countably_additive M f \ (\A. range A \ M \ disjoint_family A \ (\i. A i) \ M \ +"countably_additive M f \ + (\A. range A \ M \ disjoint_family A \ (\i. A i) \ M \ (\i. f (A i)) = f (\i. 
A i))" definition%important measure_space :: "'a set \ 'a set set \ ('a set \ ennreal) \ bool" where - "measure_space \ A \ \ sigma_algebra \ A \ positive A \ \ countably_additive A \" +"measure_space \ A \ \ + sigma_algebra \ A \ positive A \ \ countably_additive A \" -typedef%important 'a measure = "{(\::'a set, A, \). (\a\-A. \ a = 0) \ measure_space \ A \ }" +typedef%important 'a measure = + "{(\::'a set, A, \). (\a\-A. \ a = 0) \ measure_space \ A \ }" proof%unimportant have "sigma_algebra UNIV {{}, UNIV}" by (auto simp: sigma_algebra_iff2) @@ -1498,8 +1502,10 @@ interpretation sets: sigma_algebra "space M" "sets M" for M :: "'a measure" using measure_space[of M] by (auto simp: measure_space_def) -definition%important measure_of :: "'a set \ 'a set set \ ('a set \ ennreal) \ 'a measure" where - "measure_of \ A \ = Abs_measure (\, if A \ Pow \ then sigma_sets \ A else {{}, \}, +definition%important measure_of :: "'a set \ 'a set set \ ('a set \ ennreal) \ 'a measure" + where +"measure_of \ A \ = + Abs_measure (\, if A \ Pow \ then sigma_sets \ A else {{}, \}, \a. if a \ sigma_sets \ A \ measure_space \ (sigma_sets \ A) \ then \ a else 0)" abbreviation "sigma \ A \ measure_of \ A (\x. 0)" @@ -1715,8 +1721,9 @@ subsubsection \Measurable functions\ -definition%important measurable :: "'a measure \ 'b measure \ ('a \ 'b) set" (infixr "\\<^sub>M" 60) where - "measurable A B = {f \ space A \ space B. \y \ sets B. f -` y \ space A \ sets A}" +definition%important measurable :: "'a measure \ 'b measure \ ('a \ 'b) set" + (infixr "\\<^sub>M" 60) where +"measurable A B = {f \ space A \ space B. \y \ sets B. f -` y \ space A \ sets A}" lemma measurableI: "(\x. x \ space M \ f x \ space N) \ (\A. A \ sets N \ f -` A \ space M \ sets M) \ @@ -1802,7 +1809,7 @@ unfolding measurable_def using assms by (simp cong: vimage_inter_cong Pi_cong add: simp_implies_def) -lemma measurable_cong_strong: +lemma measurable_cong_simp: "M = N \ M' = N' \ (\w. w \ space M \ f w = g w) \ f \ measurable M M' \ g \ measurable N N'" by (metis measurable_cong) @@ -1878,7 +1885,7 @@ subsubsection \Counting space\ definition%important count_space :: "'a set \ 'a measure" where - "count_space \ = measure_of \ (Pow \) (\A. if finite A then of_nat (card A) else \)" +"count_space \ = measure_of \ (Pow \) (\A. if finite A then of_nat (card A) else \)" lemma shows space_count_space[simp]: "space (count_space \) = \" @@ -1955,7 +1962,9 @@ subsubsection%unimportant \Extend measure\ -definition "extend_measure \ I G \ = +definition extend_measure :: "'a set \ 'b set \ ('b \ 'a set) \ ('b \ ennreal) \ 'a measure" + where +"extend_measure \ I G \ = (if (\\'. (\i\I. \' (G i) = \ i) \ measure_space \ (sigma_sets \ (G`I)) \') \ \ (\i\I. \ i = 0) then measure_of \ (G`I) (SOME \'. (\i\I. \' (G i) = \ i) \ measure_space \ (sigma_sets \ (G`I)) \') else measure_of \ (G`I) (\_. 0))" @@ -2015,7 +2024,7 @@ subsection \The smallest $\sigma$-algebra regarding a function\ -definition%important +definition%important vimage_algebra :: "'a set \ ('a \ 'b) \ 'b measure \ 'a measure" where "vimage_algebra X f M = sigma X {f -` A \ X | A. 
A \ sets M}" lemma space_vimage_algebra[simp]: "space (vimage_algebra X f M) = X" @@ -2090,7 +2099,7 @@ subsubsection \Restricted Space Sigma Algebra\ -definition restrict_space where +definition restrict_space :: "'a measure \ 'a set \ 'a measure" where "restrict_space M \ = measure_of (\ \ space M) (((\) \) ` sets M) (emeasure M)" lemma space_restrict_space: "space (restrict_space M \) = \ \ space M" diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Complete_Lattices.thy --- a/src/HOL/Complete_Lattices.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Complete_Lattices.thy Tue Jan 01 18:33:19 2019 +0100 @@ -61,7 +61,7 @@ lemma INF_cong: "A = B \ (\x. x \ B \ C x = D x) \ \(C ` A) = \(D ` B)" by (simp add: image_def) -lemma INF_cong_strong [cong]: +lemma INF_cong_simp [cong]: "A = B \ (\x. x \ B =simp=> C x = D x) \ \(C ` A) = \(D ` B)" unfolding simp_implies_def by (fact INF_cong) @@ -82,9 +82,9 @@ lemma SUP_cong: "A = B \ (\x. x \ B \ C x = D x) \ \(C ` A) = \(D ` B)" by (fact Inf.INF_cong) -lemma SUP_cong_strong [cong]: +lemma SUP_cong_simp [cong]: "A = B \ (\x. x \ B =simp=> C x = D x) \ \(C ` A) = \(D ` B)" -by (fact Inf.INF_cong_strong) +by (fact Inf.INF_cong_simp) end @@ -179,19 +179,19 @@ by (auto intro: le_infI le_infI1 le_infI2 antisym Inf_greatest Inf_lower) lemma INF_insert [simp]: "(\x\insert a A. f x) = f a \ \(f ` A)" - by (simp cong del: INF_cong_strong) + by (simp cong del: INF_cong_simp) lemma Sup_insert [simp]: "\insert a A = a \ \A" by (auto intro: le_supI le_supI1 le_supI2 antisym Sup_least Sup_upper) lemma SUP_insert [simp]: "(\x\insert a A. f x) = f a \ \(f ` A)" - by (simp cong del: SUP_cong_strong) + by (simp cong del: SUP_cong_simp) lemma INF_empty [simp]: "(\x\{}. f x) = \" - by (simp cong del: INF_cong_strong) + by (simp cong del: INF_cong_simp) lemma SUP_empty [simp]: "(\x\{}. f x) = \" - by (simp cong del: SUP_cong_strong) + by (simp cong del: SUP_cong_simp) lemma Inf_UNIV [simp]: "\UNIV = \" by (auto intro!: antisym Inf_lower) @@ -437,11 +437,11 @@ lemma INF_eq_iff: "I \ {} \ (\i. i \ I \ f i \ c) \ \(f ` I) = c \ (\i\I. f i = c)" using INF_eq_const [of I f c] INF_lower [of _ I f] - by (auto intro: antisym cong del: INF_cong_strong) + by (auto intro: antisym cong del: INF_cong_simp) lemma SUP_eq_iff: "I \ {} \ (\i. i \ I \ c \ f i) \ \(f ` I) = c \ (\i\I. f i = c)" using SUP_eq_const [of I f c] SUP_upper [of _ I f] - by (auto intro: antisym cong del: SUP_cong_strong) + by (auto intro: antisym cong del: SUP_cong_simp) end diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Data_Structures/Braun_Tree.thy --- a/src/HOL/Data_Structures/Braun_Tree.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Data_Structures/Braun_Tree.thy Tue Jan 01 18:33:19 2019 +0100 @@ -252,7 +252,7 @@ by (simp add: insert_ident Icc_eq_insert_lb_nat nat_in_image braun_indices1) thus ?case using Node.IH even_odd_decomp[OF _ _ 3] by(simp add: card_image inj_on_def card_braun_indices Let_def 1 2 inj_image_eq_iff image_comp - cong: image_cong_strong) + cong: image_cong_simp) qed lemma braun_iff_braun_indices: "braun t \ braun_indices t = {1..size t}" diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Hoare_Parallel/RG_Examples.thy --- a/src/HOL/Hoare_Parallel/RG_Examples.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Hoare_Parallel/RG_Examples.thy Tue Jan 01 18:33:19 2019 +0100 @@ -321,7 +321,7 @@ \\iX!i) mod n=i \ (\j<\X!i. 
j mod n=i \ \P(B!j)) \ (\Y!i P(B!(\Y!i)) \ \Y!i\ m+i) \ (\jY!j \ \X!i)\]" apply (rule Parallel) -apply (auto cong del: INF_cong_strong SUP_cong_strong) +apply (auto cong del: INF_cong_simp SUP_cong_simp) apply force apply (rule While) apply force diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Library/Complete_Partial_Order2.thy --- a/src/HOL/Library/Complete_Partial_Order2.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Library/Complete_Partial_Order2.thy Tue Jan 01 18:33:19 2019 +0100 @@ -188,7 +188,7 @@ proof(cases "Y = {}") case True then show ?thesis - by (simp add: image_constant_conv cong del: SUP_cong_strong) + by (simp add: image_constant_conv cong del: SUP_cong_simp) next case False have chain1: "\f. f \ Z \ Complete_Partial_Order.chain (\) (f ` Y)" @@ -518,7 +518,7 @@ context ccpo begin lemma cont_const [simp, cont_intro]: "cont luba orda Sup (\) (\x. c)" -by (rule contI) (simp add: image_constant_conv cong del: SUP_cong_strong) +by (rule contI) (simp add: image_constant_conv cong del: SUP_cong_simp) lemma mcont_const [cont_intro, simp]: "mcont luba orda Sup (\) (\x. c)" @@ -695,7 +695,7 @@ hence "\Y \ bound" using chain by(auto intro: ccpo_Sup_least) moreover have "Y \ {x. \ x \ bound} = {}" using True by auto ultimately show ?thesis using True Y - by (auto simp add: image_constant_conv cong del: c.SUP_cong_strong) + by (auto simp add: image_constant_conv cong del: c.SUP_cong_simp) next case False let ?Y = "Y \ {x. \ x \ bound}" diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Probability/Giry_Monad.thy --- a/src/HOL/Probability/Giry_Monad.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Probability/Giry_Monad.thy Tue Jan 01 18:33:19 2019 +0100 @@ -1763,7 +1763,7 @@ qed qed -lemma bind_cong_strong: "M = N \ (\x. x\space M =simp=> f x = g x) \ bind M f = bind N g" +lemma bind_cong_simp: "M = N \ (\x. x\space M =simp=> f x = g x) \ bind M f = bind N g" by (auto simp: simp_implies_def intro!: bind_cong) lemma sets_bind_measurable: diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Probability/Independent_Family.thy --- a/src/HOL/Probability/Independent_Family.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Probability/Independent_Family.thy Tue Jan 01 18:33:19 2019 +0100 @@ -118,9 +118,9 @@ and indep_setD_ev2: "B \ events" using indep unfolding indep_set_def indep_sets_def UNIV_bool by auto -lemma (in prob_space) indep_sets_dynkin: +lemma (in prob_space) indep_sets_Dynkin: assumes indep: "indep_sets F I" - shows "indep_sets (\i. dynkin (space M) (F i)) I" + shows "indep_sets (\i. Dynkin (space M) (F i)) I" (is "indep_sets ?F I") proof (subst indep_sets_finite_index_sets, intro allI impI ballI) fix J assume "finite J" "J \ I" "J \ {}" @@ -178,8 +178,8 @@ qed qed } note indep_sets_insert = this - have "dynkin_system (space M) ?D" - proof (rule dynkin_systemI', simp_all cong del: indep_sets_cong, safe) + have "Dynkin_system (space M) ?D" + proof (rule Dynkin_systemI', simp_all cong del: indep_sets_cong, safe) show "indep_sets (G(j := {{}})) K" by (rule indep_sets_insert) auto next @@ -249,8 +249,8 @@ by (auto dest!: sums_unique) qed (insert F, auto) qed (insert sets.sets_into_space, auto) - then have mono: "dynkin (space M) (G j) \ {E \ events. indep_sets (G(j := {E})) K}" - proof (rule dynkin_system.dynkin_subset, safe) + then have mono: "Dynkin (space M) (G j) \ {E \ events. 
indep_sets (G(j := {E})) K}" + proof (rule Dynkin_system.Dynkin_subset, safe) fix X assume "X \ G j" then show "X \ events" using G \j \ K\ by auto from \indep_sets G K\ @@ -276,7 +276,7 @@ by (intro indep_setsD[OF G(1)]) auto qed qed - then have "indep_sets (G(j := dynkin (space M) (G j))) K" + then have "indep_sets (G(j := Dynkin (space M) (G j))) K" by (rule indep_sets_mono_sets) (insert mono, auto) then show ?case by (rule indep_sets_mono_sets) (insert \j \ K\ \j \ J\, auto simp: G_def) @@ -291,9 +291,9 @@ assumes stable: "\i. i \ I \ Int_stable (F i)" shows "indep_sets (\i. sigma_sets (space M) (F i)) I" proof - - from indep_sets_dynkin[OF indep] + from indep_sets_Dynkin[OF indep] show ?thesis - proof (rule indep_sets_mono_sets, subst sigma_eq_dynkin, simp_all add: stable) + proof (rule indep_sets_mono_sets, subst sigma_eq_Dynkin, simp_all add: stable) fix i assume "i \ I" with indep have "F i \ events" by (auto simp: indep_sets_def) with sets.sets_into_space show "F i \ Pow (space M)" by auto @@ -662,8 +662,8 @@ have X_in: "X \ events" using tail_events_sets A X by auto - interpret D: dynkin_system "space M" ?D - proof (rule dynkin_systemI) + interpret D: Dynkin_system "space M" ?D + proof (rule Dynkin_systemI) fix D assume "D \ ?D" then show "D \ space M" using sets.sets_into_space by auto next @@ -739,8 +739,8 @@ by (intro sigma_sets_subseteq UN_mono) auto then have "tail_events A \ sigma_sets (space M) ?A" unfolding tail_events_def by auto } - also have "sigma_sets (space M) ?A = dynkin (space M) ?A" - proof (rule sigma_eq_dynkin) + also have "sigma_sets (space M) ?A = Dynkin (space M) ?A" + proof (rule sigma_eq_Dynkin) { fix B n assume "B \ sigma_sets (space M) (\m\{..n}. A m)" then have "B \ space M" by induct (insert A sets.sets_into_space[of _ M], auto) } @@ -763,8 +763,8 @@ then show "a \ b \ (\n. sigma_sets (space M) (\i\{..n}. A i))" by auto qed qed - also have "dynkin (space M) ?A \ ?D" - using \?A \ ?D\ by (auto intro!: D.dynkin_subset) + also have "Dynkin (space M) ?A \ ?D" + using \?A \ ?D\ by (auto intro!: D.Dynkin_subset) finally show ?thesis by auto qed diff -r de09a7261120 -r 2d88bf80c84f src/HOL/ROOT --- a/src/HOL/ROOT Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/ROOT Tue Jan 01 18:33:19 2019 +0100 @@ -65,7 +65,6 @@ "HOL-Library" "HOL-Computational_Algebra" theories - L2_Norm Analysis document_files "root.tex" diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Set.thy --- a/src/HOL/Set.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Set.thy Tue Jan 01 18:33:19 2019 +0100 @@ -426,7 +426,7 @@ (\x\A. P x) \ (\x\B. Q x)" by (simp add: Ball_def) -lemma ball_cong_strong [cong]: +lemma ball_cong_simp [cong]: "\ A = B; \x. x \ B =simp=> P x \ Q x \ \ (\x\A. P x) \ (\x\B. Q x)" by (simp add: simp_implies_def Ball_def) @@ -436,7 +436,7 @@ (\x\A. P x) \ (\x\B. Q x)" by (simp add: Bex_def cong: conj_cong) -lemma bex_cong_strong [cong]: +lemma bex_cong_simp [cong]: "\ A = B; \x. x \ B =simp=> P x \ Q x \ \ (\x\A. P x) \ (\x\B. Q x)" by (simp add: simp_implies_def Bex_def cong: conj_cong) @@ -939,7 +939,7 @@ lemma image_cong: "\ M = N; \x. x \ N \ f x = g x \ \ f ` M = g ` N" by (simp add: image_def) -lemma image_cong_strong: "\ M = N; \x. x \ N =simp=> f x = g x\ \ f ` M = g ` N" +lemma image_cong_simp: "\ M = N; \x. 
x \ N =simp=> f x = g x\ \ f ` M = g ` N" by (simp add: image_def simp_implies_def) lemma image_Int_subset: "f ` (A \ B) \ f ` A \ f ` B" diff -r de09a7261120 -r 2d88bf80c84f src/HOL/Topological_Spaces.thy --- a/src/HOL/Topological_Spaces.thy Tue Jan 01 17:04:53 2019 +0100 +++ b/src/HOL/Topological_Spaces.thy Tue Jan 01 18:33:19 2019 +0100 @@ -1882,7 +1882,7 @@ unfolding continuous_on_def by (intro ball_cong filterlim_cong) (auto simp: eventually_at_filter) -lemma continuous_on_cong_strong: +lemma continuous_on_cong_simp: "s = t \ (\x. x \ t =simp=> f x = g x) \ continuous_on s f \ continuous_on t g" unfolding simp_implies_def by (rule continuous_on_cong) diff -r de09a7261120 -r 2d88bf80c84f src/Pure/General/path.ML --- a/src/Pure/General/path.ML Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/General/path.ML Tue Jan 01 18:33:19 2019 +0100 @@ -51,18 +51,20 @@ local -fun err_elem msg s = error (msg ^ " path element specification " ^ quote s); +fun err_elem msg s = error (msg ^ " path element " ^ quote s); -fun check_elem s = - if s = "" orelse s = "~" orelse s = "~~" then err_elem "Illegal" s +val illegal_elem = ["", "~", "~~", ".", ".."]; +val illegal_char = ["/", "\\", "$", ":", "\"", "'", "<", ">", "|", "?", "*"]; + +val check_elem = tap (fn s => + if member (op =) illegal_elem s then err_elem "Illegal" s else - let - fun check c = - if exists_string (fn c' => c = c') s then - err_elem ("Illegal character " ^ quote c ^ " in") s - else (); - val _ = List.app check ["/", "\\", "$", ":", "\"", "'"]; - in s end; + (s, ()) |-> fold_string (fn c => fn () => + if ord c < 32 then + err_elem ("Illegal control character " ^ string_of_int (ord c) ^ " in") s + else if member (op =) illegal_char c then + err_elem ("Illegal character " ^ quote c ^ " in") s + else ())); in diff -r de09a7261120 -r 2d88bf80c84f src/Pure/General/path.scala --- a/src/Pure/General/path.scala Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/General/path.scala Tue Jan 01 18:33:19 2019 +0100 @@ -24,14 +24,20 @@ private case object Parent extends Elem private def err_elem(msg: String, s: String): Nothing = - error(msg + " path element specification " + quote(s)) + error(msg + " path element " + quote(s)) + + private val illegal_elem = Set("", "~", "~~", ".", "..") + private val illegal_char = "/\\$:\"'<>|?*" private def check_elem(s: String): String = - if (s == "" || s == "~" || s == "~~") err_elem("Illegal", s) + if (illegal_elem.contains(s)) err_elem("Illegal", s) else { - "/\\$:\"'".iterator.foreach(c => - if (s.iterator.contains(c)) - err_elem("Illegal character " + quote(c.toString) + " in", s)) + for (c <- s) { + if (c.toInt < 32) + err_elem("Illegal control character " + c.toInt + " in", s) + if (illegal_char.contains(c)) + err_elem("Illegal character " + quote(c.toString) + " in", s) + } s } @@ -112,6 +118,17 @@ /* encode */ val encode: XML.Encode.T[Path] = (path => XML.Encode.string(path.implode)) + + + /* reserved names */ + + private val reserved_windows: Set[String] = + Set("CON", "PRN", "AUX", "NUL", + "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", + "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9") + + def is_reserved(name: String): Boolean = + Long_Name.explode(name).exists(a => reserved_windows.contains(Word.uppercase(a))) } diff -r de09a7261120 -r 2d88bf80c84f src/Pure/Isar/token.scala --- a/src/Pure/Isar/token.scala Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/Isar/token.scala Tue Jan 01 18:33:19 2019 +0100 @@ -328,6 +328,8 @@ def is_system_name: Boolean = { val s = 
content - is_name && Path.is_wellformed(s) && !s.exists(Symbol.is_ascii_blank(_)) + is_name && Path.is_wellformed(s) && + !s.exists(Symbol.is_ascii_blank(_)) && + !Path.is_reserved(s) } } diff -r de09a7261120 -r 2d88bf80c84f src/Pure/PIDE/document.scala --- a/src/Pure/PIDE/document.scala Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/PIDE/document.scala Tue Jan 01 18:33:19 2019 +0100 @@ -293,6 +293,11 @@ def has_header: Boolean = header != Node.no_header + override def toString: String = + if (is_empty) "empty" + else if (get_blob.isDefined) "blob" + else "node" + def commands: Linear_Set[Command] = _commands.commands def load_commands: List[Command] = _commands.load_commands def load_commands_changed(doc_blobs: Blobs): Boolean = @@ -528,6 +533,7 @@ def node_name: Node.Name def node: Node + def nodes: List[(Node.Name, Node)] def commands_loading: List[Command] def commands_loading_ranges(pred: Node.Name => Boolean): List[Text.Range] @@ -1024,6 +1030,10 @@ val node_name: Node.Name = name val node: Node = version.nodes(name) + def nodes: List[(Node.Name, Node)] = + (node_name :: node.load_commands.flatMap(_.blobs_names)). + map(name => (name, version.nodes(name))) + val commands_loading: List[Command] = if (node_name.is_theory) Nil else version.nodes.commands_loading(node_name) diff -r de09a7261120 -r 2d88bf80c84f src/Pure/PIDE/headless.scala --- a/src/Pure/PIDE/headless.scala Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/PIDE/headless.scala Tue Jan 01 18:33:19 2019 +0100 @@ -10,6 +10,7 @@ import java.io.{File => JFile} import scala.annotation.tailrec +import scala.collection.mutable object Headless @@ -161,13 +162,17 @@ commit_cleanup_delay: Time = default_commit_cleanup_delay, progress: Progress = No_Progress): Use_Theories_Result = { - val dep_theories = + val dependencies = { val import_names = theories.map(thy => resources.import_name(qualifier, master_directory(master_dir), thy) -> Position.none) - resources.dependencies(import_names, progress = progress).check_errors.theories + resources.dependencies(import_names, progress = progress).check_errors } + val dep_theories = dependencies.theories + val dep_files = + dependencies.loaded_files(false).flatMap(_._2). 
+ map(path => Document.Node.Name(resources.append("", path))) val use_theories_state = Synchronized(Use_Theories_State()) @@ -258,7 +263,7 @@ try { session.commands_changed += consumer - resources.load_theories(session, id, dep_theories, progress) + resources.load_theories(session, id, dep_theories, dep_files, progress) use_theories_state.value.await_result check_progress.cancel } @@ -341,9 +346,51 @@ } sealed case class State( - required: Multi_Map[Document.Node.Name, UUID.T] = Multi_Map.empty, - theories: Map[Document.Node.Name, Theory] = Map.empty) + blobs: Map[Document.Node.Name, Document.Blob] = Map.empty, + theories: Map[Document.Node.Name, Theory] = Map.empty, + required: Multi_Map[Document.Node.Name, UUID.T] = Multi_Map.empty) { + /* blobs */ + + def doc_blobs: Document.Blobs = Document.Blobs(blobs) + + def update_blobs(names: List[Document.Node.Name]): (Document.Blobs, State) = + { + val new_blobs = + names.flatMap(name => + { + val bytes = Bytes.read(name.path) + def new_blob: Document.Blob = + { + val text = bytes.text + Document.Blob(bytes, text, Symbol.Text_Chunk(text), changed = true) + } + blobs.get(name) match { + case Some(blob) => if (blob.bytes == bytes) None else Some(name -> new_blob) + case None => Some(name -> new_blob) + } + }) + val blobs1 = (blobs /: new_blobs)(_ + _) + val blobs2 = (blobs /: new_blobs)({ case (map, (a, b)) => map + (a -> b.unchanged) }) + (Document.Blobs(blobs1), copy(blobs = blobs2)) + } + + def blob_edits(name: Document.Node.Name, old_blob: Option[Document.Blob]) + : List[Document.Edit_Text] = + { + val blob = blobs.getOrElse(name, error("Missing blob " + quote(name.toString))) + val text_edits = + old_blob match { + case None => List(Text.Edit.insert(0, blob.source)) + case Some(blob0) => Text.Edit.replace(0, blob0.source, blob.source) + } + if (text_edits.isEmpty) Nil + else List(name -> Document.Node.Blob(blob), name -> Document.Node.Edits(text_edits)) + } + + + /* theories */ + lazy val theory_graph: Graph[Document.Node.Name, Unit] = { val entries = @@ -389,7 +436,7 @@ val edits = theory1.node_edits(Some(theory)) (edits, (node_name, theory1)) } - session.update(Document.Blobs.empty, theory_edits.flatMap(_._1)) + session.update(doc_blobs, theory_edits.flatMap(_._1)) st1.update_theories(theory_edits.map(_._2)) } @@ -403,7 +450,7 @@ val (retained, purged) = all_nodes.partition(retain) val purge_edits = purged.flatMap(name => theories(name).purge_edits) - session.update(Document.Blobs.empty, purge_edits) + session.update(doc_blobs, purge_edits) ((purged, retained), remove_theories(purged)) } @@ -475,6 +522,7 @@ session: Session, id: UUID.T, dep_theories: List[Document.Node.Name], + dep_files: List[Document.Node.Name], progress: Progress) { val loaded_theories = @@ -494,7 +542,7 @@ state.change(st => { - val st1 = st.insert_required(id, dep_theories) + val (doc_blobs1, st1) = st.insert_required(id, dep_theories).update_blobs(dep_files) val theory_edits = for (theory <- loaded_theories) yield { @@ -503,7 +551,11 @@ val edits = theory1.node_edits(st1.theories.get(node_name)) (edits, (node_name, theory1)) } - session.update(Document.Blobs.empty, theory_edits.flatMap(_._1)) + val file_edits = + for { node_name <- dep_files if doc_blobs1.changed(node_name) } + yield st1.blob_edits(node_name, st.blobs.get(node_name)) + + session.update(doc_blobs1, theory_edits.flatMap(_._1) ::: file_edits.flatten) st1.update_theories(theory_edits.map(_._2)) }) } diff -r de09a7261120 -r 2d88bf80c84f src/Pure/PIDE/markup.ML --- a/src/Pure/PIDE/markup.ML Tue Jan 01 
17:04:53 2019 +0100 +++ b/src/Pure/PIDE/markup.ML Tue Jan 01 18:33:19 2019 +0100 @@ -45,6 +45,7 @@ val refN: string val completionN: string val completion: T val no_completionN: string val no_completion: T + val updateN: string val update: T val lineN: string val end_lineN: string val offsetN: string @@ -336,6 +337,8 @@ val (completionN, completion) = markup_elem "completion"; val (no_completionN, no_completion) = markup_elem "no_completion"; +val (updateN, update) = markup_elem "update"; + (* position *) diff -r de09a7261120 -r 2d88bf80c84f src/Pure/PIDE/markup.scala --- a/src/Pure/PIDE/markup.scala Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/PIDE/markup.scala Tue Jan 01 18:33:19 2019 +0100 @@ -115,6 +115,8 @@ val COMPLETION = "completion" val NO_COMPLETION = "no_completion" + val UPDATE = "update" + /* position */ diff -r de09a7261120 -r 2d88bf80c84f src/Pure/PIDE/resources.scala --- a/src/Pure/PIDE/resources.scala Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/PIDE/resources.scala Tue Jan 01 18:33:19 2019 +0100 @@ -90,11 +90,12 @@ } } - def pure_files(syntax: Outer_Syntax, dir: Path): List[Path] = + def pure_files(syntax: Outer_Syntax): List[Path] = { + val pure_dir = Path.explode("~~/src/Pure") val roots = for { (name, _) <- Thy_Header.ml_roots } - yield (dir + Path.explode(name)).expand + yield (pure_dir + Path.explode(name)).expand val files = for { (path, (_, theory)) <- roots zip Thy_Header.ml_roots @@ -344,11 +345,20 @@ graph2.map_node(name, _ => syntax) }) - def loaded_files: List[(String, List[Path])] = + def loaded_files(pure: Boolean): List[(String, List[Path])] = { - theories.map(_.theory) zip - Par_List.map((e: () => List[Path]) => e(), - theories.map(name => resources.loaded_files(loaded_theories.get_node(name.theory), name))) + val loaded_files = + theories.map(_.theory) zip + Par_List.map((e: () => List[Path]) => e(), + theories.map(name => + resources.loaded_files(loaded_theories.get_node(name.theory), name))) + + if (pure) { + val pure_files = resources.pure_files(overall_syntax) + loaded_files.map({ case (name, files) => + (name, if (name == Thy_Header.PURE) pure_files ::: files else files) }) + } + else loaded_files } def imported_files: List[Path] = diff -r de09a7261120 -r 2d88bf80c84f src/Pure/System/isabelle_tool.scala --- a/src/Pure/System/isabelle_tool.scala Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/System/isabelle_tool.scala Tue Jan 01 18:33:19 2019 +0100 @@ -155,6 +155,7 @@ Present.isabelle_tool, Profiling_Report.isabelle_tool, Server.isabelle_tool, + Update.isabelle_tool, Update_Cartouches.isabelle_tool, Update_Comments.isabelle_tool, Update_Header.isabelle_tool, diff -r de09a7261120 -r 2d88bf80c84f src/Pure/Thy/sessions.scala --- a/src/Pure/Thy/sessions.scala Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/Thy/sessions.scala Tue Jan 01 18:33:19 2019 +0100 @@ -303,14 +303,7 @@ val theory_files = dependencies.theories.map(_.path) val loaded_files = - if (inlined_files) { - if (Sessions.is_pure(info.name)) { - val pure_files = resources.pure_files(overall_syntax, info.dir) - dependencies.loaded_files.map({ case (name, files) => - (name, if (name == Thy_Header.PURE) pure_files ::: files else files) }) - } - else dependencies.loaded_files - } + if (inlined_files) dependencies.loaded_files(Sessions.is_pure(info.name)) else Nil val session_files = diff -r de09a7261120 -r 2d88bf80c84f src/Pure/Thy/thy_syntax.scala --- a/src/Pure/Thy/thy_syntax.scala Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/Thy/thy_syntax.scala Tue Jan 01 18:33:19 2019 +0100 @@ -301,7 +301,7 
@@ { val (syntax_changed, nodes0, doc_edits0) = header_edits(resources, previous, edits) - def get_blob(name: Document.Node.Name) = + def get_blob(name: Document.Node.Name): Option[Document.Blob] = doc_blobs.get(name) orElse previous.nodes(name).get_blob def can_import(name: Document.Node.Name): Boolean = diff -r de09a7261120 -r 2d88bf80c84f src/Pure/Tools/update.scala --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Pure/Tools/update.scala Tue Jan 01 18:33:19 2019 +0100 @@ -0,0 +1,132 @@ +/* Title: Pure/Tools/update.scala + Author: Makarius + +Update theory sources based on PIDE markup. +*/ + +package isabelle + + +object Update +{ + def update(options: Options, logic: String, + progress: Progress = No_Progress, + log: Logger = No_Logger, + dirs: List[Path] = Nil, + select_dirs: List[Path] = Nil, + system_mode: Boolean = false, + selection: Sessions.Selection = Sessions.Selection.empty) + { + Build.build_logic(options, logic, build_heap = true, progress = progress, + dirs = dirs ::: select_dirs, system_mode = system_mode, strict = true) + + val dump_options = Dump.make_options(options) + + val deps = + Dump.dependencies(dump_options, progress = progress, + dirs = dirs, select_dirs = select_dirs, selection = selection)._1 + + val resources = + Headless.Resources.make(dump_options, logic, progress = progress, log = log, + session_dirs = dirs ::: select_dirs, + include_sessions = deps.sessions_structure.imports_topological_order) + + def update_xml(xml: XML.Body): XML.Body = + xml flatMap { + case XML.Wrapped_Elem(markup, body1, body2) => + if (markup.name == Markup.UPDATE) update_xml(body1) else update_xml(body2) + case XML.Elem(_, body) => update_xml(body) + case t => List(t) + } + + Dump.session(deps, resources, progress = progress, + process_theory = + (deps: Sessions.Deps, snapshot: Document.Snapshot, status: Document_Status.Node_Status) => + { + for ((node_name, node) <- snapshot.nodes) { + val xml = + snapshot.state.markup_to_XML(snapshot.version, node_name, + Text.Range.full, Markup.Elements(Markup.UPDATE)) + + val source1 = Symbol.encode(XML.content(update_xml(xml))) + if (source1 != Symbol.encode(node.source)) { + progress.echo("Updating " + node_name.path) + File.write(node_name.path, source1) + } + } + }) + } + + + /* Isabelle tool wrapper */ + + val isabelle_tool = + Isabelle_Tool("update", "update theory sources based on PIDE markup", args => + { + var base_sessions: List[String] = Nil + var select_dirs: List[Path] = Nil + var requirements = false + var exclude_session_groups: List[String] = Nil + var all_sessions = false + var dirs: List[Path] = Nil + var session_groups: List[String] = Nil + var logic = Isabelle_System.getenv("ISABELLE_LOGIC") + var options = Options.init() + var system_mode = false + var verbose = false + var exclude_sessions: List[String] = Nil + + val getopts = Getopts(""" +Usage: isabelle update [OPTIONS] [SESSIONS ...] 
+ + Options are: + -B NAME include session NAME and all descendants + -D DIR include session directory and select its sessions + -R operate on requirements of selected sessions + -X NAME exclude sessions from group NAME and all descendants + -a select all sessions + -d DIR include session directory + -g NAME select session group NAME + -l NAME logic session name (default ISABELLE_LOGIC=""" + quote(logic) + """) + -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) + -s system build mode for logic image + -u OPT overide update option: shortcut for "-o update_OPT" + -v verbose + -x NAME exclude session NAME and all descendants + + Update theory sources based on PIDE markup. +""", + "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), + "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), + "R" -> (_ => requirements = true), + "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), + "a" -> (_ => all_sessions = true), + "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), + "g:" -> (arg => session_groups = session_groups ::: List(arg)), + "l:" -> (arg => logic = arg), + "o:" -> (arg => options = options + arg), + "s" -> (_ => system_mode = true), + "u:" -> (arg => options = options + ("update_" + arg)), + "v" -> (_ => verbose = true), + "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) + + val sessions = getopts(args) + + val progress = new Console_Progress(verbose = verbose) + + progress.interrupt_handler { + update(options, logic, + progress = progress, + dirs = dirs, + select_dirs = select_dirs, + selection = Sessions.Selection( + requirements = requirements, + all_sessions = all_sessions, + base_sessions = base_sessions, + exclude_session_groups = exclude_session_groups, + exclude_sessions = exclude_sessions, + session_groups = session_groups, + sessions = sessions)) + } + }) +} diff -r de09a7261120 -r 2d88bf80c84f src/Pure/build-jars --- a/src/Pure/build-jars Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Pure/build-jars Tue Jan 01 18:33:19 2019 +0100 @@ -160,6 +160,7 @@ Tools/simplifier_trace.scala Tools/spell_checker.scala Tools/task_statistics.scala + Tools/update.scala Tools/update_cartouches.scala Tools/update_comments.scala Tools/update_header.scala diff -r de09a7261120 -r 2d88bf80c84f src/Tools/VSCode/src/document_model.scala --- a/src/Tools/VSCode/src/document_model.scala Tue Jan 01 17:04:53 2019 +0100 +++ b/src/Tools/VSCode/src/document_model.scala Tue Jan 01 18:33:19 2019 +0100 @@ -144,7 +144,7 @@ def get_blob: Option[Document.Blob] = if (is_theory) None - else Some((Document.Blob(content.bytes, content.text, content.chunk, pending_edits.nonEmpty))) + else Some(Document.Blob(content.bytes, content.text, content.chunk, pending_edits.nonEmpty)) /* bibtex entries */
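
The documentation change in src/Doc/Isar_Ref/Outer_Syntax.thy and the new checks in src/Pure/General/path.scala above exclude path components that Windows reserves for devices (CON, PRN, AUX, NUL, COM1..COM9, LPT1..LPT9) on all platforms, and token.scala uses the new Path.is_reserved test for system names. The following standalone Scala sketch mirrors only that reserved-name check; explodeLongName is a simplified stand-in for Isabelle's Long_Name.explode and is an assumption of this sketch, not part of the patch.

```scala
// Standalone sketch of the reserved-name check added in path.scala.
object ReservedNames {
  private val reservedWindows: Set[String] =
    Set("CON", "PRN", "AUX", "NUL",
      "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
      "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9")

  // Assumption: long names are dot-separated ("Draft.aux"); the real code
  // uses Long_Name.explode and Word.uppercase from Isabelle/Pure.
  private def explodeLongName(name: String): List[String] =
    name.split('.').toList

  // A name is rejected if any of its components is a Windows device name,
  // regardless of letter case.
  def isReserved(name: String): Boolean =
    explodeLongName(name).exists(a => reservedWindows.contains(a.toUpperCase))
}

object ReservedNamesExample extends App {
  assert(ReservedNames.isReserved("Draft.aux"))      // "aux" is a reserved component
  assert(ReservedNames.isReserved("COM3"))
  assert(!ReservedNames.isReserved("Draft.Example")) // ordinary names pass
}
```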
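The new "isabelle update" tool in src/Pure/Tools/update.scala rewrites theory sources from PIDE markup: update_xml keeps the first body of a wrapped element whose markup name is Markup.UPDATE (the proposed replacement text), recurses into every other element, and the recovered content is written back when it differs from the node's source. The sketch below reproduces only that traversal over a minimal Tree type; the Tree/Wrapped/Elem/Text definitions and the example document are assumptions made so the code runs outside the Isabelle code base, and it is not the tool itself.

```scala
// Self-contained sketch of the update_xml traversal from Tools/update.scala.
object UpdateSketch {
  sealed trait Tree
  case class Wrapped(markup: String, body1: List[Tree], body2: List[Tree]) extends Tree
  case class Elem(markup: String, body: List[Tree]) extends Tree
  case class Text(content: String) extends Tree

  val UPDATE = "update"  // the markup element name added as Markup.UPDATE

  // As in update_xml: keep the replacement body of "update" elements,
  // recurse through all other elements, keep plain text as-is.
  def updateXml(xml: List[Tree]): List[Tree] =
    xml.flatMap {
      case Wrapped(markup, body1, body2) =>
        if (markup == UPDATE) updateXml(body1) else updateXml(body2)
      case Elem(_, body) => updateXml(body)
      case t => List(t)
    }

  // Flatten the surviving text nodes back into source text.
  def content(xml: List[Tree]): String =
    xml.map { case Text(s) => s; case _ => "" }.mkString
}

object UpdateSketchExample extends App {
  import UpdateSketch._
  val doc = List(Elem("theory", List(
    Text("theory Ex imports Main begin "),
    Wrapped(UPDATE, List(Text("(new syntax)")), List(Text("(old syntax)"))),
    Text(" end"))))
  println(content(updateXml(doc)))  // theory Ex imports Main begin (new syntax) end
}
```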