# HG changeset patch # User wenzelm # Date 1413921345 -7200 # Node ID e55fe82f3803ba8b64f7298a7c0d3b1a1a078149 # Parent cb9d84d3e7f270c8541484cc248abfa08a9a0e8a# Parent 790ff9eb2578dd0077290540aab50147c945a018 merged diff -r cb9d84d3e7f2 -r e55fe82f3803 NEWS --- a/NEWS Tue Oct 21 21:10:44 2014 +0200 +++ b/NEWS Tue Oct 21 21:55:45 2014 +0200 @@ -20,6 +20,10 @@ * Improved folding mode "isabelle" based on Isar syntax. Alternatively, the "sidekick" mode may be used for document structure. +* Extended bracket matching based on Isar language structure. System +option jedit_structure_limit determines maximum number of lines to +scan in the buffer. + * Support for BibTeX files: context menu, context-sensitive token marker, SideKick parser. diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Doc/Implementation/Prelim.thy --- a/src/Doc/Implementation/Prelim.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Doc/Implementation/Prelim.thy Tue Oct 21 21:55:45 2014 +0200 @@ -1048,10 +1048,9 @@ text \\medskip That position can be also printed in a message as follows:\ -ML_command \ - writeln - ("Look here" ^ Position.here (Binding.pos_of @{binding here})) -\ +ML_command + \writeln + ("Look here" ^ Position.here (Binding.pos_of @{binding here}))\ text \This illustrates a key virtue of formalized bindings as opposed to raw specifications of base names: the system can use this @@ -1062,8 +1061,6 @@ directly, which is occasionally useful for experimentation and diagnostic purposes:\ -ML_command \ - warning ("Look here" ^ Position.here @{here}) -\ +ML_command \warning ("Look here" ^ Position.here @{here})\ end diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Bounds.thy --- a/src/HOL/Hahn_Banach/Bounds.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Bounds.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,7 +2,7 @@ Author: Gertrud Bauer, TU Munich *) -header {* Bounds *} +header \Bounds\ theory Bounds imports Main "~~/src/HOL/Library/ContNotDenum" @@ -15,12 +15,9 @@ lemmas [elim?] = lub.least lub.upper -definition the_lub :: "'a::order set \ 'a" +definition the_lub :: "'a::order set \ 'a" ("\_" [90] 90) where "the_lub A = The (lub A)" -notation (xsymbols) - the_lub ("\_" [90] 90) - lemma the_lub_equality [elim?]: assumes "lub A x" shows "\A = (x::'a::order)" @@ -28,7 +25,7 @@ interpret lub A x by fact show ?thesis proof (unfold the_lub_def) - from `lub A x` show "The (lub A) = x" + from \lub A x\ show "The (lub A) = x" proof fix x' assume lub': "lub A x'" show "x' = x" diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Function_Norm.thy --- a/src/HOL/Hahn_Banach/Function_Norm.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Function_Norm.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,15 +2,15 @@ Author: Gertrud Bauer, TU Munich *) -header {* The norm of a function *} +header \The norm of a function\ theory Function_Norm imports Normed_Space Function_Order begin -subsection {* Continuous linear forms*} +subsection \Continuous linear forms\ -text {* +text \ A linear form @{text f} on a normed vector space @{text "(V, \\\)"} is \emph{continuous}, iff it is bounded, i.e. 
\begin{center} @@ -19,7 +19,7 @@ In our application no other functions than linear forms are considered, so we can define continuous linear forms as bounded linear forms: -*} +\ locale continuous = linearform + fixes norm :: "_ \ real" ("\_\") @@ -39,9 +39,9 @@ qed -subsection {* The norm of a linear form *} +subsection \The norm of a linear form\ -text {* +text \ The least real number @{text c} for which holds \begin{center} @{text "\x \ V. \f x\ \ c \ \x\"} @@ -70,7 +70,7 @@ @{text fn_norm} is equal to the supremum of @{text B}, if the supremum exists (otherwise it is undefined). -*} +\ locale fn_norm = fixes norm :: "_ \ real" ("\_\") @@ -83,34 +83,34 @@ lemma (in fn_norm) B_not_empty [intro]: "0 \ B V f" by (simp add: B_def) -text {* +text \ The following lemma states that every continuous linear form on a normed space @{text "(V, \\\)"} has a function norm. -*} +\ lemma (in normed_vectorspace_with_fn_norm) fn_norm_works: assumes "continuous V f norm" shows "lub (B V f) (\f\\V)" proof - interpret continuous V f norm by fact - txt {* The existence of the supremum is shown using the + txt \The existence of the supremum is shown using the completeness of the reals. Completeness means, that every - non-empty bounded set of reals has a supremum. *} + non-empty bounded set of reals has a supremum.\ have "\a. lub (B V f) a" proof (rule real_complete) - txt {* First we have to show that @{text B} is non-empty: *} + txt \First we have to show that @{text B} is non-empty:\ have "0 \ B V f" .. then show "\x. x \ B V f" .. - txt {* Then we have to show that @{text B} is bounded: *} + txt \Then we have to show that @{text B} is bounded:\ show "\c. \y \ B V f. y \ c" proof - - txt {* We know that @{text f} is bounded by some value @{text c}. *} + txt \We know that @{text f} is bounded by some value @{text c}.\ from bounded obtain c where c: "\x \ V. \f x\ \ c * \x\" .. - txt {* To prove the thesis, we have to show that there is some + txt \To prove the thesis, we have to show that there is some @{text b}, such that @{text "y \ b"} for all @{text "y \ - B"}. Due to the definition of @{text B} there are two cases. *} + B"}. Due to the definition of @{text B} there are two cases.\ def b \ "max c 0" have "\y \ B V f. y \ b" @@ -121,16 +121,16 @@ assume "y = 0" then show ?thesis unfolding b_def by arith next - txt {* The second case is @{text "y = \f x\ / \x\"} for some - @{text "x \ V"} with @{text "x \ 0"}. *} + txt \The second case is @{text "y = \f x\ / \x\"} for some + @{text "x \ V"} with @{text "x \ 0"}.\ assume "y \ 0" with y obtain x where y_rep: "y = \f x\ * inverse \x\" and x: "x \ V" and neq: "x \ 0" by (auto simp add: B_def divide_inverse) from x neq have gt: "0 < \x\" .. - txt {* The thesis follows by a short calculation using the - fact that @{text f} is bounded. *} + txt \The thesis follows by a short calculation using the + fact that @{text f} is bounded.\ note y_rep also have "\f x\ * inverse \x\ \ (c * \x\) * inverse \x\" @@ -162,7 +162,7 @@ proof - interpret continuous V f norm by fact have "lub (B V f) (\f\\V)" - using `continuous V f norm` by (rule fn_norm_works) + using \continuous V f norm\ by (rule fn_norm_works) from this and b show ?thesis .. qed @@ -173,32 +173,32 @@ proof - interpret continuous V f norm by fact have "lub (B V f) (\f\\V)" - using `continuous V f norm` by (rule fn_norm_works) + using \continuous V f norm\ by (rule fn_norm_works) from this and b show ?thesis .. qed -text {* The norm of a continuous function is always @{text "\ 0"}. 
*} +text \The norm of a continuous function is always @{text "\ 0"}.\ lemma (in normed_vectorspace_with_fn_norm) fn_norm_ge_zero [iff]: assumes "continuous V f norm" shows "0 \ \f\\V" proof - interpret continuous V f norm by fact - txt {* The function norm is defined as the supremum of @{text B}. + txt \The function norm is defined as the supremum of @{text B}. So it is @{text "\ 0"} if all elements in @{text B} are @{text "\ - 0"}, provided the supremum exists and @{text B} is not empty. *} + 0"}, provided the supremum exists and @{text B} is not empty.\ have "lub (B V f) (\f\\V)" - using `continuous V f norm` by (rule fn_norm_works) + using \continuous V f norm\ by (rule fn_norm_works) moreover have "0 \ B V f" .. ultimately show ?thesis .. qed -text {* +text \ \medskip The fundamental property of function norms is: \begin{center} @{text "\f x\ \ \f\ \ \x\"} \end{center} -*} +\ lemma (in normed_vectorspace_with_fn_norm) fn_norm_le_cong: assumes "continuous V f norm" "linearform V f" @@ -214,7 +214,7 @@ also have "f 0 = 0" by rule unfold_locales also have "\\\ = 0" by simp also have a: "0 \ \f\\V" - using `continuous V f norm` by (rule fn_norm_ge_zero) + using \continuous V f norm\ by (rule fn_norm_ge_zero) from x have "0 \ norm x" .. with a have "0 \ \f\\V * \x\" by (simp add: zero_le_mult_iff) finally show "\f x\ \ \f\\V * \x\" . @@ -227,20 +227,20 @@ from x show "0 \ \x\" .. from x and neq have "\f x\ * inverse \x\ \ B V f" by (auto simp add: B_def divide_inverse) - with `continuous V f norm` show "\f x\ * inverse \x\ \ \f\\V" + with \continuous V f norm\ show "\f x\ * inverse \x\ \ \f\\V" by (rule fn_norm_ub) qed finally show ?thesis . qed qed -text {* +text \ \medskip The function norm is the least positive real number for which the following inequation holds: \begin{center} @{text "\f x\ \ c \ \x\"} \end{center} -*} +\ lemma (in normed_vectorspace_with_fn_norm) fn_norm_least [intro?]: assumes "continuous V f norm" @@ -274,7 +274,7 @@ qed finally show ?thesis . qed - qed (insert `continuous V f norm`, simp_all add: continuous_def) + qed (insert \continuous V f norm\, simp_all add: continuous_def) qed end diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Function_Order.thy --- a/src/HOL/Hahn_Banach/Function_Order.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Function_Order.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,15 +2,15 @@ Author: Gertrud Bauer, TU Munich *) -header {* An order on functions *} +header \An order on functions\ theory Function_Order imports Subspace Linearform begin -subsection {* The graph of a function *} +subsection \The graph of a function\ -text {* +text \ We define the \emph{graph} of a (real) function @{text f} with domain @{text F} as the set \begin{center} @@ -19,7 +19,7 @@ So we are modeling partial functions by specifying the domain and the mapping function. We use the term ``function'' also for its graph. -*} +\ type_synonym 'a graph = "('a \ real) set" @@ -38,12 +38,12 @@ using assms unfolding graph_def by blast -subsection {* Functions ordered by domain extension *} +subsection \Functions ordered by domain extension\ -text {* +text \ A function @{text h'} is an extension of @{text h}, iff the graph of @{text h} is a subset of the graph of @{text h'}. -*} +\ lemma graph_extI: "(\x. 
x \ H \ h x = h' x) \ H \ H' @@ -57,12 +57,12 @@ unfolding graph_def by blast -subsection {* Domain and function of a graph *} +subsection \Domain and function of a graph\ -text {* +text \ The inverse functions to @{text graph} are @{text domain} and @{text funct}. -*} +\ definition domain :: "'a graph \ 'a set" where "domain g = {x. \y. (x, y) \ g}" @@ -70,10 +70,10 @@ definition funct :: "'a graph \ ('a \ real)" where "funct g = (\x. (SOME y. (x, y) \ g))" -text {* +text \ The following lemma states that @{text g} is the graph of a function if the relation induced by @{text g} is unique. -*} +\ lemma graph_domain_funct: assumes uniq: "\x y z. (x, y) \ g \ (x, z) \ g \ z = y" @@ -91,18 +91,18 @@ qed -subsection {* Norm-preserving extensions of a function *} +subsection \Norm-preserving extensions of a function\ -text {* +text \ Given a linear form @{text f} on the space @{text F} and a seminorm @{text p} on @{text E}. The set of all linear extensions of @{text f}, to superspaces @{text H} of @{text F}, which are bounded by @{text p}, is defined as follows. -*} +\ definition norm_pres_extensions :: - "'a::{plus, minus, uminus, zero} set \ ('a \ real) \ 'a set \ ('a \ real) + "'a::{plus,minus,uminus,zero} set \ ('a \ real) \ 'a set \ ('a \ real) \ 'a graph set" where "norm_pres_extensions E p F f diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Hahn_Banach.thy --- a/src/HOL/Hahn_Banach/Hahn_Banach.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Hahn_Banach.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,20 +2,20 @@ Author: Gertrud Bauer, TU Munich *) -header {* The Hahn-Banach Theorem *} +header \The Hahn-Banach Theorem\ theory Hahn_Banach imports Hahn_Banach_Lemmas begin -text {* +text \ We present the proof of two different versions of the Hahn-Banach Theorem, closely following @{cite \\S36\ "Heuser:1986"}. -*} +\ -subsection {* The Hahn-Banach Theorem for vector spaces *} +subsection \The Hahn-Banach Theorem for vector spaces\ -text {* +text \ \textbf{Hahn-Banach Theorem.} Let @{text F} be a subspace of a real vector space @{text E}, let @{text p} be a semi-norm on @{text E}, and @{text f} be a linear form defined on @{text F} such that @{text @@ -51,16 +51,16 @@ \end{itemize} \end{enumerate} -*} +\ theorem Hahn_Banach: assumes E: "vectorspace E" and "subspace F E" and "seminorm E p" and "linearform F f" assumes fp: "\x \ F. f x \ p x" shows "\h. linearform E h \ (\x \ F. h x = f x) \ (\x \ E. h x \ p x)" - -- {* Let @{text E} be a vector space, @{text F} a subspace of @{text E}, @{text p} a seminorm on @{text E}, *} - -- {* and @{text f} a linear form on @{text F} such that @{text f} is bounded by @{text p}, *} - -- {* then @{text f} can be extended to a linear form @{text h} on @{text E} in a norm-preserving way. \skp *} + -- \Let @{text E} be a vector space, @{text F} a subspace of @{text E}, @{text p} a seminorm on @{text E},\ + -- \and @{text f} a linear form on @{text F} such that @{text f} is bounded by @{text p},\ + -- \then @{text f} can be extended to a linear form @{text h} on @{text E} in a norm-preserving way. \skp\ proof - interpret vectorspace E by fact interpret subspace F E by fact @@ -69,12 +69,12 @@ def M \ "norm_pres_extensions E p F f" then have M: "M = \" by (simp only:) from E have F: "vectorspace F" .. - note FE = `F \ E` + note FE = \F \ E\ { fix c assume cM: "c \ chains M" and ex: "\x. 
x \ c" have "\c \ M" - -- {* Show that every non-empty chain @{text c} of @{text M} has an upper bound in @{text M}: *} - -- {* @{text "\c"} is greater than any element of the chain @{text c}, so it suffices to show @{text "\c \ M"}. *} + -- \Show that every non-empty chain @{text c} of @{text M} has an upper bound in @{text M}:\ + -- \@{text "\c"} is greater than any element of the chain @{text c}, so it suffices to show @{text "\c \ M"}.\ unfolding M_def proof (rule norm_pres_extensionI) let ?H = "domain (\c)" @@ -104,9 +104,9 @@ qed } then have "\g \ M. \x \ M. g \ x \ x = g" - -- {* With Zorn's Lemma we can conclude that there is a maximal element in @{text M}. \skp *} + -- \With Zorn's Lemma we can conclude that there is a maximal element in @{text M}. \skp\ proof (rule Zorn's_Lemma) - -- {* We show that @{text M} is non-empty: *} + -- \We show that @{text M} is non-empty:\ show "graph F f \ M" unfolding M_def proof (rule norm_pres_extensionI2) @@ -125,18 +125,18 @@ and HE: "H \ E" and FH: "F \ H" and graphs: "graph F f \ graph H h" and hp: "\x \ H. h x \ p x" unfolding M_def .. - -- {* @{text g} is a norm-preserving extension of @{text f}, in other words: *} - -- {* @{text g} is the graph of some linear form @{text h} defined on a subspace @{text H} of @{text E}, *} - -- {* and @{text h} is an extension of @{text f} that is again bounded by @{text p}. \skp *} + -- \@{text g} is a norm-preserving extension of @{text f}, in other words:\ + -- \@{text g} is the graph of some linear form @{text h} defined on a subspace @{text H} of @{text E},\ + -- \and @{text h} is an extension of @{text f} that is again bounded by @{text p}. \skp\ from HE E have H: "vectorspace H" by (rule subspace.vectorspace) have HE_eq: "H = E" - -- {* We show that @{text h} is defined on whole @{text E} by classical contradiction. \skp *} + -- \We show that @{text h} is defined on whole @{text E} by classical contradiction. \skp\ proof (rule classical) assume neq: "H \ E" - -- {* Assume @{text h} is not defined on whole @{text E}. Then show that @{text h} can be extended *} - -- {* in a norm-preserving way to a function @{text h'} with the graph @{text g'}. \skp *} + -- \Assume @{text h} is not defined on whole @{text E}. Then show that @{text h} can be extended\ + -- \in a norm-preserving way to a function @{text h'} with the graph @{text g'}. \skp\ have "\g' \ M. g \ g' \ g \ g'" proof - from HE have "H \ E" .. @@ -147,12 +147,12 @@ proof assume "x' = 0" with H have "x' \ H" by (simp only: vectorspace.zero) - with `x' \ H` show False by contradiction + with \x' \ H\ show False by contradiction qed qed def H' \ "H + lin x'" - -- {* Define @{text H'} as the direct sum of @{text H} and the linear closure of @{text x'}. \skp *} + -- \Define @{text H'} as the direct sum of @{text H} and the linear closure of @{text x'}. \skp\ have HH': "H \ H'" proof (unfold H'_def) from x'E have "vectorspace (lin x')" .. @@ -162,9 +162,9 @@ obtain xi where xi: "\y \ H. - p (y + x') - h y \ xi \ xi \ p (y + x') - h y" - -- {* Pick a real number @{text \} that fulfills certain inequations; this will *} - -- {* be used to establish that @{text h'} is a norm-preserving extension of @{text h}. - \label{ex-xi-use}\skp *} + -- \Pick a real number @{text \} that fulfills certain inequations; this will\ + -- \be used to establish that @{text h'} is a norm-preserving extension of @{text h}. + \label{ex-xi-use}\skp\ proof - from H have "\xi. \y \ H. - p (y + x') - h y \ xi \ xi \ p (y + x') - h y" @@ -191,10 +191,10 @@ def h' \ "\x. 
let (y, a) = SOME (y, a). x = y + a \ x' \ y \ H in h y + a * xi" - -- {* Define the extension @{text h'} of @{text h} to @{text H'} using @{text \}. \skp *} + -- \Define the extension @{text h'} of @{text h} to @{text H'} using @{text \}. \skp\ have "g \ graph H' h' \ g \ graph H' h'" - -- {* @{text h'} is an extension of @{text h} \dots \skp *} + -- \@{text h'} is an extension of @{text h} \dots \skp\ proof show "g \ graph H' h'" proof - @@ -202,7 +202,7 @@ proof (rule graph_extI) fix t assume t: "t \ H" from E HE t have "(SOME (y, a). t = y + a \ x' \ y \ H) = (t, 0)" - using `x' \ H` `x' \ E` `x' \ 0` by (rule decomp_H'_H) + using \x' \ H\ \x' \ E\ \x' \ 0\ by (rule decomp_H'_H) with h'_def show "h t = h' t" by (simp add: Let_def) next from HH' show "H \ H'" .. @@ -225,18 +225,18 @@ then have "(x', h' x') \ graph H' h'" .. with eq have "(x', h' x') \ graph H h" by (simp only:) then have "x' \ H" .. - with `x' \ H` show False by contradiction + with \x' \ H\ show False by contradiction qed with g_rep show ?thesis by simp qed qed moreover have "graph H' h' \ M" - -- {* and @{text h'} is norm-preserving. \skp *} + -- \and @{text h'} is norm-preserving. \skp\ proof (unfold M_def) show "graph H' h' \ norm_pres_extensions E p F f" proof (rule norm_pres_extensionI2) show "linearform H' h'" - using h'_def H'_def HE linearform `x' \ H` `x' \ E` `x' \ 0` E + using h'_def H'_def HE linearform \x' \ H\ \x' \ E\ \x' \ 0\ E by (rule h'_lf) show "H' \ E" unfolding H'_def @@ -245,7 +245,7 @@ show "vectorspace E" by fact from x'E show "lin x' \ E" .. qed - from H `F \ H` HH' show FH': "F \ H'" + from H \F \ H\ HH' show FH': "F \ H'" by (rule vectorspace.subspace_trans) show "graph F f \ graph H' h'" proof (rule graph_extI) @@ -271,15 +271,15 @@ from FH' show "F \ H'" .. qed show "\x \ H'. h' x \ p x" - using h'_def H'_def `x' \ H` `x' \ E` `x' \ 0` E HE - `seminorm E p` linearform and hp xi + using h'_def H'_def \x' \ H\ \x' \ E\ \x' \ 0\ E HE + \seminorm E p\ linearform and hp xi by (rule h'_norm_pres) qed qed ultimately show ?thesis .. qed then have "\ (\x \ M. g \ x \ g = x)" by simp - -- {* So the graph @{text g} of @{text h} cannot be maximal. Contradiction! \skp *} + -- \So the graph @{text g} of @{text h} cannot be maximal. Contradiction! \skp\ with gx show "H = E" by contradiction qed @@ -297,9 +297,9 @@ qed -subsection {* Alternative formulation *} +subsection \Alternative formulation\ -text {* +text \ The following alternative formulation of the Hahn-Banach Theorem\label{abs-Hahn-Banach} uses the fact that for a real linear form @{text f} and a seminorm @{text p} the following inequations @@ -311,7 +311,7 @@ @{text "\x \ H. h x \ p x"} \\ \end{tabular} \end{center} -*} +\ theorem abs_Hahn_Banach: assumes E: "vectorspace E" and FE: "subspace F E" @@ -342,13 +342,13 @@ qed -subsection {* The Hahn-Banach Theorem for normed spaces *} +subsection \The Hahn-Banach Theorem for normed spaces\ -text {* +text \ Every continuous linear form @{text f} on a subspace @{text F} of a norm space @{text E}, can be extended to a continuous linear form @{text g} on @{text E} such that @{text "\f\ = \g\"}. 
-*} +\ theorem norm_Hahn_Banach: fixes V and norm ("\_\") @@ -375,23 +375,23 @@ have ge_zero: "0 \ \f\\F" by (rule normed_vectorspace_with_fn_norm.fn_norm_ge_zero [OF normed_vectorspace_with_fn_norm.intro, - OF F_norm `continuous F f norm` , folded B_def fn_norm_def]) - txt {* We define a function @{text p} on @{text E} as follows: - @{text "p x = \f\ \ \x\"} *} + OF F_norm \continuous F f norm\ , folded B_def fn_norm_def]) + txt \We define a function @{text p} on @{text E} as follows: + @{text "p x = \f\ \ \x\"}\ def p \ "\x. \f\\F * \x\" - txt {* @{text p} is a seminorm on @{text E}: *} + txt \@{text p} is a seminorm on @{text E}:\ have q: "seminorm E p" proof fix x y a assume x: "x \ E" and y: "y \ E" - txt {* @{text p} is positive definite: *} + txt \@{text p} is positive definite:\ have "0 \ \f\\F" by (rule ge_zero) moreover from x have "0 \ \x\" .. ultimately show "0 \ p x" by (simp add: p_def zero_le_mult_iff) - txt {* @{text p} is absolutely homogeneous: *} + txt \@{text p} is absolutely homogeneous:\ show "p (a \ x) = \a\ * p x" proof - @@ -402,7 +402,7 @@ finally show ?thesis . qed - txt {* Furthermore, @{text p} is subadditive: *} + txt \Furthermore, @{text p} is subadditive:\ show "p (x + y) \ p x + p y" proof - @@ -417,22 +417,22 @@ qed qed - txt {* @{text f} is bounded by @{text p}. *} + txt \@{text f} is bounded by @{text p}.\ have "\x \ F. \f x\ \ p x" proof fix x assume "x \ F" - with `continuous F f norm` and linearform + with \continuous F f norm\ and linearform show "\f x\ \ p x" unfolding p_def by (rule normed_vectorspace_with_fn_norm.fn_norm_le_cong [OF normed_vectorspace_with_fn_norm.intro, OF F_norm, folded B_def fn_norm_def]) qed - txt {* Using the fact that @{text p} is a seminorm and @{text f} is bounded + txt \Using the fact that @{text p} is a seminorm and @{text f} is bounded by @{text p} we can apply the Hahn-Banach Theorem for real vector spaces. So @{text f} can be extended in a norm-preserving way to - some function @{text g} on the whole vector space @{text E}. *} + some function @{text g} on the whole vector space @{text E}.\ with E FE linearform q obtain g where linearformE: "linearform E g" @@ -440,7 +440,7 @@ and b: "\x \ E. \g x\ \ p x" by (rule abs_Hahn_Banach [elim_format]) iprover - txt {* We furthermore have to show that @{text g} is also continuous: *} + txt \We furthermore have to show that @{text g} is also continuous:\ have g_cont: "continuous E g norm" using linearformE proof @@ -449,11 +449,11 @@ by (simp only: p_def) qed - txt {* To complete the proof, we show that @{text "\g\ = \f\"}. *} + txt \To complete the proof, we show that @{text "\g\ = \f\"}.\ have "\g\\E = \f\\F" proof (rule order_antisym) - txt {* + txt \ First we show @{text "\g\ \ \f\"}. The function norm @{text "\g\"} is defined as the smallest @{text "c \ \"} such that \begin{center} @@ -467,7 +467,7 @@ @{text "\x \ E. \g x\ \ \f\ \ \x\"} \end{tabular} \end{center} - *} +\ from g_cont _ ge_zero show "\g\\E \ \f\\F" @@ -477,7 +477,7 @@ by (simp only: p_def) qed - txt {* The other direction is achieved by a similar argument. 
*} + txt \The other direction is achieved by a similar argument.\ show "\f\\F \ \g\\E" proof (rule normed_vectorspace_with_fn_norm.fn_norm_least diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Hahn_Banach_Ext_Lemmas.thy --- a/src/HOL/Hahn_Banach/Hahn_Banach_Ext_Lemmas.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Hahn_Banach_Ext_Lemmas.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,13 +2,13 @@ Author: Gertrud Bauer, TU Munich *) -header {* Extending non-maximal functions *} +header \Extending non-maximal functions\ theory Hahn_Banach_Ext_Lemmas imports Function_Norm begin -text {* +text \ In this section the following context is presumed. Let @{text E} be a real vector space with a seminorm @{text q} on @{text E}. @{text F} is a subspace of @{text E} and @{text f} a linear function on @@ -38,7 +38,7 @@ @{text "\u \ F. \v \ F. a u \ b v"} \end{tabular} \end{center} -*} +\ lemma ex_xi: assumes "vectorspace F" @@ -46,9 +46,9 @@ shows "\xi::real. \y \ F. a y \ xi \ xi \ b y" proof - interpret vectorspace F by fact - txt {* From the completeness of the reals follows: + txt \From the completeness of the reals follows: The set @{text "S = {a u. u \ F}"} has a supremum, if it is - non-empty and has an upper bound. *} + non-empty and has an upper bound.\ let ?S = "{a u | u. u \ F}" have "\xi. lub ?S xi" @@ -81,11 +81,11 @@ } ultimately show "\xi. \y \ F. a y \ xi \ xi \ b y" by blast qed -text {* +text \ \medskip The function @{text h'} is defined as a @{text "h' x = h y + a \ \"} where @{text "x = y + a \ \"} is a linear extension of @{text h} to @{text H'}. -*} +\ lemma h'_lf: assumes h'_def: "h' \ \x. let (y, a) = @@ -101,10 +101,10 @@ interpret vectorspace E by fact show ?thesis proof - note E = `vectorspace E` + note E = \vectorspace E\ have H': "vectorspace H'" proof (unfold H'_def) - from `x0 \ E` + from \x0 \ E\ have "lin x0 \ E" .. with HE show "vectorspace (H + lin x0)" using E .. qed @@ -121,7 +121,7 @@ unfolding H'_def sum_def lin_def by blast have ya: "y1 + y2 = y \ a1 + a2 = a" using E HE _ y x0 - proof (rule decomp_H') txt_raw {* \label{decomp-H-use} *} + proof (rule decomp_H') txt_raw \\label{decomp-H-use}\ from HE y1 y2 show "y1 + y2 \ H" by (rule subspace.add_closed) from x0 and HE y y1 y2 @@ -188,8 +188,8 @@ qed qed -text {* \medskip The linear extension @{text h'} of @{text h} - is bounded by the seminorm @{text p}. *} +text \\medskip The linear extension @{text h'} of @{text h} + is bounded by the seminorm @{text p}.\ lemma h'_norm_pres: assumes h'_def: "h' \ \x. let (y, a) = @@ -229,8 +229,8 @@ also from x0 y' z have "p y = p (y + a \ x0)" by simp finally show ?thesis . next - txt {* In the case @{text "a < 0"}, we use @{text "a\<^sub>1"} - with @{text ya} taken as @{text "y / a"}: *} + txt \In the case @{text "a < 0"}, we use @{text "a\<^sub>1"} + with @{text ya} taken as @{text "y / a"}:\ assume lz: "a < 0" then have nz: "a \ 0" by simp from a1 ay have "- p (inverse a \ y + x0) - h (inverse a \ y) \ xi" .. @@ -251,8 +251,8 @@ finally have "a * xi \ p (y + a \ x0) - h y" . then show ?thesis by simp next - txt {* In the case @{text "a > 0"}, we use @{text "a\<^sub>2"} - with @{text ya} taken as @{text "y / a"}: *} + txt \In the case @{text "a > 0"}, we use @{text "a\<^sub>2"} + with @{text ya} taken as @{text "y / a"}:\ assume gz: "0 < a" then have nz: "a \ 0" by simp from a2 ay have "xi \ p (inverse a \ y + x0) - h (inverse a \ y)" .. 
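(Editorial note, not part of the changeset.) The txt blocks converted above in Hahn_Banach_Ext_Lemmas.thy refer to the argument that the extension h' stays below the seminorm p. As a hedged sketch of that step in plain LaTeX notation (amsmath assumed), with xi, x0, y, a, h, h', p and the bound a_2 taken from h'_norm_pres above, the case 0 < a works out as follows; the case a < 0 is symmetric, using the other bound a_1 at y/a.

% Editorial sketch of the 0 < a case of h'_norm_pres; not a line of the patch.
\begin{align*}
  \xi &\le p\bigl(\tfrac{1}{a}\,y + x_0\bigr) - h\bigl(\tfrac{1}{a}\,y\bigr)
    && \text{bound } a_2 \text{ instantiated at } y/a \in H \\
  a\,\xi &\le a\,p\bigl(\tfrac{1}{a}\,y + x_0\bigr) - a\,h\bigl(\tfrac{1}{a}\,y\bigr)
    = p(y + a \cdot x_0) - h\,y
    && \text{multiply by } a > 0;\ \text{homogeneity of } p,\ \text{linearity of } h \\
  h'(y + a \cdot x_0) &= h\,y + a\,\xi \le p(y + a \cdot x_0)
    && \text{definition of } h'.
\end{align*}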
diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Hahn_Banach_Sup_Lemmas.thy --- a/src/HOL/Hahn_Banach/Hahn_Banach_Sup_Lemmas.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Hahn_Banach_Sup_Lemmas.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,13 +2,13 @@ Author: Gertrud Bauer, TU Munich *) -header {* The supremum w.r.t.~the function order *} +header \The supremum w.r.t.~the function order\ theory Hahn_Banach_Sup_Lemmas imports Function_Norm Zorn_Lemma begin -text {* +text \ This section contains some lemmas that will be used in the proof of the Hahn-Banach Theorem. In this section the following context is presumed. Let @{text E} be a real vector space with a seminorm @@ -22,7 +22,7 @@ the function @{text f} and let @{text "graph H h"} be the supremum of @{text c}. Every element in @{text H} is member of one of the elements of the chain. -*} +\ lemmas [dest?] = chainsD lemmas chainsE2 [elim?] = chainsD2 [elim_format] @@ -53,13 +53,13 @@ ultimately show ?thesis using * by blast qed -text {* +text \ \medskip Let @{text c} be a chain of norm-preserving extensions of the function @{text f} and let @{text "graph H h"} be the supremum of @{text c}. Every element in the domain @{text H} of the supremum function is member of the domain @{text H'} of some function @{text h'}, such that @{text h} extends @{text h'}. -*} +\ lemma some_H'h': assumes M: "M = norm_pres_extensions E p F f" @@ -81,12 +81,12 @@ ultimately show ?thesis using * by blast qed -text {* +text \ \medskip Any two elements @{text x} and @{text y} in the domain @{text H} of the supremum function @{text h} are both in the domain @{text H'} of some function @{text h'}, such that @{text h} extends @{text h'}. -*} +\ lemma some_H'h'2: assumes M: "M = norm_pres_extensions E p F f" @@ -99,8 +99,8 @@ \ linearform H' h' \ H' \ E \ F \ H' \ graph F f \ graph H' h' \ (\x \ H'. h' x \ p x)" proof - - txt {* @{text y} is in the domain @{text H''} of some function @{text h''}, - such that @{text h} extends @{text h''}. *} + txt \@{text y} is in the domain @{text H''} of some function @{text h''}, + such that @{text h} extends @{text h''}.\ from M cM u and y obtain H' h' where y_hy: "(y, h y) \ graph H' h'" @@ -110,8 +110,8 @@ "graph F f \ graph H' h'" "\x \ H'. h' x \ p x" by (rule some_H'h't [elim_format]) blast - txt {* @{text x} is in the domain @{text H'} of some function @{text h'}, - such that @{text h} extends @{text h'}. *} + txt \@{text x} is in the domain @{text H'} of some function @{text h'}, + such that @{text h} extends @{text h'}.\ from M cM u and x obtain H'' h'' where x_hx: "(x, h x) \ graph H'' h''" @@ -121,10 +121,10 @@ "graph F f \ graph H'' h''" "\x \ H''. h'' x \ p x" by (rule some_H'h't [elim_format]) blast - txt {* Since both @{text h'} and @{text h''} are elements of the chain, + txt \Since both @{text h'} and @{text h''} are elements of the chain, @{text h''} is an extension of @{text h'} or vice versa. Thus both @{text x} and @{text y} are contained in the greater - one. \label{cases1}*} + one. \label{cases1}\ from cM c'' c' have "graph H'' h'' \ graph H' h' \ graph H' h' \ graph H'' h''" (is "?case1 \ ?case2") .. @@ -151,10 +151,10 @@ qed qed -text {* +text \ \medskip The relation induced by the graph of the supremum of a chain @{text c} is definite, i.~e.~t is the graph of a function. 
-*} +\ lemma sup_definite: assumes M_def: "M \ norm_pres_extensions E p F f" @@ -175,9 +175,9 @@ then obtain H2 h2 where G2_rep: "G2 = graph H2 h2" unfolding M_def by blast - txt {* @{text "G\<^sub>1"} is contained in @{text "G\<^sub>2"} + txt \@{text "G\<^sub>1"} is contained in @{text "G\<^sub>2"} or vice versa, since both @{text "G\<^sub>1"} and @{text - "G\<^sub>2"} are members of @{text c}. \label{cases2}*} + "G\<^sub>2"} are members of @{text c}. \label{cases2}\ from cM G1 G2 have "G1 \ G2 \ G2 \ G1" (is "?case1 \ ?case2") .. then show ?thesis @@ -200,14 +200,14 @@ qed qed -text {* +text \ \medskip The limit function @{text h} is linear. Every element @{text x} in the domain of @{text h} is in the domain of a function @{text h'} in the chain of norm preserving extensions. Furthermore, @{text h} is an extension of @{text h'} so the function values of @{text x} are identical for @{text h'} and @{text h}. Finally, the function @{text h'} is linear by construction of @{text M}. -*} +\ lemma sup_lf: assumes M: "M = norm_pres_extensions E p F f" @@ -255,12 +255,12 @@ qed qed -text {* +text \ \medskip The limit of a non-empty chain of norm preserving extensions of @{text f} is an extension of @{text f}, since every element of the chain is an extension of @{text f} and the supremum is an extension for every element of the chain. -*} +\ lemma sup_ext: assumes graph: "graph H h = \c" @@ -281,12 +281,12 @@ finally show ?thesis . qed -text {* +text \ \medskip The domain @{text H} of the limit function is a superspace of @{text F}, since @{text F} is a subset of @{text H}. The existence of the @{text 0} element in @{text F} and the closure properties follow from the fact that @{text F} is a vector space. -*} +\ lemma sup_supF: assumes graph: "graph H h = \c" @@ -306,10 +306,10 @@ with FE show "a \ x \ F" by (rule subspace.mult_closed) qed -text {* +text \ \medskip The domain @{text H} of the limit function is a subspace of @{text E}. -*} +\ lemma sup_subE: assumes graph: "graph H h = \c" @@ -362,10 +362,10 @@ qed qed -text {* +text \ \medskip The limit function is bounded by the norm @{text p} as well, since all elements in the chain are bounded by @{text p}. -*} +\ lemma sup_norm_pres: assumes graph: "graph H h = \c" @@ -383,7 +383,7 @@ finally show "h x \ p x" . qed -text {* +text \ \medskip The following lemma is a property of linear forms on real vector spaces. It will be used for the lemma @{text abs_Hahn_Banach} (see page \pageref{abs-Hahn_Banach}). \label{abs-ineq-iff} For real @@ -394,7 +394,7 @@ @{text "\x \ H. h x \ p x"} \\ \end{tabular} \end{center} -*} +\ lemma abs_ineq_iff: assumes "subspace H E" and "vectorspace E" and "seminorm E p" @@ -405,7 +405,7 @@ interpret vectorspace E by fact interpret seminorm E p by fact interpret linearform H h by fact - have H: "vectorspace H" using `vectorspace E` .. + have H: "vectorspace H" using \vectorspace E\ .. { assume l: ?L show ?R @@ -422,13 +422,13 @@ fix x assume x: "x \ H" show "\a b :: real. - a \ b \ b \ a \ \b\ \ a" by arith - from `linearform H h` and H x + from \linearform H h\ and H x have "- h x = h (- x)" by (rule linearform.neg [symmetric]) also from H x have "- x \ H" by (rule vectorspace.neg_closed) with r have "h (- x) \ p (- x)" .. also have "\ = p x" - using `seminorm E p` `vectorspace E` + using \seminorm E p\ \vectorspace E\ proof (rule seminorm.minus) from x show "x \ E" .. 
qed diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Linearform.thy --- a/src/HOL/Hahn_Banach/Linearform.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Linearform.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,16 +2,16 @@ Author: Gertrud Bauer, TU Munich *) -header {* Linearforms *} +header \Linearforms\ theory Linearform imports Vector_Space begin -text {* +text \ A \emph{linear form} is a function on a vector space into the reals that is additive and multiplicative. -*} +\ locale linearform = fixes V :: "'a\{minus, plus, zero, uminus} set" and f @@ -40,11 +40,11 @@ assume x: "x \ V" and y: "y \ V" then have "x - y = x + - y" by (rule diff_eq1) also have "f \ = f x + f (- y)" by (rule add) (simp_all add: x y) - also have "f (- y) = - f y" using `vectorspace V` y by (rule neg) + also have "f (- y) = - f y" using \vectorspace V\ y by (rule neg) finally show ?thesis by simp qed -text {* Every linear form yields @{text 0} for the @{text 0} vector. *} +text \Every linear form yields @{text 0} for the @{text 0} vector.\ lemma (in linearform) zero [iff]: assumes "vectorspace V" @@ -52,7 +52,7 @@ proof - interpret vectorspace V by fact have "f 0 = f (0 - 0)" by simp - also have "\ = f 0 - f 0" using `vectorspace V` by (rule diff) simp_all + also have "\ = f 0 - f 0" using \vectorspace V\ by (rule diff) simp_all also have "\ = 0" by simp finally show ?thesis . qed diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Normed_Space.thy --- a/src/HOL/Hahn_Banach/Normed_Space.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Normed_Space.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,19 +2,19 @@ Author: Gertrud Bauer, TU Munich *) -header {* Normed vector spaces *} +header \Normed vector spaces\ theory Normed_Space imports Subspace begin -subsection {* Quasinorms *} +subsection \Quasinorms\ -text {* +text \ A \emph{seminorm} @{text "\\\"} is a function on a real vector space into the reals that has the following properties: it is positive definite, absolute homogeneous and subadditive. -*} +\ locale seminorm = fixes V :: "'a\{minus, plus, zero, uminus} set" @@ -54,23 +54,23 @@ qed -subsection {* Norms *} +subsection \Norms\ -text {* +text \ A \emph{norm} @{text "\\\"} is a seminorm that maps only the @{text 0} vector to @{text 0}. -*} +\ locale norm = seminorm + assumes zero_iff [iff]: "x \ V \ (\x\ = 0) = (x = 0)" -subsection {* Normed vector spaces *} +subsection \Normed vector spaces\ -text {* +text \ A vector space together with a norm is called a \emph{normed space}. -*} +\ locale normed_vectorspace = vectorspace + norm @@ -90,9 +90,9 @@ finally show ?thesis . qed -text {* +text \ Any subspace of a normed vector space is again a normed vectorspace. -*} +\ lemma subspace_normed_vs [intro?]: fixes F E norm diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Subspace.thy --- a/src/HOL/Hahn_Banach/Subspace.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Subspace.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,19 +2,19 @@ Author: Gertrud Bauer, TU Munich *) -header {* Subspaces *} +header \Subspaces\ theory Subspace imports Vector_Space "~~/src/HOL/Library/Set_Algebras" begin -subsection {* Definition *} +subsection \Definition\ -text {* +text \ A non-empty subset @{text U} of a vector space @{text V} is a \emph{subspace} of @{text V}, iff @{text U} is closed under addition and scalar multiplication. 
-*} +\ locale subspace = fixes U :: "'a\{minus, plus, zero, uminus} set" and V @@ -49,11 +49,11 @@ from x y show ?thesis by (simp add: diff_eq1 negate_eq1) qed -text {* +text \ \medskip Similar as for linear spaces, the existence of the zero element in every subspace follows from the non-emptiness of the carrier set and by vector space laws. -*} +\ lemma (in subspace) zero [intro]: assumes "vectorspace V" @@ -63,7 +63,7 @@ have "U \ {}" by (rule non_empty) then obtain x where x: "x \ U" by blast then have "x \ V" .. then have "0 = x - x" by simp - also from `vectorspace V` x x have "\ \ U" by (rule diff_closed) + also from \vectorspace V\ x x have "\ \ U" by (rule diff_closed) finally show ?thesis . qed @@ -76,7 +76,7 @@ from x show ?thesis by (simp add: negate_eq1) qed -text {* \medskip Further derived laws: every subspace is a vector space. *} +text \\medskip Further derived laws: every subspace is a vector space.\ lemma (in subspace) vectorspace [iff]: assumes "vectorspace V" @@ -104,7 +104,7 @@ qed -text {* The subspace relation is reflexive. *} +text \The subspace relation is reflexive.\ lemma (in vectorspace) subspace_refl [intro]: "V \ V" proof @@ -117,7 +117,7 @@ from x show "a \ x \ V" by simp qed -text {* The subspace relation is transitive. *} +text \The subspace relation is transitive.\ lemma (in vectorspace) subspace_trans [trans]: "U \ V \ V \ W \ U \ W" @@ -136,14 +136,14 @@ qed -subsection {* Linear closure *} +subsection \Linear closure\ -text {* +text \ The \emph{linear closure} of a vector @{text x} is the set of all scalar multiples of @{text x}. -*} +\ -definition lin :: "('a::{minus, plus, zero}) \ 'a set" +definition lin :: "('a::{minus,plus,zero}) \ 'a set" where "lin x = {a \ x | a. True}" lemma linI [intro]: "y = a \ x \ y \ lin x" @@ -156,7 +156,7 @@ unfolding lin_def by blast -text {* Every vector is contained in its linear closure. *} +text \Every vector is contained in its linear closure.\ lemma (in vectorspace) x_lin_x [iff]: "x \ V \ x \ lin x" proof - @@ -172,7 +172,7 @@ then show "0 = 0 \ x" by simp qed -text {* Any linear closure is a subspace. *} +text \Any linear closure is a subspace.\ lemma (in vectorspace) lin_subspace [intro]: assumes x: "x \ V" @@ -208,25 +208,25 @@ qed -text {* Any linear closure is a vector space. *} +text \Any linear closure is a vector space.\ lemma (in vectorspace) lin_vectorspace [intro]: assumes "x \ V" shows "vectorspace (lin x)" proof - - from `x \ V` have "subspace (lin x) V" + from \x \ V\ have "subspace (lin x) V" by (rule lin_subspace) from this and vectorspace_axioms show ?thesis by (rule subspace.vectorspace) qed -subsection {* Sum of two vectorspaces *} +subsection \Sum of two vectorspaces\ -text {* +text \ The \emph{sum} of two vectorspaces @{text U} and @{text V} is the set of all sums of elements from @{text U} and @{text V}. -*} +\ lemma sum_def: "U + V = {u + v | u v. u \ U \ v \ V}" unfolding set_plus_def by auto @@ -243,7 +243,7 @@ "u \ U \ v \ V \ u + v \ U + V" unfolding sum_def by blast -text {* @{text U} is a subspace of @{text "U + V"}. *} +text \@{text U} is a subspace of @{text "U + V"}.\ lemma subspace_sum1 [iff]: assumes "vectorspace U" "vectorspace V" @@ -267,7 +267,7 @@ qed qed -text {* The sum of two subspaces is again a subspace. *} +text \The sum of two subspaces is again a subspace.\ lemma sum_subspace [intro?]: assumes "subspace U E" "vectorspace E" "subspace V E" @@ -280,8 +280,8 @@ proof have "0 \ U + V" proof - show "0 \ U" using `vectorspace E` .. - show "0 \ V" using `vectorspace E` .. 
+ show "0 \ U" using \vectorspace E\ .. + show "0 \ V" using \vectorspace E\ .. show "(0::'a) = 0 + 0" by simp qed then show "U + V \ {}" by blast @@ -316,22 +316,22 @@ qed qed -text{* The sum of two subspaces is a vectorspace. *} +text\The sum of two subspaces is a vectorspace.\ lemma sum_vs [intro?]: "U \ E \ V \ E \ vectorspace E \ vectorspace (U + V)" by (rule subspace.vectorspace) (rule sum_subspace) -subsection {* Direct sums *} +subsection \Direct sums\ -text {* +text \ The sum of @{text U} and @{text V} is called \emph{direct}, iff the zero element is the only common element of @{text U} and @{text V}. For every element @{text x} of the direct sum of @{text U} and @{text V} the decomposition in @{text "x = u + v"} with @{text "u \ U"} and @{text "v \ V"} is unique. -*} +\ lemma decomp: assumes "vectorspace E" "subspace U E" "subspace V E" @@ -347,9 +347,9 @@ show ?thesis proof have U: "vectorspace U" (* FIXME: use interpret *) - using `subspace U E` `vectorspace E` by (rule subspace.vectorspace) + using \subspace U E\ \vectorspace E\ by (rule subspace.vectorspace) have V: "vectorspace V" - using `subspace V E` `vectorspace E` by (rule subspace.vectorspace) + using \subspace V E\ \vectorspace E\ by (rule subspace.vectorspace) from u1 u2 v1 v2 and sum have eq: "u1 - u2 = v2 - v1" by (simp add: add_diff_swap) from u1 u2 have u: "u1 - u2 \ U" @@ -374,14 +374,14 @@ qed qed -text {* +text \ An application of the previous lemma will be used in the proof of the Hahn-Banach Theorem (see page \pageref{decomp-H-use}): for any element @{text "y + a \ x\<^sub>0"} of the direct sum of a vectorspace @{text H} and the linear closure of @{text "x\<^sub>0"} the components @{text "y \ H"} and @{text a} are uniquely determined. -*} +\ lemma decomp_H': assumes "vectorspace E" "subspace H E" @@ -414,31 +414,31 @@ from x have "x \ H" .. with xx' have "inverse a \ a \ x' \ H" by simp with a and x' have "x' \ H" by (simp add: mult_assoc2) - with `x' \ H` show ?thesis by contradiction + with \x' \ H\ show ?thesis by contradiction qed then show "x \ {0}" .. qed show "{0} \ H \ lin x'" proof - - have "0 \ H" using `vectorspace E` .. - moreover have "0 \ lin x'" using `x' \ E` .. + have "0 \ H" using \vectorspace E\ .. + moreover have "0 \ lin x'" using \x' \ E\ .. ultimately show ?thesis by blast qed qed - show "lin x' \ E" using `x' \ E` .. - qed (rule `vectorspace E`, rule `subspace H E`, rule y1, rule y2, rule eq) + show "lin x' \ E" using \x' \ E\ .. + qed (rule \vectorspace E\, rule \subspace H E\, rule y1, rule y2, rule eq) then show "y1 = y2" .. from c have "a1 \ x' = a2 \ x'" .. with x' show "a1 = a2" by (simp add: mult_right_cancel) qed qed -text {* +text \ Since for any element @{text "y + a \ x'"} of the direct sum of a vectorspace @{text H} and the linear closure of @{text x'} the components @{text "y \ H"} and @{text a} are unique, it follows from @{text "y \ H"} that @{text "a = 0"}. -*} +\ lemma decomp_H'_H: assumes "vectorspace E" "subspace H E" @@ -456,16 +456,16 @@ proof (rule decomp_H') from ya x' show "y + a \ x' = t + 0 \ x'" by simp from ya show "y \ H" .. - qed (rule `vectorspace E`, rule `subspace H E`, rule t, (rule x')+) + qed (rule \vectorspace E\, rule \subspace H E\, rule t, (rule x')+) with t x' show "(y, a) = (y + a \ x', 0)" by simp qed qed -text {* +text \ The components @{text "y \ H"} and @{text a} in @{text "y + a \ x'"} are unique, so the function @{text h'} defined by @{text "h' (y + a \ x') = h y + a \ \"} is definite. 
-*} +\ lemma h'_definite: fixes H @@ -498,7 +498,7 @@ from xq show "fst q \ H" .. from xp and xq show "fst p + snd p \ x' = fst q + snd q \ x'" by simp - qed (rule `vectorspace E`, rule `subspace H E`, (rule x')+) + qed (rule \vectorspace E\, rule \subspace H E\, (rule x')+) then show ?thesis by (cases p, cases q) simp qed qed diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Vector_Space.thy --- a/src/HOL/Hahn_Banach/Vector_Space.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Vector_Space.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,32 +2,27 @@ Author: Gertrud Bauer, TU Munich *) -header {* Vector spaces *} +header \Vector spaces\ theory Vector_Space imports Complex_Main Bounds begin -subsection {* Signature *} +subsection \Signature\ -text {* +text \ For the definition of real vector spaces a type @{typ 'a} of the sort @{text "{plus, minus, zero}"} is considered, on which a real scalar multiplication @{text \} is declared. -*} +\ consts - prod :: "real \ 'a::{plus, minus, zero} \ 'a" (infixr "'(*')" 70) - -notation (xsymbols) - prod (infixr "\" 70) -notation (HTML output) - prod (infixr "\" 70) + prod :: "real \ 'a::{plus,minus,zero} \ 'a" (infixr "\" 70) -subsection {* Vector space laws *} +subsection \Vector space laws\ -text {* +text \ A \emph{vector space} is a non-empty set @{text V} of elements from @{typ 'a} with the following vector space laws: The set @{text V} is closed under addition and scalar multiplication, addition is @@ -36,7 +31,7 @@ addition. Addition and multiplication are distributive; scalar multiplication is associative and the real number @{text "1"} is the neutral element of scalar multiplication. -*} +\ locale vectorspace = fixes V @@ -83,8 +78,8 @@ theorems add_ac = add_assoc add_commute add_left_commute -text {* The existence of the zero element of a vector space - follows from the non-emptiness of carrier set. *} +text \The existence of the zero element of a vector space + follows from the non-emptiness of carrier set.\ lemma zero [iff]: "0 \ V" proof - @@ -127,7 +122,7 @@ diff_mult_distrib1 diff_mult_distrib2 -text {* \medskip Further derived laws: *} +text \\medskip Further derived laws:\ lemma mult_zero_left [simp]: "x \ V \ 0 \ x = 0" proof - @@ -236,11 +231,11 @@ proof (rule classical) assume a: "a \ 0" from x a have "x = (inverse a * a) \ x" by simp - also from `x \ V` have "\ = inverse a \ (a \ x)" by (rule mult_assoc) + also from \x \ V\ have "\ = inverse a \ (a \ x)" by (rule mult_assoc) also from ax have "\ = inverse a \ 0" by simp also have "\ = 0" by simp finally have "x = 0" . - with `x \ 0` show "a = 0" by contradiction + with \x \ 0\ show "a = 0" by contradiction qed lemma mult_left_cancel: @@ -330,7 +325,7 @@ proof - from assms have "- c + (a + b) = - c + (c + d)" by (simp add: add_left_cancel) - also have "\ = d" using `c \ V` `d \ V` by (rule minus_add_cancel) + also have "\ = d" using \c \ V\ \d \ V\ by (rule minus_add_cancel) finally have eq: "- c + (a + b) = d" . 
from vs have "a - c = (- c + (a + b)) + - b" by (simp add: add_ac diff_eq1) diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Hahn_Banach/Zorn_Lemma.thy --- a/src/HOL/Hahn_Banach/Zorn_Lemma.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Hahn_Banach/Zorn_Lemma.thy Tue Oct 21 21:55:45 2014 +0200 @@ -2,13 +2,13 @@ Author: Gertrud Bauer, TU Munich *) -header {* Zorn's Lemma *} +header \Zorn's Lemma\ theory Zorn_Lemma imports Main begin -text {* +text \ Zorn's Lemmas states: if every linear ordered subset of an ordered set @{text S} has an upper bound in @{text S}, then there exists a maximal element in @{text S}. In our application, @{text S} is a @@ -17,7 +17,7 @@ of Zorn's lemma can be modified: if @{text S} is non-empty, it suffices to show that for every non-empty chain @{text c} in @{text S} the union of @{text c} also lies in @{text S}. -*} +\ theorem Zorn's_Lemma: assumes r: "\c. c \ chains S \ \x. x \ c \ \c \ S" @@ -30,14 +30,14 @@ show "\y \ S. \z \ c. z \ y" proof cases - txt {* If @{text c} is an empty chain, then every element in - @{text S} is an upper bound of @{text c}. *} + txt \If @{text c} is an empty chain, then every element in + @{text S} is an upper bound of @{text c}.\ assume "c = {}" with aS show ?thesis by fast - txt {* If @{text c} is non-empty, then @{text "\c"} is an upper - bound of @{text c}, lying in @{text S}. *} + txt \If @{text c} is non-empty, then @{text "\c"} is an upper + bound of @{text c}, lying in @{text S}.\ next assume "c \ {}" @@ -46,7 +46,7 @@ show "\z \ c. z \ \c" by fast show "\c \ S" proof (rule r) - from `c \ {}` show "\x. x \ c" by fast + from \c \ {}\ show "\x. x \ c" by fast show "c \ chains S" by fact qed qed diff -r cb9d84d3e7f2 -r e55fe82f3803 src/HOL/Multivariate_Analysis/Topology_Euclidean_Space.thy --- a/src/HOL/Multivariate_Analysis/Topology_Euclidean_Space.thy Tue Oct 21 21:10:44 2014 +0200 +++ b/src/HOL/Multivariate_Analysis/Topology_Euclidean_Space.thy Tue Oct 21 21:55:45 2014 +0200 @@ -1015,7 +1015,7 @@ and "(\i\Basis. a\i \ c\i \ d\i \ b\i) \ box c d \ cbox a b" and "(\i\Basis. a\i \ c\i \ d\i \ b\i) \ box c d \ box a b" unfolding subset_eq[unfolded Ball_def] unfolding mem_box - by (best intro: order_trans less_le_trans le_less_trans less_imp_le)+ + by (best intro: order_trans less_le_trans le_less_trans less_imp_le)+ lemma box_subset_cbox: fixes a :: "'a::euclidean_space" @@ -1153,7 +1153,9 @@ lemma UN_box_eq_UNIV: "(\i::nat. box (- (real i *\<^sub>R One)) (real i *\<^sub>R One)) = UNIV" proof - - { fix x b :: 'a assume [simp]: "b \ Basis" + { + fix x b :: 'a + assume [simp]: "b \ Basis" have "\x \ b\ \ real (natceiling \x \ b\)" by (rule real_natceiling_ge) also have "\ \ real (natceiling (Max ((\b. \x \ b\)`Basis)))" @@ -1679,7 +1681,7 @@ by (simp add: frontier_def) lemma frontier_subset_eq: "frontier S \ S \ closed S" -proof- +proof - { assume "frontier S \ S" then have "closure S \ S" @@ -1690,13 +1692,14 @@ then show ?thesis using frontier_subset_closed[of S] .. 
qed -lemma frontier_complement: "frontier(- S) = frontier S" +lemma frontier_complement: "frontier (- S) = frontier S" by (auto simp add: frontier_def closure_complement interior_complement) lemma frontier_disjoint_eq: "frontier S \ S = {} \ open S" using frontier_complement frontier_subset_eq[of "- S"] unfolding open_closed by auto + subsection {* Filters and the ``eventually true'' quantifier *} definition indirection :: "'a::real_normed_vector \ 'a \ 'a filter" diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Pure/Isar/outer_syntax.scala --- a/src/Pure/Isar/outer_syntax.scala Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Pure/Isar/outer_syntax.scala Tue Oct 21 21:55:45 2014 +0200 @@ -63,11 +63,11 @@ /* overall document structure */ sealed abstract class Document { def length: Int } - case class Document_Block(val name: String, val body: List[Document]) extends Document + case class Document_Block(name: String, text: String, body: List[Document]) extends Document { val length: Int = (0 /: body)(_ + _.length) } - case class Document_Atom(val command: Command) extends Document + case class Document_Atom(command: Command) extends Document { def length: Int = command.length } @@ -192,8 +192,8 @@ if (tok.is_command) { if (command_kind(tok, Keyword.theory_goal)) (2, 1) else if (command_kind(tok, Keyword.theory)) (1, 0) - else if (command_kind(tok, Keyword.proof_goal) || tok.source == "{") (y + 2, y + 1) - else if (command_kind(tok, Keyword.qed) || tok.source == "}") (y + 1, y - 1) + else if (command_kind(tok, Keyword.proof_goal) || tok.is_begin_block) (y + 2, y + 1) + else if (command_kind(tok, Keyword.qed) || tok.is_end_block) (y + 1, y - 1) else if (command_kind(tok, Keyword.qed_global)) (1, 0) else (x, y) } @@ -216,11 +216,7 @@ } } - def scan_line( - input: CharSequence, - context: Scan.Line_Context, - structure: Outer_Syntax.Line_Structure) - : (List[Token], Scan.Line_Context, Outer_Syntax.Line_Structure) = + def scan_line(input: CharSequence, context: Scan.Line_Context): (List[Token], Scan.Line_Context) = { var in: Reader[Char] = new CharSequenceReader(input) val toks = new mutable.ListBuffer[Token] @@ -232,8 +228,7 @@ error("Unexpected failure of tokenizing input:\n" + rest.source.toString) } } - val tokens = toks.toList - (tokens, ctxt, line_structure(tokens, structure)) + (toks.toList, ctxt) } @@ -293,32 +288,32 @@ } } - def parse_document(node_name: Document.Node.Name, text: CharSequence): Outer_Syntax.Document = + def parse_document(node_name: Document.Node.Name, text: CharSequence): + List[Outer_Syntax.Document] = { /* stack operations */ def buffer(): mutable.ListBuffer[Outer_Syntax.Document] = new mutable.ListBuffer[Outer_Syntax.Document] - var stack: List[(Int, String, mutable.ListBuffer[Outer_Syntax.Document])] = - List((0, node_name.toString, buffer())) + var stack: List[(Int, Command, mutable.ListBuffer[Outer_Syntax.Document])] = + List((0, Command.empty, buffer())) @tailrec def close(level: Int => Boolean) { stack match { - case (lev, name, body) :: (_, _, body2) :: rest if level(lev) => - body2 += Outer_Syntax.Document_Block(name, body.toList) + case (lev, command, body) :: (_, _, body2) :: rest if level(lev) => + body2 += Outer_Syntax.Document_Block(command.name, command.source, body.toList) stack = stack.tail close(level) case _ => } } - def result(): Outer_Syntax.Document = + def result(): List[Outer_Syntax.Document] = { close(_ => true) - val (_, name, body) = stack.head - Outer_Syntax.Document_Block(name, body.toList) + stack.head._3.toList } def add(command: Command) @@ -326,7 
+321,7 @@ heading_level(command) match { case Some(i) => close(_ > i) - stack = (i + 1, command.source, buffer()) :: stack + stack = (i + 1, command, buffer()) :: stack case None => } stack.head._3 += Outer_Syntax.Document_Atom(command) diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Pure/Isar/token.scala --- a/src/Pure/Isar/token.scala Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Pure/Isar/token.scala Tue Oct 21 21:55:45 2014 +0200 @@ -190,7 +190,10 @@ source.startsWith(Symbol.open_decoded)) def is_begin: Boolean = is_keyword && source == "begin" - def is_end: Boolean = is_keyword && source == "end" + def is_end: Boolean = is_command && source == "end" + + def is_begin_block: Boolean = is_command && source == "{" + def is_end_block: Boolean = is_command && source == "}" def content: String = if (kind == Token.Kind.STRING) Scan.Parsers.quoted_content("\"", source) diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Pure/PIDE/resources.ML --- a/src/Pure/PIDE/resources.ML Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Pure/PIDE/resources.ML Tue Oct 21 21:55:45 2014 +0200 @@ -224,7 +224,7 @@ space_explode "/" name |> map Latex.output_ascii |> space_implode (Latex.output_ascii "/" ^ "\\discretionary{}{}{}") - |> Thy_Output.enclose_isabelle_tt ctxt + |> enclose "\\isatt{" "}" end; in diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Pure/PIDE/text.scala --- a/src/Pure/PIDE/text.scala Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Pure/PIDE/text.scala Tue Oct 21 21:55:45 2014 +0200 @@ -51,6 +51,8 @@ def is_singularity: Boolean = start == stop def inflate_singularity: Range = if (is_singularity) Range(start, start + 1) else this + def touches(i: Offset): Boolean = start <= i && i <= stop + def contains(i: Offset): Boolean = start == i || start < i && i < stop def contains(that: Range): Boolean = this.contains(that.start) && that.stop <= this.stop def overlaps(that: Range): Boolean = this.contains(that.start) || that.contains(this.start) diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Pure/Thy/thy_output.ML --- a/src/Pure/Thy/thy_output.ML Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Pure/Thy/thy_output.ML Tue Oct 21 21:55:45 2014 +0200 @@ -35,7 +35,6 @@ Token.src -> 'a list -> Pretty.T list val output: Proof.context -> Pretty.T list -> string val verbatim_text: Proof.context -> string -> string - val enclose_isabelle_tt: Proof.context -> string -> string end; structure Thy_Output: THY_OUTPUT = @@ -470,10 +469,6 @@ fun tweak_line ctxt s = if Config.get ctxt display then s else Symbol.strip_blanks s; -fun tweak_lines ctxt s = - if Config.get ctxt display then s - else s |> split_lines |> map Symbol.strip_blanks |> space_implode " "; - fun pretty_text ctxt = Pretty.chunks o map Pretty.str o map (tweak_line ctxt) o split_lines; @@ -643,18 +638,18 @@ (* verbatim text *) -fun enclose_isabelle_tt ctxt = - if Config.get ctxt display - then enclose "\\begin{isabellett}%\n" "%\n\\end{isabellett}" - else enclose "\\isatt{" "}"; +fun verbatim_text ctxt = + if Config.get ctxt display then + Latex.output_ascii #> + enclose "\\begin{isabellett}%\n" "%\n\\end{isabellett}" + else + split_lines #> + map (Latex.output_ascii #> enclose "\\isatt{" "}") #> + space_implode "\\isasep\\isanewline%\n"; -fun verbatim_text ctxt = - tweak_lines ctxt - #> Latex.output_ascii - #> enclose_isabelle_tt ctxt; - -val _ = Theory.setup - (antiquotation @{binding verbatim} (Scan.lift Args.text) (verbatim_text o #context)); +val _ = + Theory.setup + (antiquotation @{binding verbatim} (Scan.lift Args.text) (verbatim_text o #context)); (* ML text *) diff -r cb9d84d3e7f2 -r e55fe82f3803 
src/Tools/jEdit/etc/options --- a/src/Tools/jEdit/etc/options Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Tools/jEdit/etc/options Tue Oct 21 21:55:45 2014 +0200 @@ -30,6 +30,9 @@ public option jedit_text_overview_limit : int = 65536 -- "maximum amount of text to visualize in overview column" +public option jedit_structure_limit : int = 1000 + -- "maximum number of lines to scan for language structure" + public option jedit_symbols_search_limit : int = 50 -- "maximum number of symbols in search result" diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Tools/jEdit/lib/Tools/jedit --- a/src/Tools/jEdit/lib/Tools/jedit Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Tools/jEdit/lib/Tools/jedit Tue Oct 21 21:55:45 2014 +0200 @@ -46,6 +46,7 @@ "src/simplifier_trace_window.scala" "src/sledgehammer_dockable.scala" "src/spell_checker.scala" + "src/structure_matching.scala" "src/symbols_dockable.scala" "src/syslog_dockable.scala" "src/text_overview.scala" diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Tools/jEdit/src/bibtex_jedit.scala --- a/src/Tools/jEdit/src/bibtex_jedit.scala Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Tools/jEdit/src/bibtex_jedit.scala Tue Oct 21 21:55:45 2014 +0200 @@ -231,9 +231,8 @@ { override def supportsCompletion = false - private class Asset( - label: String, label_html: String, start: Text.Offset, stop: Text.Offset, source: String) - extends Isabelle_Sidekick.Asset(label, start, stop) { + private class Asset(label: String, label_html: String, range: Text.Range, source: String) + extends Isabelle_Sidekick.Asset(label, range) { override def getShortString: String = label_html override def getLongString: String = source } @@ -252,7 +251,8 @@ val label = kind + (if (name == "") "" else " " + name) val label_html = "" + kind + "" + (if (name == "") "" else " " + name) + "" - val asset = new Asset(label, label_html, offset, offset + source.size, source) + val range = Text.Range(offset, offset + source.size) + val asset = new Asset(label, label_html, range, source) data.root.add(new DefaultMutableTreeNode(asset)) } offset += source.size diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Tools/jEdit/src/document_view.scala --- a/src/Tools/jEdit/src/document_view.scala Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Tools/jEdit/src/document_view.scala Tue Oct 21 21:55:45 2014 +0200 @@ -251,6 +251,8 @@ text_area.addKeyListener(key_listener) text_area.addCaretListener(caret_listener) text_area.addLeftOfScrollBar(overview) + Isabelle.structure_matchers(JEdit_Lib.buffer_mode(text_area.getBuffer)). + foreach(text_area.addStructureMatcher(_)) session.raw_edits += main session.commands_changed += main } @@ -261,6 +263,8 @@ session.raw_edits -= main session.commands_changed -= main + Isabelle.structure_matchers(JEdit_Lib.buffer_mode(text_area.getBuffer)). 
+ foreach(text_area.removeStructureMatcher(_)) text_area.removeLeftOfScrollBar(overview); overview.delay_repaint.revoke() text_area.removeCaretListener(caret_listener); delay_caret_update.revoke() text_area.removeKeyListener(key_listener) diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Tools/jEdit/src/fold_handling.scala --- a/src/Tools/jEdit/src/fold_handling.scala Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Tools/jEdit/src/fold_handling.scala Tue Oct 21 21:55:45 2014 +0200 @@ -29,16 +29,16 @@ } override def getFoldLevel(buffer: JEditBuffer, line: Int, seg: Segment): Int = - Token_Markup.buffer_line_structure(buffer, line).depth max 0 + Token_Markup.buffer_line_context(buffer, line).structure.depth max 0 override def getPrecedingFoldLevels( buffer: JEditBuffer, line: Int, seg: Segment, level: Int): java.util.List[Integer] = { - val struct = Token_Markup.buffer_line_structure(buffer, line) + val struct = Token_Markup.buffer_line_context(buffer, line).structure val result = if (line > 0 && struct.command) Range.inclusive(line - 1, 0, -1).iterator. - map(Token_Markup.buffer_line_structure(buffer, _)). + map(i => Token_Markup.buffer_line_context(buffer, i).structure). takeWhile(_.improper).map(_ => struct.depth max 0).toList else Nil diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Tools/jEdit/src/isabelle.scala --- a/src/Tools/jEdit/src/isabelle.scala Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Tools/jEdit/src/isabelle.scala Tue Oct 21 21:55:45 2014 +0200 @@ -15,7 +15,7 @@ import scala.swing.event.ButtonClicked import org.gjt.sp.jedit.{jEdit, View, Buffer} -import org.gjt.sp.jedit.textarea.JEditTextArea +import org.gjt.sp.jedit.textarea.{JEditTextArea, StructureMatcher} import org.gjt.sp.jedit.syntax.TokenMarker import org.gjt.sp.jedit.gui.{DockableWindowManager, CompleteWord} import org.gjt.sp.jedit.options.PluginOptions @@ -73,6 +73,12 @@ def token_marker(name: String): Option[TokenMarker] = markers.get(name) + /* structure matchers */ + + def structure_matchers(name: String): List[StructureMatcher] = + if (name == "isabelle") List(Structure_Matching.Isabelle_Matcher) else Nil + + /* dockable windows */ private def wm(view: View): DockableWindowManager = view.getDockableWindowManager diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Tools/jEdit/src/isabelle_sidekick.scala --- a/src/Tools/jEdit/src/isabelle_sidekick.scala Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Tools/jEdit/src/isabelle_sidekick.scala Tue Oct 21 21:55:45 2014 +0200 @@ -30,15 +30,17 @@ data } - class Asset(name: String, start: Text.Offset, end: Text.Offset) extends IAsset + class Keyword_Asset(keyword: String, text: String, range: Text.Range) extends IAsset { - protected var _name = name - protected var _start = int_to_pos(start) - protected var _end = int_to_pos(end) + protected var _name = text + protected var _start = int_to_pos(range.start) + protected var _end = int_to_pos(range.stop) override def getIcon: Icon = null override def getShortString: String = "" + - HTML.encode(_name) + "" + (if (keyword != "" && _name.startsWith(keyword)) + "" + HTML.encode(keyword) + "" + HTML.encode(_name.substring(keyword.length)) + else HTML.encode(_name)) + "" override def getLongString: String = _name override def getName: String = _name override def setName(name: String) = _name = name @@ -49,6 +51,8 @@ override def toString: String = _name } + class Asset(name: String, range: Text.Range) extends Keyword_Asset("", name, range) + def swing_markup_tree(tree: Markup_Tree, parent: DefaultMutableTreeNode, swing_node: Text.Info[List[XML.Elem]] => 
DefaultMutableTreeNode) { @@ -101,27 +105,29 @@ { override def parser(buffer: Buffer, syntax: Outer_Syntax, data: SideKickParsedData): Boolean = { - def make_tree(offset: Text.Offset, document: Outer_Syntax.Document) - : List[DefaultMutableTreeNode] = - document match { - case Outer_Syntax.Document_Block(name, body) => - val node = - new DefaultMutableTreeNode( - new Isabelle_Sidekick.Asset( - Library.first_line(name), offset, offset + document.length)) - (offset /: body)((i, doc) => - { - for (nd <- make_tree(i, doc)) node.add(nd) - i + doc.length - }) - List(node) - case _ => Nil + def make_tree( + parent: DefaultMutableTreeNode, + offset: Text.Offset, + documents: List[Outer_Syntax.Document]) + { + (offset /: documents) { case (i, document) => + document match { + case Outer_Syntax.Document_Block(name, text, body) => + val range = Text.Range(i, i + document.length) + val node = + new DefaultMutableTreeNode( + new Isabelle_Sidekick.Keyword_Asset(name, Library.first_line(text), range)) + parent.add(node) + make_tree(node, i, body) + case _ => + } + i + document.length } + } node_name(buffer) match { case Some(name) => - val document = syntax.parse_document(name, JEdit_Lib.buffer_text(buffer)) - for (node <- make_tree(0, document)) data.root.add(node) + make_tree(data.root, 0, syntax.parse_document(name, JEdit_Lib.buffer_text(buffer))) true case None => false } @@ -169,7 +175,7 @@ .mkString new DefaultMutableTreeNode( - new Isabelle_Sidekick.Asset(command.toString, range.start, range.stop) { + new Isabelle_Sidekick.Asset(command.toString, range) { override def getShortString: String = content override def getLongString: String = info_text override def toString: String = quote(content) + " " + range.toString @@ -189,7 +195,7 @@ private val Heading2 = """^\*\*\*\w*(.*)\w*\*\*\*\w*$""".r private def make_node(s: String, start: Text.Offset, stop: Text.Offset): DefaultMutableTreeNode = - new DefaultMutableTreeNode(new Isabelle_Sidekick.Asset(s, start, stop)) + new DefaultMutableTreeNode(new Isabelle_Sidekick.Asset(s, Text.Range(start, stop))) override def parser(buffer: Buffer, syntax: Outer_Syntax, data: SideKickParsedData): Boolean = { diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Tools/jEdit/src/jEdit.props --- a/src/Tools/jEdit/src/jEdit.props Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Tools/jEdit/src/jEdit.props Tue Oct 21 21:55:45 2014 +0200 @@ -180,6 +180,7 @@ fallbackEncodings=UTF-8 ISO-8859-15 US-ASCII firstTime=false focus-buffer-switcher.shortcut2=A+CIRCUMFLEX +foldPainter=Circle home.shortcut= insert-newline-indent.shortcut= insert-newline.shortcut=ENTER diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Tools/jEdit/src/structure_matching.scala --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Tools/jEdit/src/structure_matching.scala Tue Oct 21 21:55:45 2014 +0200 @@ -0,0 +1,121 @@ +/* Title: Tools/jEdit/src/structure_matching.scala + Author: Makarius + +Structure matcher for Isabelle/Isar outer syntax. 
+*/ + +package isabelle.jedit + + +import isabelle._ + +import org.gjt.sp.jedit.textarea.{TextArea, StructureMatcher} + + +object Structure_Matching +{ + object Isabelle_Matcher extends StructureMatcher + { + def find_block( + open: Token => Boolean, + close: Token => Boolean, + reset: Token => Boolean, + it: Iterator[Text.Info[Token]]): Option[(Text.Range, Text.Range)] = + { + val range1 = it.next.range + it.scanLeft((range1, 1))( + { case ((r, d), Text.Info(range, tok)) => + if (open(tok)) (range, d + 1) + else if (close(tok)) (range, d - 1) + else if (reset(tok)) (range, 0) + else (r, d) } + ).collectFirst({ case (range2, 0) => (range1, range2) }) + } + + def find_pair(text_area: TextArea): Option[(Text.Range, Text.Range)] = + { + val buffer = text_area.getBuffer + val caret_line = text_area.getCaretLine + val caret = text_area.getCaretPosition + + PIDE.session.recent_syntax match { + case syntax: Outer_Syntax + if syntax != Outer_Syntax.empty => + + val limit = PIDE.options.value.int("jedit_structure_limit") max 0 + + def iterator(line: Int, lim: Int = limit): Iterator[Text.Info[Token]] = + Token_Markup.line_token_iterator(syntax, buffer, line, line + lim). + filter(_.info.is_proper) + + def rev_iterator(line: Int, lim: Int = limit): Iterator[Text.Info[Token]] = + Token_Markup.line_token_reverse_iterator(syntax, buffer, line, line - lim). + filter(_.info.is_proper) + + def caret_iterator(): Iterator[Text.Info[Token]] = + iterator(caret_line).dropWhile(info => !info.range.touches(caret)) + + def rev_caret_iterator(): Iterator[Text.Info[Token]] = + rev_iterator(caret_line).dropWhile(info => !info.range.touches(caret)) + + iterator(caret_line, 1).find(info => info.range.touches(caret)) + match { + case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.theory_goal) => + find_block( + syntax.command_kind(_, Keyword.proof_goal), + syntax.command_kind(_, Keyword.qed), + syntax.command_kind(_, Keyword.qed_global), + caret_iterator()) + + case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.proof_goal) => + find_block( + syntax.command_kind(_, Keyword.proof_goal), + syntax.command_kind(_, Keyword.qed), + _ => false, + caret_iterator()) + + case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.qed_global) => + rev_caret_iterator().find(info => syntax.command_kind(info.info, Keyword.theory)) + match { + case Some(Text.Info(range2, tok)) + if syntax.command_kind(tok, Keyword.theory_goal) => Some((range1, range2)) + case _ => None + } + + case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.qed) => + find_block( + syntax.command_kind(_, Keyword.qed), + t => + syntax.command_kind(t, Keyword.proof_goal) || + syntax.command_kind(t, Keyword.theory_goal), + _ => false, + rev_caret_iterator()) + + case Some(Text.Info(range1, tok)) if tok.is_begin => + find_block(_.is_begin, _.is_end, _ => false, caret_iterator()) + + case Some(Text.Info(range1, tok)) if tok.is_end => + find_block(_.is_end, _.is_begin, _ => false, rev_caret_iterator()) + + case _ => None + } + case _ => None + } + } + + def getMatch(text_area: TextArea): StructureMatcher.Match = + find_pair(text_area) match { + case Some((_, range)) => + val line = text_area.getBuffer.getLineOfOffset(range.start) + new StructureMatcher.Match(Structure_Matching.Isabelle_Matcher, + line, range.start, line, range.stop) + case None => null + } + + def selectMatch(text_area: TextArea) + { + // FIXME + } + } +} + diff -r cb9d84d3e7f2 -r e55fe82f3803 src/Tools/jEdit/src/token_markup.scala --- 
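find_block in the new structure_matching.scala scans away from the token under the caret while tracking a nesting depth: open tokens increment it, close tokens decrement it, reset tokens clear it, and the first range at which the depth returns to zero is reported as the partner of the starting range. The same scanLeft/collectFirst pattern can be sketched over plain strings; BlockDemo and findClose are made-up names, and the real matcher classifies Isar command keywords instead of comparing literal strings:

    // Depth counting with scanLeft, in the style of Structure_Matching.find_block.
    object BlockDemo {
      // Returns the index of the token that closes an already opened block.
      def findClose(tokens: List[String], open: String, close: String): Option[Int] =
        tokens.iterator.zipWithIndex
          .scanLeft((-1, 1)) { case ((_, d), (tok, i)) =>
            // depth starts at 1: the opening token itself is already behind us
            if (tok == open) (i, d + 1)
            else if (tok == close) (i, d - 1)
            else (i, d)
          }
          .collectFirst { case (i, 0) => i }

      def main(args: Array[String]): Unit = {
        val toks = List("a", "{", "b", "}", "}", "c")
        println(findClose(toks, "{", "}"))  // Some(4): the outer closing brace
      }
    }
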
a/src/Tools/jEdit/src/token_markup.scala Tue Oct 21 21:10:44 2014 +0200 +++ b/src/Tools/jEdit/src/token_markup.scala Tue Oct 21 21:55:45 2014 +0200 @@ -177,6 +177,11 @@ private val context_rules = new ParserRuleSet("isabelle", "MAIN") + object Line_Context + { + val init = new Line_Context(Some(Scan.Finished), Outer_Syntax.Line_Structure.init) + } + class Line_Context( val context: Option[Scan.Line_Context], val structure: Outer_Syntax.Line_Structure) @@ -190,7 +195,7 @@ } } - def buffer_line_context(buffer: JEditBuffer, line: Int): Option[Line_Context] = + def buffer_line_context(buffer: JEditBuffer, line: Int): Line_Context = Untyped.get(buffer, "lineMgr") match { case line_mgr: LineManager => def context = @@ -198,18 +203,54 @@ case c: Line_Context => Some(c) case _ => None } - context orElse { + context getOrElse { buffer.markTokens(line, DummyTokenHandler.INSTANCE) - context + context getOrElse Line_Context.init } - case _ => None + case _ => Line_Context.init } - def buffer_line_structure(buffer: JEditBuffer, line: Int): Outer_Syntax.Line_Structure = - buffer_line_context(buffer, line) match { - case Some(c) => c.structure - case _ => Outer_Syntax.Line_Structure.init - } + + /* line tokens */ + + def try_line_tokens(syntax: Outer_Syntax, buffer: JEditBuffer, line: Int): Option[List[Token]] = + { + val line_context = + if (line == 0) Line_Context.init + else buffer_line_context(buffer, line - 1) + for { + ctxt <- line_context.context + text <- JEdit_Lib.try_get_text(buffer, JEdit_Lib.line_range(buffer, line)) + } yield syntax.scan_line(text, ctxt)._1 + } + + def line_token_iterator( + syntax: Outer_Syntax, + buffer: JEditBuffer, + start_line: Int, + end_line: Int): Iterator[Text.Info[Token]] = + for { + line <- Range(start_line max 0, end_line min buffer.getLineCount).iterator + tokens <- try_line_tokens(syntax, buffer, line).iterator + starts = + tokens.iterator.scanLeft(buffer.getLineStartOffset(line))( + (i, tok) => i + tok.source.length) + (i, tok) <- starts zip tokens.iterator + } yield Text.Info(Text.Range(i, i + tok.source.length), tok) + + def line_token_reverse_iterator( + syntax: Outer_Syntax, + buffer: JEditBuffer, + start_line: Int, + end_line: Int): Iterator[Text.Info[Token]] = + for { + line <- Range(start_line min (buffer.getLineCount - 1), end_line max -1, -1).iterator + tokens <- try_line_tokens(syntax, buffer, line).iterator + stops = + tokens.reverseIterator.scanLeft(buffer.getLineEndOffset(line) min buffer.getLength)( + (i, tok) => i - tok.source.length) + (i, tok) <- stops zip tokens.reverseIterator + } yield Text.Info(Text.Range(i - tok.source.length, i), tok) /* token marker */ @@ -219,24 +260,22 @@ override def markTokens(context: TokenMarker.LineContext, handler: TokenHandler, raw_line: Segment): TokenMarker.LineContext = { - val (opt_ctxt, structure) = - context match { - case c: Line_Context => (c.context, c.structure) - case _ => (Some(Scan.Finished), Outer_Syntax.Line_Structure.init) - } val line = if (raw_line == null) new Segment else raw_line + val line_context = context match { case c: Line_Context => c case _ => Line_Context.init } + val structure = line_context.structure val context1 = { val (styled_tokens, context1) = - (opt_ctxt, Isabelle.mode_syntax(mode)) match { + (line_context.context, Isabelle.mode_syntax(mode)) match { case (Some(ctxt), _) if mode == "isabelle-ml" || mode == "sml" => val (tokens, ctxt1) = ML_Lex.tokenize_line(mode == "sml", line, ctxt) val styled_tokens = tokens.map(tok => (Rendering.ml_token_markup(tok), tok.source)) 
(styled_tokens, new Line_Context(Some(ctxt1), structure)) case (Some(ctxt), Some(syntax)) if syntax.has_tokens => - val (tokens, ctxt1, structure1) = syntax.scan_line(line, ctxt, structure) + val (tokens, ctxt1) = syntax.scan_line(line, ctxt) + val structure1 = syntax.line_structure(tokens, structure) val styled_tokens = tokens.map(tok => (Rendering.token_markup(syntax, tok), tok.source)) (styled_tokens, new Line_Context(Some(ctxt1), structure1))
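The line_token_iterator added to token_markup.scala above recovers each token's text range by scanning cumulative source lengths from the line's start offset, and line_token_reverse_iterator does the same backwards from the line's end offset. A self-contained sketch of that offset bookkeeping, with plain strings instead of Isar tokens and the illustrative names TokenRangeDemo and Info:

    // Rebuilding token ranges from a line offset plus token lengths,
    // in the style of Token_Markup.line_token_iterator.
    object TokenRangeDemo {
      final case class Info[A](start: Int, stop: Int, info: A)

      def tokenRanges(lineStart: Int, tokens: List[String]): List[Info[String]] = {
        // scanLeft yields every token's start offset: lineStart, lineStart + |t0|, ...
        val starts = tokens.scanLeft(lineStart)((i, tok) => i + tok.length)
        starts.zip(tokens).map { case (i, tok) => Info(i, i + tok.length, tok) }
      }

      def main(args: Array[String]): Unit = {
        tokenRanges(100, List("lemma", " ", "foo:")).foreach(println)
        // Info(100,105,lemma)  Info(105,106, )  Info(106,110,foo:)
      }
    }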