author      wenzelm
Tue, 21 Oct 2014 21:55:45 +0200
changeset 58759 e55fe82f3803
parent 58740 cb9d84d3e7f2 (current diff)
parent 58758 790ff9eb2578 (diff)
child 58760 3600ee38daa0
merged
files:
  NEWS
  src/HOL/Multivariate_Analysis/Topology_Euclidean_Space.thy
--- a/NEWS	Tue Oct 21 21:10:44 2014 +0200
+++ b/NEWS	Tue Oct 21 21:55:45 2014 +0200
@@ -20,6 +20,10 @@
 * Improved folding mode "isabelle" based on Isar syntax.
 Alternatively, the "sidekick" mode may be used for document structure.
 
+* Extended bracket matching based on Isar language structure.  System
+option jedit_structure_limit determines maximum number of lines to
+scan in the buffer.
+
 * Support for BibTeX files: context menu, context-sensitive token
 marker, SideKick parser.
 
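Regarding the new jedit_structure_limit entry above: a usage sketch, assuming the usual Isabelle/jEdit configuration mechanism rather than anything introduced by this changeset. The option can presumably be adjusted via the jEdit plugin options for Isabelle, or with a line in $ISABELLE_HOME_USER/etc/preferences such as

  jedit_structure_limit = "1000"

where 1000 is a hypothetical value chosen only for illustration.
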
--- a/src/Doc/Implementation/Prelim.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Doc/Implementation/Prelim.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -1048,10 +1048,9 @@
 text \<open>\medskip That position can be also printed in a message as
   follows:\<close>
 
-ML_command \<open>
-  writeln
-    ("Look here" ^ Position.here (Binding.pos_of @{binding here}))
-\<close>
+ML_command
+  \<open>writeln
+    ("Look here" ^ Position.here (Binding.pos_of @{binding here}))\<close>
 
 text \<open>This illustrates a key virtue of formalized bindings as
   opposed to raw specifications of base names: the system can use this
@@ -1062,8 +1061,6 @@
   directly, which is occasionally useful for experimentation and
   diagnostic purposes:\<close>
 
-ML_command \<open>
-  warning ("Look here" ^ Position.here @{here})
-\<close>
+ML_command \<open>warning ("Look here" ^ Position.here @{here})\<close>
 
 end
--- a/src/HOL/Hahn_Banach/Bounds.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Bounds.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,7 +2,7 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* Bounds *}
+header \<open>Bounds\<close>
 
 theory Bounds
 imports Main "~~/src/HOL/Library/ContNotDenum"
@@ -15,12 +15,9 @@
 
 lemmas [elim?] = lub.least lub.upper
 
-definition the_lub :: "'a::order set \<Rightarrow> 'a"
+definition the_lub :: "'a::order set \<Rightarrow> 'a"  ("\<Squnion>_" [90] 90)
   where "the_lub A = The (lub A)"
 
-notation (xsymbols)
-  the_lub  ("\<Squnion>_" [90] 90)
-
 lemma the_lub_equality [elim?]:
   assumes "lub A x"
   shows "\<Squnion>A = (x::'a::order)"
@@ -28,7 +25,7 @@
   interpret lub A x by fact
   show ?thesis
   proof (unfold the_lub_def)
-    from `lub A x` show "The (lub A) = x"
+    from \<open>lub A x\<close> show "The (lub A) = x"
     proof
       fix x' assume lub': "lub A x'"
       show "x' = x"
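
The \<Squnion> syntax for the_lub is now attached to the definition itself as a mixfix annotation, replacing the separate "notation (xsymbols)" declaration. A minimal usage sketch, assuming only the definition above within theory Bounds:

  term "\<Squnion>A"   (* the mixfix annotation lets this parse as: the_lub A *)
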
--- a/src/HOL/Hahn_Banach/Function_Norm.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Function_Norm.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,15 +2,15 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* The norm of a function *}
+header \<open>The norm of a function\<close>
 
 theory Function_Norm
 imports Normed_Space Function_Order
 begin
 
-subsection {* Continuous linear forms*}
+subsection \<open>Continuous linear forms\<close>
 
-text {*
+text \<open>
   A linear form @{text f} on a normed vector space @{text "(V, \<parallel>\<cdot>\<parallel>)"}
   is \emph{continuous}, iff it is bounded, i.e.
   \begin{center}
@@ -19,7 +19,7 @@
   In our application no other functions than linear forms are
   considered, so we can define continuous linear forms as bounded
   linear forms:
-*}
+\<close>
 
 locale continuous = linearform +
   fixes norm :: "_ \<Rightarrow> real"    ("\<parallel>_\<parallel>")
@@ -39,9 +39,9 @@
 qed
 
 
-subsection {* The norm of a linear form *}
+subsection \<open>The norm of a linear form\<close>
 
-text {*
+text \<open>
   The least real number @{text c} for which holds
   \begin{center}
   @{text "\<forall>x \<in> V. \<bar>f x\<bar> \<le> c \<cdot> \<parallel>x\<parallel>"}
@@ -70,7 +70,7 @@
 
   @{text fn_norm} is equal to the supremum of @{text B}, if the
   supremum exists (otherwise it is undefined).
-*}
+\<close>
 
 locale fn_norm =
   fixes norm :: "_ \<Rightarrow> real"    ("\<parallel>_\<parallel>")
@@ -83,34 +83,34 @@
 lemma (in fn_norm) B_not_empty [intro]: "0 \<in> B V f"
   by (simp add: B_def)
 
-text {*
+text \<open>
   The following lemma states that every continuous linear form on a
   normed space @{text "(V, \<parallel>\<cdot>\<parallel>)"} has a function norm.
-*}
+\<close>
 
 lemma (in normed_vectorspace_with_fn_norm) fn_norm_works:
   assumes "continuous V f norm"
   shows "lub (B V f) (\<parallel>f\<parallel>\<hyphen>V)"
 proof -
   interpret continuous V f norm by fact
-  txt {* The existence of the supremum is shown using the
+  txt \<open>The existence of the supremum is shown using the
     completeness of the reals. Completeness means, that every
-    non-empty bounded set of reals has a supremum. *}
+    non-empty bounded set of reals has a supremum.\<close>
   have "\<exists>a. lub (B V f) a"
   proof (rule real_complete)
-    txt {* First we have to show that @{text B} is non-empty: *}
+    txt \<open>First we have to show that @{text B} is non-empty:\<close>
     have "0 \<in> B V f" ..
     then show "\<exists>x. x \<in> B V f" ..
 
-    txt {* Then we have to show that @{text B} is bounded: *}
+    txt \<open>Then we have to show that @{text B} is bounded:\<close>
     show "\<exists>c. \<forall>y \<in> B V f. y \<le> c"
     proof -
-      txt {* We know that @{text f} is bounded by some value @{text c}. *}
+      txt \<open>We know that @{text f} is bounded by some value @{text c}.\<close>
       from bounded obtain c where c: "\<forall>x \<in> V. \<bar>f x\<bar> \<le> c * \<parallel>x\<parallel>" ..
 
-      txt {* To prove the thesis, we have to show that there is some
+      txt \<open>To prove the thesis, we have to show that there is some
         @{text b}, such that @{text "y \<le> b"} for all @{text "y \<in>
-        B"}. Due to the definition of @{text B} there are two cases. *}
+        B"}. Due to the definition of @{text B} there are two cases.\<close>
 
       def b \<equiv> "max c 0"
       have "\<forall>y \<in> B V f. y \<le> b"
@@ -121,16 +121,16 @@
           assume "y = 0"
           then show ?thesis unfolding b_def by arith
         next
-          txt {* The second case is @{text "y = \<bar>f x\<bar> / \<parallel>x\<parallel>"} for some
-            @{text "x \<in> V"} with @{text "x \<noteq> 0"}. *}
+          txt \<open>The second case is @{text "y = \<bar>f x\<bar> / \<parallel>x\<parallel>"} for some
+            @{text "x \<in> V"} with @{text "x \<noteq> 0"}.\<close>
           assume "y \<noteq> 0"
           with y obtain x where y_rep: "y = \<bar>f x\<bar> * inverse \<parallel>x\<parallel>"
               and x: "x \<in> V" and neq: "x \<noteq> 0"
             by (auto simp add: B_def divide_inverse)
           from x neq have gt: "0 < \<parallel>x\<parallel>" ..
 
-          txt {* The thesis follows by a short calculation using the
-            fact that @{text f} is bounded. *}
+          txt \<open>The thesis follows by a short calculation using the
+            fact that @{text f} is bounded.\<close>
 
           note y_rep
           also have "\<bar>f x\<bar> * inverse \<parallel>x\<parallel> \<le> (c * \<parallel>x\<parallel>) * inverse \<parallel>x\<parallel>"
@@ -162,7 +162,7 @@
 proof -
   interpret continuous V f norm by fact
   have "lub (B V f) (\<parallel>f\<parallel>\<hyphen>V)"
-    using `continuous V f norm` by (rule fn_norm_works)
+    using \<open>continuous V f norm\<close> by (rule fn_norm_works)
   from this and b show ?thesis ..
 qed
 
@@ -173,32 +173,32 @@
 proof -
   interpret continuous V f norm by fact
   have "lub (B V f) (\<parallel>f\<parallel>\<hyphen>V)"
-    using `continuous V f norm` by (rule fn_norm_works)
+    using \<open>continuous V f norm\<close> by (rule fn_norm_works)
   from this and b show ?thesis ..
 qed
 
-text {* The norm of a continuous function is always @{text "\<ge> 0"}. *}
+text \<open>The norm of a continuous function is always @{text "\<ge> 0"}.\<close>
 
 lemma (in normed_vectorspace_with_fn_norm) fn_norm_ge_zero [iff]:
   assumes "continuous V f norm"
   shows "0 \<le> \<parallel>f\<parallel>\<hyphen>V"
 proof -
   interpret continuous V f norm by fact
-  txt {* The function norm is defined as the supremum of @{text B}.
+  txt \<open>The function norm is defined as the supremum of @{text B}.
     So it is @{text "\<ge> 0"} if all elements in @{text B} are @{text "\<ge>
-    0"}, provided the supremum exists and @{text B} is not empty. *}
+    0"}, provided the supremum exists and @{text B} is not empty.\<close>
   have "lub (B V f) (\<parallel>f\<parallel>\<hyphen>V)"
-    using `continuous V f norm` by (rule fn_norm_works)
+    using \<open>continuous V f norm\<close> by (rule fn_norm_works)
   moreover have "0 \<in> B V f" ..
   ultimately show ?thesis ..
 qed
 
-text {*
+text \<open>
   \medskip The fundamental property of function norms is:
   \begin{center}
   @{text "\<bar>f x\<bar> \<le> \<parallel>f\<parallel> \<cdot> \<parallel>x\<parallel>"}
   \end{center}
-*}
+\<close>
 
 lemma (in normed_vectorspace_with_fn_norm) fn_norm_le_cong:
   assumes "continuous V f norm" "linearform V f"
@@ -214,7 +214,7 @@
     also have "f 0 = 0" by rule unfold_locales
     also have "\<bar>\<dots>\<bar> = 0" by simp
     also have a: "0 \<le> \<parallel>f\<parallel>\<hyphen>V"
-      using `continuous V f norm` by (rule fn_norm_ge_zero)
+      using \<open>continuous V f norm\<close> by (rule fn_norm_ge_zero)
     from x have "0 \<le> norm x" ..
     with a have "0 \<le> \<parallel>f\<parallel>\<hyphen>V * \<parallel>x\<parallel>" by (simp add: zero_le_mult_iff)
     finally show "\<bar>f x\<bar> \<le> \<parallel>f\<parallel>\<hyphen>V * \<parallel>x\<parallel>" .
@@ -227,20 +227,20 @@
       from x show "0 \<le> \<parallel>x\<parallel>" ..
       from x and neq have "\<bar>f x\<bar> * inverse \<parallel>x\<parallel> \<in> B V f"
         by (auto simp add: B_def divide_inverse)
-      with `continuous V f norm` show "\<bar>f x\<bar> * inverse \<parallel>x\<parallel> \<le> \<parallel>f\<parallel>\<hyphen>V"
+      with \<open>continuous V f norm\<close> show "\<bar>f x\<bar> * inverse \<parallel>x\<parallel> \<le> \<parallel>f\<parallel>\<hyphen>V"
         by (rule fn_norm_ub)
     qed
     finally show ?thesis .
   qed
 qed
 
-text {*
+text \<open>
   \medskip The function norm is the least positive real number for
   which the following inequation holds:
   \begin{center}
     @{text "\<bar>f x\<bar> \<le> c \<cdot> \<parallel>x\<parallel>"}
   \end{center}
-*}
+\<close>
 
 lemma (in normed_vectorspace_with_fn_norm) fn_norm_least [intro?]:
   assumes "continuous V f norm"
@@ -274,7 +274,7 @@
       qed
       finally show ?thesis .
     qed
-  qed (insert `continuous V f norm`, simp_all add: continuous_def)
+  qed (insert \<open>continuous V f norm\<close>, simp_all add: continuous_def)
 qed
 
 end
--- a/src/HOL/Hahn_Banach/Function_Order.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Function_Order.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,15 +2,15 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* An order on functions *}
+header \<open>An order on functions\<close>
 
 theory Function_Order
 imports Subspace Linearform
 begin
 
-subsection {* The graph of a function *}
+subsection \<open>The graph of a function\<close>
 
-text {*
+text \<open>
   We define the \emph{graph} of a (real) function @{text f} with
   domain @{text F} as the set
   \begin{center}
@@ -19,7 +19,7 @@
   So we are modeling partial functions by specifying the domain and
   the mapping function. We use the term ``function'' also for its
   graph.
-*}
+\<close>
 
 type_synonym 'a graph = "('a \<times> real) set"
 
@@ -38,12 +38,12 @@
   using assms unfolding graph_def by blast
 
 
-subsection {* Functions ordered by domain extension *}
+subsection \<open>Functions ordered by domain extension\<close>
 
-text {*
+text \<open>
   A function @{text h'} is an extension of @{text h}, iff the graph of
   @{text h} is a subset of the graph of @{text h'}.
-*}
+\<close>
 
 lemma graph_extI:
   "(\<And>x. x \<in> H \<Longrightarrow> h x = h' x) \<Longrightarrow> H \<subseteq> H'
@@ -57,12 +57,12 @@
   unfolding graph_def by blast
 
 
-subsection {* Domain and function of a graph *}
+subsection \<open>Domain and function of a graph\<close>
 
-text {*
+text \<open>
   The inverse functions to @{text graph} are @{text domain} and @{text
   funct}.
-*}
+\<close>
 
 definition domain :: "'a graph \<Rightarrow> 'a set"
   where "domain g = {x. \<exists>y. (x, y) \<in> g}"
@@ -70,10 +70,10 @@
 definition funct :: "'a graph \<Rightarrow> ('a \<Rightarrow> real)"
   where "funct g = (\<lambda>x. (SOME y. (x, y) \<in> g))"
 
-text {*
+text \<open>
   The following lemma states that @{text g} is the graph of a function
   if the relation induced by @{text g} is unique.
-*}
+\<close>
 
 lemma graph_domain_funct:
   assumes uniq: "\<And>x y z. (x, y) \<in> g \<Longrightarrow> (x, z) \<in> g \<Longrightarrow> z = y"
@@ -91,18 +91,18 @@
 qed
 
 
-subsection {* Norm-preserving extensions of a function *}
+subsection \<open>Norm-preserving extensions of a function\<close>
 
-text {*
+text \<open>
   Given a linear form @{text f} on the space @{text F} and a seminorm
   @{text p} on @{text E}. The set of all linear extensions of @{text
   f}, to superspaces @{text H} of @{text F}, which are bounded by
   @{text p}, is defined as follows.
-*}
+\<close>
 
 definition
   norm_pres_extensions ::
-    "'a::{plus, minus, uminus, zero} set \<Rightarrow> ('a \<Rightarrow> real) \<Rightarrow> 'a set \<Rightarrow> ('a \<Rightarrow> real)
+    "'a::{plus,minus,uminus,zero} set \<Rightarrow> ('a \<Rightarrow> real) \<Rightarrow> 'a set \<Rightarrow> ('a \<Rightarrow> real)
       \<Rightarrow> 'a graph set"
 where
   "norm_pres_extensions E p F f
--- a/src/HOL/Hahn_Banach/Hahn_Banach.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Hahn_Banach.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,20 +2,20 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* The Hahn-Banach Theorem *}
+header \<open>The Hahn-Banach Theorem\<close>
 
 theory Hahn_Banach
 imports Hahn_Banach_Lemmas
 begin
 
-text {*
+text \<open>
   We present the proof of two different versions of the Hahn-Banach
   Theorem, closely following @{cite \<open>\S36\<close> "Heuser:1986"}.
-*}
+\<close>
 
-subsection {* The Hahn-Banach Theorem for vector spaces *}
+subsection \<open>The Hahn-Banach Theorem for vector spaces\<close>
 
-text {*
+text \<open>
   \textbf{Hahn-Banach Theorem.} Let @{text F} be a subspace of a real
   vector space @{text E}, let @{text p} be a semi-norm on @{text E},
   and @{text f} be a linear form defined on @{text F} such that @{text
@@ -51,16 +51,16 @@
 
   \end{itemize}
   \end{enumerate}
-*}
+\<close>
 
 theorem Hahn_Banach:
   assumes E: "vectorspace E" and "subspace F E"
     and "seminorm E p" and "linearform F f"
   assumes fp: "\<forall>x \<in> F. f x \<le> p x"
   shows "\<exists>h. linearform E h \<and> (\<forall>x \<in> F. h x = f x) \<and> (\<forall>x \<in> E. h x \<le> p x)"
-    -- {* Let @{text E} be a vector space, @{text F} a subspace of @{text E}, @{text p} a seminorm on @{text E}, *}
-    -- {* and @{text f} a linear form on @{text F} such that @{text f} is bounded by @{text p}, *}
-    -- {* then @{text f} can be extended to a linear form @{text h} on @{text E} in a norm-preserving way. \skp *}
+    -- \<open>Let @{text E} be a vector space, @{text F} a subspace of @{text E}, @{text p} a seminorm on @{text E},\<close>
+    -- \<open>and @{text f} a linear form on @{text F} such that @{text f} is bounded by @{text p},\<close>
+    -- \<open>then @{text f} can be extended to a linear form @{text h} on @{text E} in a norm-preserving way. \skp\<close>
 proof -
   interpret vectorspace E by fact
   interpret subspace F E by fact
@@ -69,12 +69,12 @@
   def M \<equiv> "norm_pres_extensions E p F f"
   then have M: "M = \<dots>" by (simp only:)
   from E have F: "vectorspace F" ..
-  note FE = `F \<unlhd> E`
+  note FE = \<open>F \<unlhd> E\<close>
   {
     fix c assume cM: "c \<in> chains M" and ex: "\<exists>x. x \<in> c"
     have "\<Union>c \<in> M"
-      -- {* Show that every non-empty chain @{text c} of @{text M} has an upper bound in @{text M}: *}
-      -- {* @{text "\<Union>c"} is greater than any element of the chain @{text c}, so it suffices to show @{text "\<Union>c \<in> M"}. *}
+      -- \<open>Show that every non-empty chain @{text c} of @{text M} has an upper bound in @{text M}:\<close>
+      -- \<open>@{text "\<Union>c"} is greater than any element of the chain @{text c}, so it suffices to show @{text "\<Union>c \<in> M"}.\<close>
       unfolding M_def
     proof (rule norm_pres_extensionI)
       let ?H = "domain (\<Union>c)"
@@ -104,9 +104,9 @@
     qed
   }
   then have "\<exists>g \<in> M. \<forall>x \<in> M. g \<subseteq> x \<longrightarrow> x = g"
-  -- {* With Zorn's Lemma we can conclude that there is a maximal element in @{text M}. \skp *}
+  -- \<open>With Zorn's Lemma we can conclude that there is a maximal element in @{text M}. \skp\<close>
   proof (rule Zorn's_Lemma)
-      -- {* We show that @{text M} is non-empty: *}
+      -- \<open>We show that @{text M} is non-empty:\<close>
     show "graph F f \<in> M"
       unfolding M_def
     proof (rule norm_pres_extensionI2)
@@ -125,18 +125,18 @@
     and HE: "H \<unlhd> E" and FH: "F \<unlhd> H"
     and graphs: "graph F f \<subseteq> graph H h"
     and hp: "\<forall>x \<in> H. h x \<le> p x" unfolding M_def ..
-      -- {* @{text g} is a norm-preserving extension of @{text f}, in other words: *}
-      -- {* @{text g} is the graph of some linear form @{text h} defined on a subspace @{text H} of @{text E}, *}
-      -- {* and @{text h} is an extension of @{text f} that is again bounded by @{text p}. \skp *}
+      -- \<open>@{text g} is a norm-preserving extension of @{text f}, in other words:\<close>
+      -- \<open>@{text g} is the graph of some linear form @{text h} defined on a subspace @{text H} of @{text E},\<close>
+      -- \<open>and @{text h} is an extension of @{text f} that is again bounded by @{text p}. \skp\<close>
   from HE E have H: "vectorspace H"
     by (rule subspace.vectorspace)
 
   have HE_eq: "H = E"
-    -- {* We show that @{text h} is defined on whole @{text E} by classical contradiction. \skp *}
+    -- \<open>We show that @{text h} is defined on whole @{text E} by classical contradiction. \skp\<close>
   proof (rule classical)
     assume neq: "H \<noteq> E"
-      -- {* Assume @{text h} is not defined on whole @{text E}. Then show that @{text h} can be extended *}
-      -- {* in a norm-preserving way to a function @{text h'} with the graph @{text g'}. \skp *}
+      -- \<open>Assume @{text h} is not defined on whole @{text E}. Then show that @{text h} can be extended\<close>
+      -- \<open>in a norm-preserving way to a function @{text h'} with the graph @{text g'}. \skp\<close>
     have "\<exists>g' \<in> M. g \<subseteq> g' \<and> g \<noteq> g'"
     proof -
       from HE have "H \<subseteq> E" ..
@@ -147,12 +147,12 @@
         proof
           assume "x' = 0"
           with H have "x' \<in> H" by (simp only: vectorspace.zero)
-          with `x' \<notin> H` show False by contradiction
+          with \<open>x' \<notin> H\<close> show False by contradiction
         qed
       qed
 
       def H' \<equiv> "H + lin x'"
-        -- {* Define @{text H'} as the direct sum of @{text H} and the linear closure of @{text x'}. \skp *}
+        -- \<open>Define @{text H'} as the direct sum of @{text H} and the linear closure of @{text x'}. \skp\<close>
       have HH': "H \<unlhd> H'"
       proof (unfold H'_def)
         from x'E have "vectorspace (lin x')" ..
@@ -162,9 +162,9 @@
       obtain xi where
         xi: "\<forall>y \<in> H. - p (y + x') - h y \<le> xi
           \<and> xi \<le> p (y + x') - h y"
-        -- {* Pick a real number @{text \<xi>} that fulfills certain inequations; this will *}
-        -- {* be used to establish that @{text h'} is a norm-preserving extension of @{text h}.
-           \label{ex-xi-use}\skp *}
+        -- \<open>Pick a real number @{text \<xi>} that fulfills certain inequations; this will\<close>
+        -- \<open>be used to establish that @{text h'} is a norm-preserving extension of @{text h}.
+           \label{ex-xi-use}\skp\<close>
       proof -
         from H have "\<exists>xi. \<forall>y \<in> H. - p (y + x') - h y \<le> xi
             \<and> xi \<le> p (y + x') - h y"
@@ -191,10 +191,10 @@
 
       def h' \<equiv> "\<lambda>x. let (y, a) =
           SOME (y, a). x = y + a \<cdot> x' \<and> y \<in> H in h y + a * xi"
-        -- {* Define the extension @{text h'} of @{text h} to @{text H'} using @{text \<xi>}. \skp *}
+        -- \<open>Define the extension @{text h'} of @{text h} to @{text H'} using @{text \<xi>}. \skp\<close>
 
       have "g \<subseteq> graph H' h' \<and> g \<noteq> graph H' h'"
-        -- {* @{text h'} is an extension of @{text h} \dots \skp *}
+        -- \<open>@{text h'} is an extension of @{text h} \dots \skp\<close>
       proof
         show "g \<subseteq> graph H' h'"
         proof -
@@ -202,7 +202,7 @@
           proof (rule graph_extI)
             fix t assume t: "t \<in> H"
             from E HE t have "(SOME (y, a). t = y + a \<cdot> x' \<and> y \<in> H) = (t, 0)"
-              using `x' \<notin> H` `x' \<in> E` `x' \<noteq> 0` by (rule decomp_H'_H)
+              using \<open>x' \<notin> H\<close> \<open>x' \<in> E\<close> \<open>x' \<noteq> 0\<close> by (rule decomp_H'_H)
             with h'_def show "h t = h' t" by (simp add: Let_def)
           next
             from HH' show "H \<subseteq> H'" ..
@@ -225,18 +225,18 @@
             then have "(x', h' x') \<in> graph H' h'" ..
             with eq have "(x', h' x') \<in> graph H h" by (simp only:)
             then have "x' \<in> H" ..
-            with `x' \<notin> H` show False by contradiction
+            with \<open>x' \<notin> H\<close> show False by contradiction
           qed
           with g_rep show ?thesis by simp
         qed
       qed
       moreover have "graph H' h' \<in> M"
-        -- {* and @{text h'} is norm-preserving. \skp *}
+        -- \<open>and @{text h'} is norm-preserving. \skp\<close>
       proof (unfold M_def)
         show "graph H' h' \<in> norm_pres_extensions E p F f"
         proof (rule norm_pres_extensionI2)
           show "linearform H' h'"
-            using h'_def H'_def HE linearform `x' \<notin> H` `x' \<in> E` `x' \<noteq> 0` E
+            using h'_def H'_def HE linearform \<open>x' \<notin> H\<close> \<open>x' \<in> E\<close> \<open>x' \<noteq> 0\<close> E
             by (rule h'_lf)
           show "H' \<unlhd> E"
           unfolding H'_def
@@ -245,7 +245,7 @@
             show "vectorspace E" by fact
             from x'E show "lin x' \<unlhd> E" ..
           qed
-          from H `F \<unlhd> H` HH' show FH': "F \<unlhd> H'"
+          from H \<open>F \<unlhd> H\<close> HH' show FH': "F \<unlhd> H'"
             by (rule vectorspace.subspace_trans)
           show "graph F f \<subseteq> graph H' h'"
           proof (rule graph_extI)
@@ -271,15 +271,15 @@
             from FH' show "F \<subseteq> H'" ..
           qed
           show "\<forall>x \<in> H'. h' x \<le> p x"
-            using h'_def H'_def `x' \<notin> H` `x' \<in> E` `x' \<noteq> 0` E HE
-              `seminorm E p` linearform and hp xi
+            using h'_def H'_def \<open>x' \<notin> H\<close> \<open>x' \<in> E\<close> \<open>x' \<noteq> 0\<close> E HE
+              \<open>seminorm E p\<close> linearform and hp xi
             by (rule h'_norm_pres)
         qed
       qed
       ultimately show ?thesis ..
     qed
     then have "\<not> (\<forall>x \<in> M. g \<subseteq> x \<longrightarrow> g = x)" by simp
-      -- {* So the graph @{text g} of @{text h} cannot be maximal. Contradiction! \skp *}
+      -- \<open>So the graph @{text g} of @{text h} cannot be maximal. Contradiction! \skp\<close>
     with gx show "H = E" by contradiction
   qed
 
@@ -297,9 +297,9 @@
 qed
 
 
-subsection  {* Alternative formulation *}
+subsection  \<open>Alternative formulation\<close>
 
-text {*
+text \<open>
   The following alternative formulation of the Hahn-Banach
   Theorem\label{abs-Hahn-Banach} uses the fact that for a real linear
   form @{text f} and a seminorm @{text p} the following inequations
@@ -311,7 +311,7 @@
   @{text "\<forall>x \<in> H. h x \<le> p x"} \\
   \end{tabular}
   \end{center}
-*}
+\<close>
 
 theorem abs_Hahn_Banach:
   assumes E: "vectorspace E" and FE: "subspace F E"
@@ -342,13 +342,13 @@
 qed
 
 
-subsection {* The Hahn-Banach Theorem for normed spaces *}
+subsection \<open>The Hahn-Banach Theorem for normed spaces\<close>
 
-text {*
+text \<open>
   Every continuous linear form @{text f} on a subspace @{text F} of a
   norm space @{text E}, can be extended to a continuous linear form
   @{text g} on @{text E} such that @{text "\<parallel>f\<parallel> = \<parallel>g\<parallel>"}.
-*}
+\<close>
 
 theorem norm_Hahn_Banach:
   fixes V and norm ("\<parallel>_\<parallel>")
@@ -375,23 +375,23 @@
   have ge_zero: "0 \<le> \<parallel>f\<parallel>\<hyphen>F"
     by (rule normed_vectorspace_with_fn_norm.fn_norm_ge_zero
       [OF normed_vectorspace_with_fn_norm.intro,
-       OF F_norm `continuous F f norm` , folded B_def fn_norm_def])
-  txt {* We define a function @{text p} on @{text E} as follows:
-    @{text "p x = \<parallel>f\<parallel> \<cdot> \<parallel>x\<parallel>"} *}
+       OF F_norm \<open>continuous F f norm\<close> , folded B_def fn_norm_def])
+  txt \<open>We define a function @{text p} on @{text E} as follows:
+    @{text "p x = \<parallel>f\<parallel> \<cdot> \<parallel>x\<parallel>"}\<close>
   def p \<equiv> "\<lambda>x. \<parallel>f\<parallel>\<hyphen>F * \<parallel>x\<parallel>"
 
-  txt {* @{text p} is a seminorm on @{text E}: *}
+  txt \<open>@{text p} is a seminorm on @{text E}:\<close>
   have q: "seminorm E p"
   proof
     fix x y a assume x: "x \<in> E" and y: "y \<in> E"
     
-    txt {* @{text p} is positive definite: *}
+    txt \<open>@{text p} is positive definite:\<close>
     have "0 \<le> \<parallel>f\<parallel>\<hyphen>F" by (rule ge_zero)
     moreover from x have "0 \<le> \<parallel>x\<parallel>" ..
     ultimately show "0 \<le> p x"  
       by (simp add: p_def zero_le_mult_iff)
 
-    txt {* @{text p} is absolutely homogeneous: *}
+    txt \<open>@{text p} is absolutely homogeneous:\<close>
 
     show "p (a \<cdot> x) = \<bar>a\<bar> * p x"
     proof -
@@ -402,7 +402,7 @@
       finally show ?thesis .
     qed
 
-    txt {* Furthermore, @{text p} is subadditive: *}
+    txt \<open>Furthermore, @{text p} is subadditive:\<close>
 
     show "p (x + y) \<le> p x + p y"
     proof -
@@ -417,22 +417,22 @@
     qed
   qed
 
-  txt {* @{text f} is bounded by @{text p}. *}
+  txt \<open>@{text f} is bounded by @{text p}.\<close>
 
   have "\<forall>x \<in> F. \<bar>f x\<bar> \<le> p x"
   proof
     fix x assume "x \<in> F"
-    with `continuous F f norm` and linearform
+    with \<open>continuous F f norm\<close> and linearform
     show "\<bar>f x\<bar> \<le> p x"
       unfolding p_def by (rule normed_vectorspace_with_fn_norm.fn_norm_le_cong
         [OF normed_vectorspace_with_fn_norm.intro,
          OF F_norm, folded B_def fn_norm_def])
   qed
 
-  txt {* Using the fact that @{text p} is a seminorm and @{text f} is bounded
+  txt \<open>Using the fact that @{text p} is a seminorm and @{text f} is bounded
     by @{text p} we can apply the Hahn-Banach Theorem for real vector
     spaces. So @{text f} can be extended in a norm-preserving way to
-    some function @{text g} on the whole vector space @{text E}. *}
+    some function @{text g} on the whole vector space @{text E}.\<close>
 
   with E FE linearform q obtain g where
       linearformE: "linearform E g"
@@ -440,7 +440,7 @@
     and b: "\<forall>x \<in> E. \<bar>g x\<bar> \<le> p x"
     by (rule abs_Hahn_Banach [elim_format]) iprover
 
-  txt {* We furthermore have to show that @{text g} is also continuous: *}
+  txt \<open>We furthermore have to show that @{text g} is also continuous:\<close>
 
   have g_cont: "continuous E g norm" using linearformE
   proof
@@ -449,11 +449,11 @@
       by (simp only: p_def)
   qed
 
-  txt {* To complete the proof, we show that @{text "\<parallel>g\<parallel> = \<parallel>f\<parallel>"}. *}
+  txt \<open>To complete the proof, we show that @{text "\<parallel>g\<parallel> = \<parallel>f\<parallel>"}.\<close>
 
   have "\<parallel>g\<parallel>\<hyphen>E = \<parallel>f\<parallel>\<hyphen>F"
   proof (rule order_antisym)
-    txt {*
+    txt \<open>
       First we show @{text "\<parallel>g\<parallel> \<le> \<parallel>f\<parallel>"}.  The function norm @{text
       "\<parallel>g\<parallel>"} is defined as the smallest @{text "c \<in> \<real>"} such that
       \begin{center}
@@ -467,7 +467,7 @@
       @{text "\<forall>x \<in> E. \<bar>g x\<bar> \<le> \<parallel>f\<parallel> \<cdot> \<parallel>x\<parallel>"}
       \end{tabular}
       \end{center}
-    *}
+\<close>
 
     from g_cont _ ge_zero
     show "\<parallel>g\<parallel>\<hyphen>E \<le> \<parallel>f\<parallel>\<hyphen>F"
@@ -477,7 +477,7 @@
         by (simp only: p_def)
     qed
 
-    txt {* The other direction is achieved by a similar argument. *}
+    txt \<open>The other direction is achieved by a similar argument.\<close>
 
     show "\<parallel>f\<parallel>\<hyphen>F \<le> \<parallel>g\<parallel>\<hyphen>E"
     proof (rule normed_vectorspace_with_fn_norm.fn_norm_least
--- a/src/HOL/Hahn_Banach/Hahn_Banach_Ext_Lemmas.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Hahn_Banach_Ext_Lemmas.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,13 +2,13 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* Extending non-maximal functions *}
+header \<open>Extending non-maximal functions\<close>
 
 theory Hahn_Banach_Ext_Lemmas
 imports Function_Norm
 begin
 
-text {*
+text \<open>
   In this section the following context is presumed.  Let @{text E} be
   a real vector space with a seminorm @{text q} on @{text E}. @{text
   F} is a subspace of @{text E} and @{text f} a linear function on
@@ -38,7 +38,7 @@
   @{text "\<forall>u \<in> F. \<forall>v \<in> F. a u \<le> b v"}
   \end{tabular}
   \end{center}
-*}
+\<close>
 
 lemma ex_xi:
   assumes "vectorspace F"
@@ -46,9 +46,9 @@
   shows "\<exists>xi::real. \<forall>y \<in> F. a y \<le> xi \<and> xi \<le> b y"
 proof -
   interpret vectorspace F by fact
-  txt {* From the completeness of the reals follows:
+  txt \<open>From the completeness of the reals follows:
     The set @{text "S = {a u. u \<in> F}"} has a supremum, if it is
-    non-empty and has an upper bound. *}
+    non-empty and has an upper bound.\<close>
 
   let ?S = "{a u | u. u \<in> F}"
   have "\<exists>xi. lub ?S xi"
@@ -81,11 +81,11 @@
   } ultimately show "\<exists>xi. \<forall>y \<in> F. a y \<le> xi \<and> xi \<le> b y" by blast
 qed
 
-text {*
+text \<open>
   \medskip The function @{text h'} is defined as a @{text "h' x = h y
   + a \<cdot> \<xi>"} where @{text "x = y + a \<cdot> \<xi>"} is a linear extension of
   @{text h} to @{text H'}.
-*}
+\<close>
 
 lemma h'_lf:
   assumes h'_def: "h' \<equiv> \<lambda>x. let (y, a) =
@@ -101,10 +101,10 @@
   interpret vectorspace E by fact
   show ?thesis
   proof
-    note E = `vectorspace E`
+    note E = \<open>vectorspace E\<close>
     have H': "vectorspace H'"
     proof (unfold H'_def)
-      from `x0 \<in> E`
+      from \<open>x0 \<in> E\<close>
       have "lin x0 \<unlhd> E" ..
       with HE show "vectorspace (H + lin x0)" using E ..
     qed
@@ -121,7 +121,7 @@
           unfolding H'_def sum_def lin_def by blast
         
         have ya: "y1 + y2 = y \<and> a1 + a2 = a" using E HE _ y x0
-        proof (rule decomp_H') txt_raw {* \label{decomp-H-use} *}
+        proof (rule decomp_H') txt_raw \<open>\label{decomp-H-use}\<close>
           from HE y1 y2 show "y1 + y2 \<in> H"
             by (rule subspace.add_closed)
           from x0 and HE y y1 y2
@@ -188,8 +188,8 @@
   qed
 qed
 
-text {* \medskip The linear extension @{text h'} of @{text h}
-  is bounded by the seminorm @{text p}. *}
+text \<open>\medskip The linear extension @{text h'} of @{text h}
+  is bounded by the seminorm @{text p}.\<close>
 
 lemma h'_norm_pres:
   assumes h'_def: "h' \<equiv> \<lambda>x. let (y, a) =
@@ -229,8 +229,8 @@
         also from x0 y' z have "p y = p (y + a \<cdot> x0)" by simp
         finally show ?thesis .
       next
-        txt {* In the case @{text "a < 0"}, we use @{text "a\<^sub>1"}
-          with @{text ya} taken as @{text "y / a"}: *}
+        txt \<open>In the case @{text "a < 0"}, we use @{text "a\<^sub>1"}
+          with @{text ya} taken as @{text "y / a"}:\<close>
         assume lz: "a < 0" then have nz: "a \<noteq> 0" by simp
         from a1 ay
         have "- p (inverse a \<cdot> y + x0) - h (inverse a \<cdot> y) \<le> xi" ..
@@ -251,8 +251,8 @@
         finally have "a * xi \<le> p (y + a \<cdot> x0) - h y" .
         then show ?thesis by simp
       next
-        txt {* In the case @{text "a > 0"}, we use @{text "a\<^sub>2"}
-          with @{text ya} taken as @{text "y / a"}: *}
+        txt \<open>In the case @{text "a > 0"}, we use @{text "a\<^sub>2"}
+          with @{text ya} taken as @{text "y / a"}:\<close>
         assume gz: "0 < a" then have nz: "a \<noteq> 0" by simp
         from a2 ay
         have "xi \<le> p (inverse a \<cdot> y + x0) - h (inverse a \<cdot> y)" ..
--- a/src/HOL/Hahn_Banach/Hahn_Banach_Sup_Lemmas.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Hahn_Banach_Sup_Lemmas.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,13 +2,13 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* The supremum w.r.t.~the function order *}
+header \<open>The supremum w.r.t.~the function order\<close>
 
 theory Hahn_Banach_Sup_Lemmas
 imports Function_Norm Zorn_Lemma
 begin
 
-text {*
+text \<open>
   This section contains some lemmas that will be used in the proof of
   the Hahn-Banach Theorem.  In this section the following context is
   presumed.  Let @{text E} be a real vector space with a seminorm
@@ -22,7 +22,7 @@
   the function @{text f} and let @{text "graph H h"} be the supremum
   of @{text c}.  Every element in @{text H} is member of one of the
   elements of the chain.
-*}
+\<close>
 lemmas [dest?] = chainsD
 lemmas chainsE2 [elim?] = chainsD2 [elim_format]
 
@@ -53,13 +53,13 @@
   ultimately show ?thesis using * by blast
 qed
 
-text {*
+text \<open>
   \medskip Let @{text c} be a chain of norm-preserving extensions of
   the function @{text f} and let @{text "graph H h"} be the supremum
   of @{text c}.  Every element in the domain @{text H} of the supremum
   function is member of the domain @{text H'} of some function @{text
   h'}, such that @{text h} extends @{text h'}.
-*}
+\<close>
 
 lemma some_H'h':
   assumes M: "M = norm_pres_extensions E p F f"
@@ -81,12 +81,12 @@
   ultimately show ?thesis using * by blast
 qed
 
-text {*
+text \<open>
   \medskip Any two elements @{text x} and @{text y} in the domain
   @{text H} of the supremum function @{text h} are both in the domain
   @{text H'} of some function @{text h'}, such that @{text h} extends
   @{text h'}.
-*}
+\<close>
 
 lemma some_H'h'2:
   assumes M: "M = norm_pres_extensions E p F f"
@@ -99,8 +99,8 @@
     \<and> linearform H' h' \<and> H' \<unlhd> E \<and> F \<unlhd> H'
     \<and> graph F f \<subseteq> graph H' h' \<and> (\<forall>x \<in> H'. h' x \<le> p x)"
 proof -
-  txt {* @{text y} is in the domain @{text H''} of some function @{text h''},
-  such that @{text h} extends @{text h''}. *}
+  txt \<open>@{text y} is in the domain @{text H''} of some function @{text h''},
+  such that @{text h} extends @{text h''}.\<close>
 
   from M cM u and y obtain H' h' where
       y_hy: "(y, h y) \<in> graph H' h'"
@@ -110,8 +110,8 @@
       "graph F f \<subseteq> graph H' h'"  "\<forall>x \<in> H'. h' x \<le> p x"
     by (rule some_H'h't [elim_format]) blast
 
-  txt {* @{text x} is in the domain @{text H'} of some function @{text h'},
-    such that @{text h} extends @{text h'}. *}
+  txt \<open>@{text x} is in the domain @{text H'} of some function @{text h'},
+    such that @{text h} extends @{text h'}.\<close>
 
   from M cM u and x obtain H'' h'' where
       x_hx: "(x, h x) \<in> graph H'' h''"
@@ -121,10 +121,10 @@
       "graph F f \<subseteq> graph H'' h''"  "\<forall>x \<in> H''. h'' x \<le> p x"
     by (rule some_H'h't [elim_format]) blast
 
-  txt {* Since both @{text h'} and @{text h''} are elements of the chain,
+  txt \<open>Since both @{text h'} and @{text h''} are elements of the chain,
     @{text h''} is an extension of @{text h'} or vice versa. Thus both
     @{text x} and @{text y} are contained in the greater
-    one. \label{cases1}*}
+    one. \label{cases1}\<close>
 
   from cM c'' c' have "graph H'' h'' \<subseteq> graph H' h' \<or> graph H' h' \<subseteq> graph H'' h''"
     (is "?case1 \<or> ?case2") ..
@@ -151,10 +151,10 @@
   qed
 qed
 
-text {*
+text \<open>
   \medskip The relation induced by the graph of the supremum of a
   chain @{text c} is definite, i.~e.~t is the graph of a function.
-*}
+\<close>
 
 lemma sup_definite:
   assumes M_def: "M \<equiv> norm_pres_extensions E p F f"
@@ -175,9 +175,9 @@
   then obtain H2 h2 where G2_rep: "G2 = graph H2 h2"
     unfolding M_def by blast
 
-  txt {* @{text "G\<^sub>1"} is contained in @{text "G\<^sub>2"}
+  txt \<open>@{text "G\<^sub>1"} is contained in @{text "G\<^sub>2"}
     or vice versa, since both @{text "G\<^sub>1"} and @{text
-    "G\<^sub>2"} are members of @{text c}. \label{cases2}*}
+    "G\<^sub>2"} are members of @{text c}. \label{cases2}\<close>
 
   from cM G1 G2 have "G1 \<subseteq> G2 \<or> G2 \<subseteq> G1" (is "?case1 \<or> ?case2") ..
   then show ?thesis
@@ -200,14 +200,14 @@
   qed
 qed
 
-text {*
+text \<open>
   \medskip The limit function @{text h} is linear. Every element
   @{text x} in the domain of @{text h} is in the domain of a function
   @{text h'} in the chain of norm preserving extensions.  Furthermore,
   @{text h} is an extension of @{text h'} so the function values of
   @{text x} are identical for @{text h'} and @{text h}.  Finally, the
   function @{text h'} is linear by construction of @{text M}.
-*}
+\<close>
 
 lemma sup_lf:
   assumes M: "M = norm_pres_extensions E p F f"
@@ -255,12 +255,12 @@
   qed
 qed
 
-text {*
+text \<open>
   \medskip The limit of a non-empty chain of norm preserving
   extensions of @{text f} is an extension of @{text f}, since every
   element of the chain is an extension of @{text f} and the supremum
   is an extension for every element of the chain.
-*}
+\<close>
 
 lemma sup_ext:
   assumes graph: "graph H h = \<Union>c"
@@ -281,12 +281,12 @@
   finally show ?thesis .
 qed
 
-text {*
+text \<open>
   \medskip The domain @{text H} of the limit function is a superspace
   of @{text F}, since @{text F} is a subset of @{text H}. The
   existence of the @{text 0} element in @{text F} and the closure
   properties follow from the fact that @{text F} is a vector space.
-*}
+\<close>
 
 lemma sup_supF:
   assumes graph: "graph H h = \<Union>c"
@@ -306,10 +306,10 @@
   with FE show "a \<cdot> x \<in> F" by (rule subspace.mult_closed)
 qed
 
-text {*
+text \<open>
   \medskip The domain @{text H} of the limit function is a subspace of
   @{text E}.
-*}
+\<close>
 
 lemma sup_subE:
   assumes graph: "graph H h = \<Union>c"
@@ -362,10 +362,10 @@
   qed
 qed
 
-text {*
+text \<open>
   \medskip The limit function is bounded by the norm @{text p} as
   well, since all elements in the chain are bounded by @{text p}.
-*}
+\<close>
 
 lemma sup_norm_pres:
   assumes graph: "graph H h = \<Union>c"
@@ -383,7 +383,7 @@
   finally show "h x \<le> p x" .
 qed
 
-text {*
+text \<open>
   \medskip The following lemma is a property of linear forms on real
   vector spaces. It will be used for the lemma @{text abs_Hahn_Banach}
   (see page \pageref{abs-Hahn_Banach}). \label{abs-ineq-iff} For real
@@ -394,7 +394,7 @@
   @{text "\<forall>x \<in> H. h x \<le> p x"} \\
   \end{tabular}
   \end{center}
-*}
+\<close>
 
 lemma abs_ineq_iff:
   assumes "subspace H E" and "vectorspace E" and "seminorm E p"
@@ -405,7 +405,7 @@
   interpret vectorspace E by fact
   interpret seminorm E p by fact
   interpret linearform H h by fact
-  have H: "vectorspace H" using `vectorspace E` ..
+  have H: "vectorspace H" using \<open>vectorspace E\<close> ..
   {
     assume l: ?L
     show ?R
@@ -422,13 +422,13 @@
       fix x assume x: "x \<in> H"
       show "\<And>a b :: real. - a \<le> b \<Longrightarrow> b \<le> a \<Longrightarrow> \<bar>b\<bar> \<le> a"
         by arith
-      from `linearform H h` and H x
+      from \<open>linearform H h\<close> and H x
       have "- h x = h (- x)" by (rule linearform.neg [symmetric])
       also
       from H x have "- x \<in> H" by (rule vectorspace.neg_closed)
       with r have "h (- x) \<le> p (- x)" ..
       also have "\<dots> = p x"
-        using `seminorm E p` `vectorspace E`
+        using \<open>seminorm E p\<close> \<open>vectorspace E\<close>
       proof (rule seminorm.minus)
         from x show "x \<in> E" ..
       qed
--- a/src/HOL/Hahn_Banach/Linearform.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Linearform.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,16 +2,16 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* Linearforms *}
+header \<open>Linearforms\<close>
 
 theory Linearform
 imports Vector_Space
 begin
 
-text {*
+text \<open>
   A \emph{linear form} is a function on a vector space into the reals
   that is additive and multiplicative.
-*}
+\<close>
 
 locale linearform =
   fixes V :: "'a\<Colon>{minus, plus, zero, uminus} set" and f
@@ -40,11 +40,11 @@
   assume x: "x \<in> V" and y: "y \<in> V"
   then have "x - y = x + - y" by (rule diff_eq1)
   also have "f \<dots> = f x + f (- y)" by (rule add) (simp_all add: x y)
-  also have "f (- y) = - f y" using `vectorspace V` y by (rule neg)
+  also have "f (- y) = - f y" using \<open>vectorspace V\<close> y by (rule neg)
   finally show ?thesis by simp
 qed
 
-text {* Every linear form yields @{text 0} for the @{text 0} vector. *}
+text \<open>Every linear form yields @{text 0} for the @{text 0} vector.\<close>
 
 lemma (in linearform) zero [iff]:
   assumes "vectorspace V"
@@ -52,7 +52,7 @@
 proof -
   interpret vectorspace V by fact
   have "f 0 = f (0 - 0)" by simp
-  also have "\<dots> = f 0 - f 0" using `vectorspace V` by (rule diff) simp_all
+  also have "\<dots> = f 0 - f 0" using \<open>vectorspace V\<close> by (rule diff) simp_all
   also have "\<dots> = 0" by simp
   finally show ?thesis .
 qed
--- a/src/HOL/Hahn_Banach/Normed_Space.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Normed_Space.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,19 +2,19 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* Normed vector spaces *}
+header \<open>Normed vector spaces\<close>
 
 theory Normed_Space
 imports Subspace
 begin
 
-subsection {* Quasinorms *}
+subsection \<open>Quasinorms\<close>
 
-text {*
+text \<open>
   A \emph{seminorm} @{text "\<parallel>\<cdot>\<parallel>"} is a function on a real vector space
   into the reals that has the following properties: it is positive
   definite, absolute homogeneous and subadditive.
-*}
+\<close>
 
 locale seminorm =
   fixes V :: "'a\<Colon>{minus, plus, zero, uminus} set"
@@ -54,23 +54,23 @@
 qed
 
 
-subsection {* Norms *}
+subsection \<open>Norms\<close>
 
-text {*
+text \<open>
   A \emph{norm} @{text "\<parallel>\<cdot>\<parallel>"} is a seminorm that maps only the
   @{text 0} vector to @{text 0}.
-*}
+\<close>
 
 locale norm = seminorm +
   assumes zero_iff [iff]: "x \<in> V \<Longrightarrow> (\<parallel>x\<parallel> = 0) = (x = 0)"
 
 
-subsection {* Normed vector spaces *}
+subsection \<open>Normed vector spaces\<close>
 
-text {*
+text \<open>
   A vector space together with a norm is called a \emph{normed
   space}.
-*}
+\<close>
 
 locale normed_vectorspace = vectorspace + norm
 
@@ -90,9 +90,9 @@
   finally show ?thesis .
 qed
 
-text {*
+text \<open>
   Any subspace of a normed vector space is again a normed vectorspace.
-*}
+\<close>
 
 lemma subspace_normed_vs [intro?]:
   fixes F E norm
--- a/src/HOL/Hahn_Banach/Subspace.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Subspace.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,19 +2,19 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* Subspaces *}
+header \<open>Subspaces\<close>
 
 theory Subspace
 imports Vector_Space "~~/src/HOL/Library/Set_Algebras"
 begin
 
-subsection {* Definition *}
+subsection \<open>Definition\<close>
 
-text {*
+text \<open>
   A non-empty subset @{text U} of a vector space @{text V} is a
   \emph{subspace} of @{text V}, iff @{text U} is closed under addition
   and scalar multiplication.
-*}
+\<close>
 
 locale subspace =
   fixes U :: "'a\<Colon>{minus, plus, zero, uminus} set" and V
@@ -49,11 +49,11 @@
   from x y show ?thesis by (simp add: diff_eq1 negate_eq1)
 qed
 
-text {*
+text \<open>
   \medskip Similar as for linear spaces, the existence of the zero
   element in every subspace follows from the non-emptiness of the
   carrier set and by vector space laws.
-*}
+\<close>
 
 lemma (in subspace) zero [intro]:
   assumes "vectorspace V"
@@ -63,7 +63,7 @@
   have "U \<noteq> {}" by (rule non_empty)
   then obtain x where x: "x \<in> U" by blast
   then have "x \<in> V" .. then have "0 = x - x" by simp
-  also from `vectorspace V` x x have "\<dots> \<in> U" by (rule diff_closed)
+  also from \<open>vectorspace V\<close> x x have "\<dots> \<in> U" by (rule diff_closed)
   finally show ?thesis .
 qed
 
@@ -76,7 +76,7 @@
   from x show ?thesis by (simp add: negate_eq1)
 qed
 
-text {* \medskip Further derived laws: every subspace is a vector space. *}
+text \<open>\medskip Further derived laws: every subspace is a vector space.\<close>
 
 lemma (in subspace) vectorspace [iff]:
   assumes "vectorspace V"
@@ -104,7 +104,7 @@
 qed
 
 
-text {* The subspace relation is reflexive. *}
+text \<open>The subspace relation is reflexive.\<close>
 
 lemma (in vectorspace) subspace_refl [intro]: "V \<unlhd> V"
 proof
@@ -117,7 +117,7 @@
   from x show "a \<cdot> x \<in> V" by simp
 qed
 
-text {* The subspace relation is transitive. *}
+text \<open>The subspace relation is transitive.\<close>
 
 lemma (in vectorspace) subspace_trans [trans]:
   "U \<unlhd> V \<Longrightarrow> V \<unlhd> W \<Longrightarrow> U \<unlhd> W"
@@ -136,14 +136,14 @@
 qed
 
 
-subsection {* Linear closure *}
+subsection \<open>Linear closure\<close>
 
-text {*
+text \<open>
   The \emph{linear closure} of a vector @{text x} is the set of all
   scalar multiples of @{text x}.
-*}
+\<close>
 
-definition lin :: "('a::{minus, plus, zero}) \<Rightarrow> 'a set"
+definition lin :: "('a::{minus,plus,zero}) \<Rightarrow> 'a set"
   where "lin x = {a \<cdot> x | a. True}"
 
 lemma linI [intro]: "y = a \<cdot> x \<Longrightarrow> y \<in> lin x"
@@ -156,7 +156,7 @@
   unfolding lin_def by blast
 
 
-text {* Every vector is contained in its linear closure. *}
+text \<open>Every vector is contained in its linear closure.\<close>
 
 lemma (in vectorspace) x_lin_x [iff]: "x \<in> V \<Longrightarrow> x \<in> lin x"
 proof -
@@ -172,7 +172,7 @@
   then show "0 = 0 \<cdot> x" by simp
 qed
 
-text {* Any linear closure is a subspace. *}
+text \<open>Any linear closure is a subspace.\<close>
 
 lemma (in vectorspace) lin_subspace [intro]:
   assumes x: "x \<in> V"
@@ -208,25 +208,25 @@
 qed
 
 
-text {* Any linear closure is a vector space. *}
+text \<open>Any linear closure is a vector space.\<close>
 
 lemma (in vectorspace) lin_vectorspace [intro]:
   assumes "x \<in> V"
   shows "vectorspace (lin x)"
 proof -
-  from `x \<in> V` have "subspace (lin x) V"
+  from \<open>x \<in> V\<close> have "subspace (lin x) V"
     by (rule lin_subspace)
   from this and vectorspace_axioms show ?thesis
     by (rule subspace.vectorspace)
 qed
 
 
-subsection {* Sum of two vectorspaces *}
+subsection \<open>Sum of two vectorspaces\<close>
 
-text {*
+text \<open>
   The \emph{sum} of two vectorspaces @{text U} and @{text V} is the
   set of all sums of elements from @{text U} and @{text V}.
-*}
+\<close>
 
 lemma sum_def: "U + V = {u + v | u v. u \<in> U \<and> v \<in> V}"
   unfolding set_plus_def by auto
@@ -243,7 +243,7 @@
     "u \<in> U \<Longrightarrow> v \<in> V \<Longrightarrow> u + v \<in> U + V"
   unfolding sum_def by blast
 
-text {* @{text U} is a subspace of @{text "U + V"}. *}
+text \<open>@{text U} is a subspace of @{text "U + V"}.\<close>
 
 lemma subspace_sum1 [iff]:
   assumes "vectorspace U" "vectorspace V"
@@ -267,7 +267,7 @@
   qed
 qed
 
-text {* The sum of two subspaces is again a subspace. *}
+text \<open>The sum of two subspaces is again a subspace.\<close>
 
 lemma sum_subspace [intro?]:
   assumes "subspace U E" "vectorspace E" "subspace V E"
@@ -280,8 +280,8 @@
   proof
     have "0 \<in> U + V"
     proof
-      show "0 \<in> U" using `vectorspace E` ..
-      show "0 \<in> V" using `vectorspace E` ..
+      show "0 \<in> U" using \<open>vectorspace E\<close> ..
+      show "0 \<in> V" using \<open>vectorspace E\<close> ..
       show "(0::'a) = 0 + 0" by simp
     qed
     then show "U + V \<noteq> {}" by blast
@@ -316,22 +316,22 @@
   qed
 qed
 
-text{* The sum of two subspaces is a vectorspace. *}
+text\<open>The sum of two subspaces is a vectorspace.\<close>
 
 lemma sum_vs [intro?]:
     "U \<unlhd> E \<Longrightarrow> V \<unlhd> E \<Longrightarrow> vectorspace E \<Longrightarrow> vectorspace (U + V)"
   by (rule subspace.vectorspace) (rule sum_subspace)
 
 
-subsection {* Direct sums *}
+subsection \<open>Direct sums\<close>
 
-text {*
+text \<open>
   The sum of @{text U} and @{text V} is called \emph{direct}, iff the
   zero element is the only common element of @{text U} and @{text
   V}. For every element @{text x} of the direct sum of @{text U} and
   @{text V} the decomposition in @{text "x = u + v"} with
   @{text "u \<in> U"} and @{text "v \<in> V"} is unique.
-*}
+\<close>
 
 lemma decomp:
   assumes "vectorspace E" "subspace U E" "subspace V E"
@@ -347,9 +347,9 @@
   show ?thesis
   proof
     have U: "vectorspace U"  (* FIXME: use interpret *)
-      using `subspace U E` `vectorspace E` by (rule subspace.vectorspace)
+      using \<open>subspace U E\<close> \<open>vectorspace E\<close> by (rule subspace.vectorspace)
     have V: "vectorspace V"
-      using `subspace V E` `vectorspace E` by (rule subspace.vectorspace)
+      using \<open>subspace V E\<close> \<open>vectorspace E\<close> by (rule subspace.vectorspace)
     from u1 u2 v1 v2 and sum have eq: "u1 - u2 = v2 - v1"
       by (simp add: add_diff_swap)
     from u1 u2 have u: "u1 - u2 \<in> U"
@@ -374,14 +374,14 @@
   qed
 qed
 
-text {*
+text \<open>
   An application of the previous lemma will be used in the proof of
   the Hahn-Banach Theorem (see page \pageref{decomp-H-use}): for any
   element @{text "y + a \<cdot> x\<^sub>0"} of the direct sum of a
   vectorspace @{text H} and the linear closure of @{text "x\<^sub>0"}
   the components @{text "y \<in> H"} and @{text a} are uniquely
   determined.
-*}
+\<close>
 
 lemma decomp_H':
   assumes "vectorspace E" "subspace H E"
@@ -414,31 +414,31 @@
             from x have "x \<in> H" ..
             with xx' have "inverse a \<cdot> a \<cdot> x' \<in> H" by simp
             with a and x' have "x' \<in> H" by (simp add: mult_assoc2)
-            with `x' \<notin> H` show ?thesis by contradiction
+            with \<open>x' \<notin> H\<close> show ?thesis by contradiction
           qed
           then show "x \<in> {0}" ..
         qed
         show "{0} \<subseteq> H \<inter> lin x'"
         proof -
-          have "0 \<in> H" using `vectorspace E` ..
-          moreover have "0 \<in> lin x'" using `x' \<in> E` ..
+          have "0 \<in> H" using \<open>vectorspace E\<close> ..
+          moreover have "0 \<in> lin x'" using \<open>x' \<in> E\<close> ..
           ultimately show ?thesis by blast
         qed
       qed
-      show "lin x' \<unlhd> E" using `x' \<in> E` ..
-    qed (rule `vectorspace E`, rule `subspace H E`, rule y1, rule y2, rule eq)
+      show "lin x' \<unlhd> E" using \<open>x' \<in> E\<close> ..
+    qed (rule \<open>vectorspace E\<close>, rule \<open>subspace H E\<close>, rule y1, rule y2, rule eq)
     then show "y1 = y2" ..
     from c have "a1 \<cdot> x' = a2 \<cdot> x'" ..
     with x' show "a1 = a2" by (simp add: mult_right_cancel)
   qed
 qed
 
-text {*
+text \<open>
   Since for any element @{text "y + a \<cdot> x'"} of the direct sum of a
   vectorspace @{text H} and the linear closure of @{text x'} the
   components @{text "y \<in> H"} and @{text a} are unique, it follows from
   @{text "y \<in> H"} that @{text "a = 0"}.
-*}
+\<close>
 
 lemma decomp_H'_H:
   assumes "vectorspace E" "subspace H E"
@@ -456,16 +456,16 @@
     proof (rule decomp_H')
       from ya x' show "y + a \<cdot> x' = t + 0 \<cdot> x'" by simp
       from ya show "y \<in> H" ..
-    qed (rule `vectorspace E`, rule `subspace H E`, rule t, (rule x')+)
+    qed (rule \<open>vectorspace E\<close>, rule \<open>subspace H E\<close>, rule t, (rule x')+)
     with t x' show "(y, a) = (y + a \<cdot> x', 0)" by simp
   qed
 qed
 
-text {*
+text \<open>
   The components @{text "y \<in> H"} and @{text a} in @{text "y + a \<cdot> x'"}
   are unique, so the function @{text h'} defined by
   @{text "h' (y + a \<cdot> x') = h y + a \<cdot> \<xi>"} is definite.
-*}
+\<close>
 
 lemma h'_definite:
   fixes H
@@ -498,7 +498,7 @@
         from xq show "fst q \<in> H" ..
         from xp and xq show "fst p + snd p \<cdot> x' = fst q + snd q \<cdot> x'"
           by simp
-      qed (rule `vectorspace E`, rule `subspace H E`, (rule x')+)
+      qed (rule \<open>vectorspace E\<close>, rule \<open>subspace H E\<close>, (rule x')+)
       then show ?thesis by (cases p, cases q) simp
     qed
   qed
--- a/src/HOL/Hahn_Banach/Vector_Space.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Vector_Space.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,32 +2,27 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* Vector spaces *}
+header \<open>Vector spaces\<close>
 
 theory Vector_Space
 imports Complex_Main Bounds
 begin
 
-subsection {* Signature *}
+subsection \<open>Signature\<close>
 
-text {*
+text \<open>
   For the definition of real vector spaces a type @{typ 'a} of the
   sort @{text "{plus, minus, zero}"} is considered, on which a real
   scalar multiplication @{text \<cdot>} is declared.
-*}
+\<close>
 
 consts
-  prod  :: "real \<Rightarrow> 'a::{plus, minus, zero} \<Rightarrow> 'a"     (infixr "'(*')" 70)
-
-notation (xsymbols)
-  prod  (infixr "\<cdot>" 70)
-notation (HTML output)
-  prod  (infixr "\<cdot>" 70)
+  prod :: "real \<Rightarrow> 'a::{plus,minus,zero} \<Rightarrow> 'a"  (infixr "\<cdot>" 70)
 
 
-subsection {* Vector space laws *}
+subsection \<open>Vector space laws\<close>
 
-text {*
+text \<open>
   A \emph{vector space} is a non-empty set @{text V} of elements from
   @{typ 'a} with the following vector space laws: The set @{text V} is
   closed under addition and scalar multiplication, addition is
@@ -36,7 +31,7 @@
   addition.  Addition and multiplication are distributive; scalar
   multiplication is associative and the real number @{text "1"} is
   the neutral element of scalar multiplication.
-*}
+\<close>
 
 locale vectorspace =
   fixes V
@@ -83,8 +78,8 @@
 theorems add_ac = add_assoc add_commute add_left_commute
 
 
-text {* The existence of the zero element of a vector space
-  follows from the non-emptiness of carrier set. *}
+text \<open>The existence of the zero element of a vector space
+  follows from the non-emptiness of carrier set.\<close>
 
 lemma zero [iff]: "0 \<in> V"
 proof -
@@ -127,7 +122,7 @@
   diff_mult_distrib1 diff_mult_distrib2
 
 
-text {* \medskip Further derived laws: *}
+text \<open>\medskip Further derived laws:\<close>
 
 lemma mult_zero_left [simp]: "x \<in> V \<Longrightarrow> 0 \<cdot> x = 0"
 proof -
@@ -236,11 +231,11 @@
 proof (rule classical)
   assume a: "a \<noteq> 0"
   from x a have "x = (inverse a * a) \<cdot> x" by simp
-  also from `x \<in> V` have "\<dots> = inverse a \<cdot> (a \<cdot> x)" by (rule mult_assoc)
+  also from \<open>x \<in> V\<close> have "\<dots> = inverse a \<cdot> (a \<cdot> x)" by (rule mult_assoc)
   also from ax have "\<dots> = inverse a \<cdot> 0" by simp
   also have "\<dots> = 0" by simp
   finally have "x = 0" .
-  with `x \<noteq> 0` show "a = 0" by contradiction
+  with \<open>x \<noteq> 0\<close> show "a = 0" by contradiction
 qed
 
 lemma mult_left_cancel:
@@ -330,7 +325,7 @@
 proof -
   from assms have "- c + (a + b) = - c + (c + d)"
     by (simp add: add_left_cancel)
-  also have "\<dots> = d" using `c \<in> V` `d \<in> V` by (rule minus_add_cancel)
+  also have "\<dots> = d" using \<open>c \<in> V\<close> \<open>d \<in> V\<close> by (rule minus_add_cancel)
   finally have eq: "- c + (a + b) = d" .
   from vs have "a - c = (- c + (a + b)) + - b"
     by (simp add: add_ac diff_eq1)
--- a/src/HOL/Hahn_Banach/Zorn_Lemma.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Hahn_Banach/Zorn_Lemma.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -2,13 +2,13 @@
     Author:     Gertrud Bauer, TU Munich
 *)
 
-header {* Zorn's Lemma *}
+header \<open>Zorn's Lemma\<close>
 
 theory Zorn_Lemma
 imports Main
 begin
 
-text {*
+text \<open>
   Zorn's Lemma states: if every linearly ordered subset of an ordered
   set @{text S} has an upper bound in @{text S}, then there exists a
   maximal element in @{text S}.  In our application, @{text S} is a
@@ -17,7 +17,7 @@
   of Zorn's lemma can be modified: if @{text S} is non-empty, it
   suffices to show that for every non-empty chain @{text c} in @{text
   S} the union of @{text c} also lies in @{text S}.
-*}
+\<close>
 
 theorem Zorn's_Lemma:
   assumes r: "\<And>c. c \<in> chains S \<Longrightarrow> \<exists>x. x \<in> c \<Longrightarrow> \<Union>c \<in> S"
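
The reduction described in the text block above can be spelled out informally; it mirrors the case split on c = {} in the proof hunks that follow. Zorn's lemma requires an upper bound in S for every chain c ⊆ S, and under the stated assumption this is immediate:

  \[
    c = \emptyset \;\Longrightarrow\; \text{any } a \in S \text{ is an upper bound of } c ,
    \qquad
    c \neq \emptyset \;\Longrightarrow\; \bigcup c \in S \ \text{and}\ \forall z \in c.\; z \subseteq \bigcup c .
  \]

So non-emptiness of S together with closure of S under unions of non-empty chains suffices for a maximal element.
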
@@ -30,14 +30,14 @@
     show "\<exists>y \<in> S. \<forall>z \<in> c. z \<subseteq> y"
     proof cases
 
-      txt {* If @{text c} is an empty chain, then every element in
-        @{text S} is an upper bound of @{text c}. *}
+      txt \<open>If @{text c} is an empty chain, then every element in
+        @{text S} is an upper bound of @{text c}.\<close>
 
       assume "c = {}"
       with aS show ?thesis by fast
 
-      txt {* If @{text c} is non-empty, then @{text "\<Union>c"} is an upper
-        bound of @{text c}, lying in @{text S}. *}
+      txt \<open>If @{text c} is non-empty, then @{text "\<Union>c"} is an upper
+        bound of @{text c}, lying in @{text S}.\<close>
 
     next
       assume "c \<noteq> {}"
@@ -46,7 +46,7 @@
         show "\<forall>z \<in> c. z \<subseteq> \<Union>c" by fast
         show "\<Union>c \<in> S"
         proof (rule r)
-          from `c \<noteq> {}` show "\<exists>x. x \<in> c" by fast
+          from \<open>c \<noteq> {}\<close> show "\<exists>x. x \<in> c" by fast
           show "c \<in> chains S" by fact
         qed
       qed
--- a/src/HOL/Multivariate_Analysis/Topology_Euclidean_Space.thy	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/HOL/Multivariate_Analysis/Topology_Euclidean_Space.thy	Tue Oct 21 21:55:45 2014 +0200
@@ -1015,7 +1015,7 @@
     and "(\<forall>i\<in>Basis. a\<bullet>i \<le> c\<bullet>i \<and> d\<bullet>i \<le> b\<bullet>i) \<Longrightarrow> box c d \<subseteq> cbox a b"
      and "(\<forall>i\<in>Basis. a\<bullet>i \<le> c\<bullet>i \<and> d\<bullet>i \<le> b\<bullet>i) \<Longrightarrow> box c d \<subseteq> box a b"
   unfolding subset_eq[unfolded Ball_def] unfolding mem_box
-   by (best intro: order_trans less_le_trans le_less_trans less_imp_le)+
+  by (best intro: order_trans less_le_trans le_less_trans less_imp_le)+
 
 lemma box_subset_cbox:
   fixes a :: "'a::euclidean_space"
@@ -1153,7 +1153,9 @@
 
 lemma UN_box_eq_UNIV: "(\<Union>i::nat. box (- (real i *\<^sub>R One)) (real i *\<^sub>R One)) = UNIV"
 proof -
-  { fix x b :: 'a assume [simp]: "b \<in> Basis"
+  {
+    fix x b :: 'a
+    assume [simp]: "b \<in> Basis"
     have "\<bar>x \<bullet> b\<bar> \<le> real (natceiling \<bar>x \<bullet> b\<bar>)"
       by (rule real_natceiling_ge)
     also have "\<dots> \<le> real (natceiling (Max ((\<lambda>b. \<bar>x \<bullet> b\<bar>)`Basis)))"
@@ -1679,7 +1681,7 @@
   by (simp add: frontier_def)
 
 lemma frontier_subset_eq: "frontier S \<subseteq> S \<longleftrightarrow> closed S"
-proof-
+proof -
   {
     assume "frontier S \<subseteq> S"
     then have "closure S \<subseteq> S"
@@ -1690,13 +1692,14 @@
   then show ?thesis using frontier_subset_closed[of S] ..
 qed
 
-lemma frontier_complement: "frontier(- S) = frontier S"
+lemma frontier_complement: "frontier (- S) = frontier S"
   by (auto simp add: frontier_def closure_complement interior_complement)
 
 lemma frontier_disjoint_eq: "frontier S \<inter> S = {} \<longleftrightarrow> open S"
   using frontier_complement frontier_subset_eq[of "- S"]
   unfolding open_closed by auto
 
+
 subsection {* Filters and the ``eventually true'' quantifier *}
 
 definition indirection :: "'a::real_normed_vector \<Rightarrow> 'a \<Rightarrow> 'a filter"
--- a/src/Pure/Isar/outer_syntax.scala	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Pure/Isar/outer_syntax.scala	Tue Oct 21 21:55:45 2014 +0200
@@ -63,11 +63,11 @@
   /* overall document structure */
 
   sealed abstract class Document { def length: Int }
-  case class Document_Block(val name: String, val body: List[Document]) extends Document
+  case class Document_Block(name: String, text: String, body: List[Document]) extends Document
   {
     val length: Int = (0 /: body)(_ + _.length)
   }
-  case class Document_Atom(val command: Command) extends Document
+  case class Document_Atom(command: Command) extends Document
   {
     def length: Int = command.length
   }
@@ -192,8 +192,8 @@
           if (tok.is_command) {
             if (command_kind(tok, Keyword.theory_goal)) (2, 1)
             else if (command_kind(tok, Keyword.theory)) (1, 0)
-            else if (command_kind(tok, Keyword.proof_goal) || tok.source == "{") (y + 2, y + 1)
-            else if (command_kind(tok, Keyword.qed) || tok.source == "}") (y + 1, y - 1)
+            else if (command_kind(tok, Keyword.proof_goal) || tok.is_begin_block) (y + 2, y + 1)
+            else if (command_kind(tok, Keyword.qed) || tok.is_end_block) (y + 1, y - 1)
             else if (command_kind(tok, Keyword.qed_global)) (1, 0)
             else (x, y)
           }
@@ -216,11 +216,7 @@
     }
   }
 
-  def scan_line(
-    input: CharSequence,
-    context: Scan.Line_Context,
-    structure: Outer_Syntax.Line_Structure)
-    : (List[Token], Scan.Line_Context, Outer_Syntax.Line_Structure) =
+  def scan_line(input: CharSequence, context: Scan.Line_Context): (List[Token], Scan.Line_Context) =
   {
     var in: Reader[Char] = new CharSequenceReader(input)
     val toks = new mutable.ListBuffer[Token]
@@ -232,8 +228,7 @@
           error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
       }
     }
-    val tokens = toks.toList
-    (tokens, ctxt, line_structure(tokens, structure))
+    (toks.toList, ctxt)
   }
 
 
@@ -293,32 +288,32 @@
     }
   }
 
-  def parse_document(node_name: Document.Node.Name, text: CharSequence): Outer_Syntax.Document =
+  def parse_document(node_name: Document.Node.Name, text: CharSequence):
+    List[Outer_Syntax.Document] =
   {
     /* stack operations */
 
     def buffer(): mutable.ListBuffer[Outer_Syntax.Document] =
       new mutable.ListBuffer[Outer_Syntax.Document]
 
-    var stack: List[(Int, String, mutable.ListBuffer[Outer_Syntax.Document])] =
-      List((0, node_name.toString, buffer()))
+    var stack: List[(Int, Command, mutable.ListBuffer[Outer_Syntax.Document])] =
+      List((0, Command.empty, buffer()))
 
     @tailrec def close(level: Int => Boolean)
     {
       stack match {
-        case (lev, name, body) :: (_, _, body2) :: rest if level(lev) =>
-          body2 += Outer_Syntax.Document_Block(name, body.toList)
+        case (lev, command, body) :: (_, _, body2) :: rest if level(lev) =>
+          body2 += Outer_Syntax.Document_Block(command.name, command.source, body.toList)
           stack = stack.tail
           close(level)
         case _ =>
       }
     }
 
-    def result(): Outer_Syntax.Document =
+    def result(): List[Outer_Syntax.Document] =
     {
       close(_ => true)
-      val (_, name, body) = stack.head
-      Outer_Syntax.Document_Block(name, body.toList)
+      stack.head._3.toList
     }
 
     def add(command: Command)
@@ -326,7 +321,7 @@
       heading_level(command) match {
         case Some(i) =>
           close(_ > i)
-          stack = (i + 1, command.source, buffer()) :: stack
+          stack = (i + 1, command, buffer()) :: stack
         case None =>
       }
       stack.head._3 += Outer_Syntax.Document_Atom(command)
--- a/src/Pure/Isar/token.scala	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Pure/Isar/token.scala	Tue Oct 21 21:55:45 2014 +0200
@@ -190,7 +190,10 @@
     source.startsWith(Symbol.open_decoded))
 
   def is_begin: Boolean = is_keyword && source == "begin"
-  def is_end: Boolean = is_keyword && source == "end"
+  def is_end: Boolean = is_command && source == "end"
+
+  def is_begin_block: Boolean = is_command && source == "{"
+  def is_end_block: Boolean = is_command && source == "}"
 
   def content: String =
     if (kind == Token.Kind.STRING) Scan.Parsers.quoted_content("\"", source)
--- a/src/Pure/PIDE/resources.ML	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Pure/PIDE/resources.ML	Tue Oct 21 21:55:45 2014 +0200
@@ -224,7 +224,7 @@
     space_explode "/" name
     |> map Latex.output_ascii
     |> space_implode (Latex.output_ascii "/" ^ "\\discretionary{}{}{}")
-    |> Thy_Output.enclose_isabelle_tt ctxt
+    |> enclose "\\isatt{" "}"
   end;
 
 in
--- a/src/Pure/PIDE/text.scala	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Pure/PIDE/text.scala	Tue Oct 21 21:55:45 2014 +0200
@@ -51,6 +51,8 @@
     def is_singularity: Boolean = start == stop
     def inflate_singularity: Range = if (is_singularity) Range(start, start + 1) else this
 
+    def touches(i: Offset): Boolean = start <= i && i <= stop
+
     def contains(i: Offset): Boolean = start == i || start < i && i < stop
     def contains(that: Range): Boolean = this.contains(that.start) && that.stop <= this.stop
     def overlaps(that: Range): Boolean = this.contains(that.start) || that.contains(this.start)
--- a/src/Pure/Thy/thy_output.ML	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Pure/Thy/thy_output.ML	Tue Oct 21 21:55:45 2014 +0200
@@ -35,7 +35,6 @@
     Token.src -> 'a list -> Pretty.T list
   val output: Proof.context -> Pretty.T list -> string
   val verbatim_text: Proof.context -> string -> string
-  val enclose_isabelle_tt: Proof.context -> string -> string
 end;
 
 structure Thy_Output: THY_OUTPUT =
@@ -470,10 +469,6 @@
 fun tweak_line ctxt s =
   if Config.get ctxt display then s else Symbol.strip_blanks s;
 
-fun tweak_lines ctxt s =
-  if Config.get ctxt display then s
-  else s |> split_lines |> map Symbol.strip_blanks |> space_implode " ";
-
 fun pretty_text ctxt =
   Pretty.chunks o map Pretty.str o map (tweak_line ctxt) o split_lines;
 
@@ -643,18 +638,18 @@
 
 (* verbatim text *)
 
-fun enclose_isabelle_tt ctxt =
-  if Config.get ctxt display
-  then enclose "\\begin{isabellett}%\n" "%\n\\end{isabellett}"
-  else enclose "\\isatt{" "}";
+fun verbatim_text ctxt =
+  if Config.get ctxt display then
+    Latex.output_ascii #>
+    enclose "\\begin{isabellett}%\n" "%\n\\end{isabellett}"
+  else
+    split_lines #>
+    map (Latex.output_ascii #> enclose "\\isatt{" "}") #>
+    space_implode "\\isasep\\isanewline%\n";
 
-fun verbatim_text ctxt =
-  tweak_lines ctxt
-  #> Latex.output_ascii
-  #> enclose_isabelle_tt ctxt;
-
-val _ = Theory.setup
-  (antiquotation @{binding verbatim} (Scan.lift Args.text) (verbatim_text o #context));
+val _ =
+  Theory.setup
+    (antiquotation @{binding verbatim} (Scan.lift Args.text) (verbatim_text o #context));
 
 
 (* ML text *)
--- a/src/Tools/jEdit/etc/options	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Tools/jEdit/etc/options	Tue Oct 21 21:55:45 2014 +0200
@@ -30,6 +30,9 @@
 public option jedit_text_overview_limit : int = 65536
   -- "maximum amount of text to visualize in overview column"
 
+public option jedit_structure_limit : int = 1000
+  -- "maximum number of lines to scan for language structure"
+
 public option jedit_symbols_search_limit : int = 50
   -- "maximum number of symbols in search result"
 
--- a/src/Tools/jEdit/lib/Tools/jedit	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Tools/jEdit/lib/Tools/jedit	Tue Oct 21 21:55:45 2014 +0200
@@ -46,6 +46,7 @@
   "src/simplifier_trace_window.scala"
   "src/sledgehammer_dockable.scala"
   "src/spell_checker.scala"
+  "src/structure_matching.scala"
   "src/symbols_dockable.scala"
   "src/syslog_dockable.scala"
   "src/text_overview.scala"
--- a/src/Tools/jEdit/src/bibtex_jedit.scala	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Tools/jEdit/src/bibtex_jedit.scala	Tue Oct 21 21:55:45 2014 +0200
@@ -231,9 +231,8 @@
   {
     override def supportsCompletion = false
 
-    private class Asset(
-        label: String, label_html: String, start: Text.Offset, stop: Text.Offset, source: String)
-      extends Isabelle_Sidekick.Asset(label, start, stop) {
+    private class Asset(label: String, label_html: String, range: Text.Range, source: String)
+      extends Isabelle_Sidekick.Asset(label, range) {
         override def getShortString: String = label_html
         override def getLongString: String = source
       }
@@ -252,7 +251,8 @@
             val label = kind + (if (name == "") "" else " " + name)
             val label_html =
               "<html><b>" + kind + "</b>" + (if (name == "") "" else " " + name) + "</html>"
-            val asset = new Asset(label, label_html, offset, offset + source.size, source)
+            val range = Text.Range(offset, offset + source.size)
+            val asset = new Asset(label, label_html, range, source)
             data.root.add(new DefaultMutableTreeNode(asset))
           }
           offset += source.size
--- a/src/Tools/jEdit/src/document_view.scala	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Tools/jEdit/src/document_view.scala	Tue Oct 21 21:55:45 2014 +0200
@@ -251,6 +251,8 @@
     text_area.addKeyListener(key_listener)
     text_area.addCaretListener(caret_listener)
     text_area.addLeftOfScrollBar(overview)
+    Isabelle.structure_matchers(JEdit_Lib.buffer_mode(text_area.getBuffer)).
+      foreach(text_area.addStructureMatcher(_))
     session.raw_edits += main
     session.commands_changed += main
   }
@@ -261,6 +263,8 @@
 
     session.raw_edits -= main
     session.commands_changed -= main
+    Isabelle.structure_matchers(JEdit_Lib.buffer_mode(text_area.getBuffer)).
+      foreach(text_area.removeStructureMatcher(_))
     text_area.removeLeftOfScrollBar(overview); overview.delay_repaint.revoke()
     text_area.removeCaretListener(caret_listener); delay_caret_update.revoke()
     text_area.removeKeyListener(key_listener)
--- a/src/Tools/jEdit/src/fold_handling.scala	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Tools/jEdit/src/fold_handling.scala	Tue Oct 21 21:55:45 2014 +0200
@@ -29,16 +29,16 @@
       }
 
     override def getFoldLevel(buffer: JEditBuffer, line: Int, seg: Segment): Int =
-      Token_Markup.buffer_line_structure(buffer, line).depth max 0
+      Token_Markup.buffer_line_context(buffer, line).structure.depth max 0
 
     override def getPrecedingFoldLevels(
       buffer: JEditBuffer, line: Int, seg: Segment, level: Int): java.util.List[Integer] =
     {
-      val struct = Token_Markup.buffer_line_structure(buffer, line)
+      val struct = Token_Markup.buffer_line_context(buffer, line).structure
       val result =
         if (line > 0 && struct.command)
           Range.inclusive(line - 1, 0, -1).iterator.
-            map(Token_Markup.buffer_line_structure(buffer, _)).
+            map(i => Token_Markup.buffer_line_context(buffer, i).structure).
             takeWhile(_.improper).map(_ => struct.depth max 0).toList
         else Nil
 
--- a/src/Tools/jEdit/src/isabelle.scala	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Tools/jEdit/src/isabelle.scala	Tue Oct 21 21:55:45 2014 +0200
@@ -15,7 +15,7 @@
 import scala.swing.event.ButtonClicked
 
 import org.gjt.sp.jedit.{jEdit, View, Buffer}
-import org.gjt.sp.jedit.textarea.JEditTextArea
+import org.gjt.sp.jedit.textarea.{JEditTextArea, StructureMatcher}
 import org.gjt.sp.jedit.syntax.TokenMarker
 import org.gjt.sp.jedit.gui.{DockableWindowManager, CompleteWord}
 import org.gjt.sp.jedit.options.PluginOptions
@@ -73,6 +73,12 @@
   def token_marker(name: String): Option[TokenMarker] = markers.get(name)
 
 
+  /* structure matchers */
+
+  def structure_matchers(name: String): List[StructureMatcher] =
+    if (name == "isabelle") List(Structure_Matching.Isabelle_Matcher) else Nil
+
+
   /* dockable windows */
 
   private def wm(view: View): DockableWindowManager = view.getDockableWindowManager
--- a/src/Tools/jEdit/src/isabelle_sidekick.scala	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Tools/jEdit/src/isabelle_sidekick.scala	Tue Oct 21 21:55:45 2014 +0200
@@ -30,15 +30,17 @@
     data
   }
 
-  class Asset(name: String, start: Text.Offset, end: Text.Offset) extends IAsset
+  class Keyword_Asset(keyword: String, text: String, range: Text.Range) extends IAsset
   {
-    protected var _name = name
-    protected var _start = int_to_pos(start)
-    protected var _end = int_to_pos(end)
+    protected var _name = text
+    protected var _start = int_to_pos(range.start)
+    protected var _end = int_to_pos(range.stop)
     override def getIcon: Icon = null
     override def getShortString: String =
       "<html><span style=\"font-family: " + Font_Info.main_family() + ";\">" +
-      HTML.encode(_name) + "</span></html>"
+      (if (keyword != "" && _name.startsWith(keyword))
+        "<b>" + HTML.encode(keyword) + "</b>" + HTML.encode(_name.substring(keyword.length))
+       else HTML.encode(_name)) + "</span></html>"
     override def getLongString: String = _name
     override def getName: String = _name
     override def setName(name: String) = _name = name
@@ -49,6 +51,8 @@
     override def toString: String = _name
   }
 
+  class Asset(name: String, range: Text.Range) extends Keyword_Asset("", name, range)
+
   def swing_markup_tree(tree: Markup_Tree, parent: DefaultMutableTreeNode,
     swing_node: Text.Info[List[XML.Elem]] => DefaultMutableTreeNode)
   {
@@ -101,27 +105,29 @@
 {
   override def parser(buffer: Buffer, syntax: Outer_Syntax, data: SideKickParsedData): Boolean =
   {
-    def make_tree(offset: Text.Offset, document: Outer_Syntax.Document)
-        : List[DefaultMutableTreeNode] =
-      document match {
-        case Outer_Syntax.Document_Block(name, body) =>
-          val node =
-            new DefaultMutableTreeNode(
-              new Isabelle_Sidekick.Asset(
-                Library.first_line(name), offset, offset + document.length))
-          (offset /: body)((i, doc) =>
-            {
-              for (nd <- make_tree(i, doc)) node.add(nd)
-              i + doc.length
-            })
-          List(node)
-        case _ => Nil
+    def make_tree(
+      parent: DefaultMutableTreeNode,
+      offset: Text.Offset,
+      documents: List[Outer_Syntax.Document])
+    {
+      (offset /: documents) { case (i, document) =>
+        document match {
+          case Outer_Syntax.Document_Block(name, text, body) =>
+            val range = Text.Range(i, i + document.length)
+            val node =
+              new DefaultMutableTreeNode(
+                new Isabelle_Sidekick.Keyword_Asset(name, Library.first_line(text), range))
+            parent.add(node)
+            make_tree(node, i, body)
+          case _ =>
+        }
+        i + document.length
       }
+    }
 
     node_name(buffer) match {
       case Some(name) =>
-        val document = syntax.parse_document(name, JEdit_Lib.buffer_text(buffer))
-        for (node <- make_tree(0, document)) data.root.add(node)
+        make_tree(data.root, 0, syntax.parse_document(name, JEdit_Lib.buffer_text(buffer)))
         true
       case None => false
     }
@@ -169,7 +175,7 @@
                     .mkString
 
                 new DefaultMutableTreeNode(
-                  new Isabelle_Sidekick.Asset(command.toString, range.start, range.stop) {
+                  new Isabelle_Sidekick.Asset(command.toString, range) {
                     override def getShortString: String = content
                     override def getLongString: String = info_text
                     override def toString: String = quote(content) + " " + range.toString
@@ -189,7 +195,7 @@
   private val Heading2 = """^\*\*\*\w*(.*)\w*\*\*\*\w*$""".r
 
   private def make_node(s: String, start: Text.Offset, stop: Text.Offset): DefaultMutableTreeNode =
-    new DefaultMutableTreeNode(new Isabelle_Sidekick.Asset(s, start, stop))
+    new DefaultMutableTreeNode(new Isabelle_Sidekick.Asset(s, Text.Range(start, stop)))
 
   override def parser(buffer: Buffer, syntax: Outer_Syntax, data: SideKickParsedData): Boolean =
   {
--- a/src/Tools/jEdit/src/jEdit.props	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Tools/jEdit/src/jEdit.props	Tue Oct 21 21:55:45 2014 +0200
@@ -180,6 +180,7 @@
 fallbackEncodings=UTF-8 ISO-8859-15 US-ASCII
 firstTime=false
 focus-buffer-switcher.shortcut2=A+CIRCUMFLEX
+foldPainter=Circle
 home.shortcut=
 insert-newline-indent.shortcut=
 insert-newline.shortcut=ENTER
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/jEdit/src/structure_matching.scala	Tue Oct 21 21:55:45 2014 +0200
@@ -0,0 +1,121 @@
+/*  Title:      Tools/jEdit/src/structure_matching.scala
+    Author:     Makarius
+
+Structure matcher for Isabelle/Isar outer syntax.
+*/
+
+package isabelle.jedit
+
+
+import isabelle._
+
+import org.gjt.sp.jedit.textarea.{TextArea, StructureMatcher}
+
+
+object Structure_Matching
+{
+  object Isabelle_Matcher extends StructureMatcher
+  {
+    def find_block(
+      open: Token => Boolean,
+      close: Token => Boolean,
+      reset: Token => Boolean,
+      it: Iterator[Text.Info[Token]]): Option[(Text.Range, Text.Range)] =
+    {
+      val range1 = it.next.range
+      it.scanLeft((range1, 1))(
+        { case ((r, d), Text.Info(range, tok)) =>
+            if (open(tok)) (range, d + 1)
+            else if (close(tok)) (range, d - 1)
+            else if (reset(tok)) (range, 0)
+            else (r, d) }
+      ).collectFirst({ case (range2, 0) => (range1, range2) })
+    }
+
+    def find_pair(text_area: TextArea): Option[(Text.Range, Text.Range)] =
+    {
+      val buffer = text_area.getBuffer
+      val caret_line = text_area.getCaretLine
+      val caret = text_area.getCaretPosition
+
+      PIDE.session.recent_syntax match {
+        case syntax: Outer_Syntax
+        if syntax != Outer_Syntax.empty =>
+
+          val limit = PIDE.options.value.int("jedit_structure_limit") max 0
+
+          def iterator(line: Int, lim: Int = limit): Iterator[Text.Info[Token]] =
+            Token_Markup.line_token_iterator(syntax, buffer, line, line + lim).
+              filter(_.info.is_proper)
+
+          def rev_iterator(line: Int, lim: Int = limit): Iterator[Text.Info[Token]] =
+            Token_Markup.line_token_reverse_iterator(syntax, buffer, line, line - lim).
+              filter(_.info.is_proper)
+
+          def caret_iterator(): Iterator[Text.Info[Token]] =
+            iterator(caret_line).dropWhile(info => !info.range.touches(caret))
+
+          def rev_caret_iterator(): Iterator[Text.Info[Token]] =
+            rev_iterator(caret_line).dropWhile(info => !info.range.touches(caret))
+
+          iterator(caret_line, 1).find(info => info.range.touches(caret))
+          match {
+            case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.theory_goal) =>
+              find_block(
+                syntax.command_kind(_, Keyword.proof_goal),
+                syntax.command_kind(_, Keyword.qed),
+                syntax.command_kind(_, Keyword.qed_global),
+                caret_iterator())
+
+            case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.proof_goal) =>
+              find_block(
+                syntax.command_kind(_, Keyword.proof_goal),
+                syntax.command_kind(_, Keyword.qed),
+                _ => false,
+                caret_iterator())
+
+            case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.qed_global) =>
+              rev_caret_iterator().find(info => syntax.command_kind(info.info, Keyword.theory))
+              match {
+                case Some(Text.Info(range2, tok))
+                if syntax.command_kind(tok, Keyword.theory_goal) => Some((range1, range2))
+                case _ => None
+              }
+
+            case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.qed) =>
+              find_block(
+                syntax.command_kind(_, Keyword.qed),
+                t =>
+                  syntax.command_kind(t, Keyword.proof_goal) ||
+                  syntax.command_kind(t, Keyword.theory_goal),
+                _ => false,
+                rev_caret_iterator())
+
+            case Some(Text.Info(range1, tok)) if tok.is_begin =>
+              find_block(_.is_begin, _.is_end, _ => false, caret_iterator())
+
+            case Some(Text.Info(range1, tok)) if tok.is_end =>
+              find_block(_.is_end, _.is_begin, _ => false, rev_caret_iterator())
+
+            case _ => None
+          }
+        case _ => None
+      }
+    }
+
+    def getMatch(text_area: TextArea): StructureMatcher.Match =
+      find_pair(text_area) match {
+        case Some((_, range)) =>
+          val line = text_area.getBuffer.getLineOfOffset(range.start)
+          new StructureMatcher.Match(Structure_Matching.Isabelle_Matcher,
+            line, range.start, line, range.stop)
+        case None => null
+      }
+
+    def selectMatch(text_area: TextArea)
+    {
+      // FIXME
+    }
+  }
+}
+
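
The heart of Isabelle_Matcher is find_block: it starts just after an opening token, scans the remaining tokens while counting nesting depth (+1 for each further opener, -1 for each closer), and reports the token at which the depth first returns to zero; the extra reset predicate is used in the theory_goal case so that a qed_global command ends the scan regardless of depth. A minimal, self-contained sketch of that depth-counting idea follows; DepthMatchDemo, findClose and the literal token strings are made-up names for illustration, not part of this changeset:

  // Sketch of the depth-counting scan used by find_block above.
  object DepthMatchDemo
  {
    // Given the tokens *after* an opening token, return the index of its
    // matching closer: depth starts at 1, rises on nested openers, falls on
    // closers, and the first index where it reaches 0 is the match.
    def findClose(tokens: List[String], open: String, close: String): Option[Int] =
      tokens.zipWithIndex
        .scanLeft((1, -1)) { case ((depth, _), (tok, i)) =>
          if (tok == open) (depth + 1, i)
          else if (tok == close) (depth - 1, i)
          else (depth, i)
        }
        .collectFirst { case (0, i) => i }

    def main(args: Array[String])
    {
      // tokens following an initial "proof"; the matching "qed" is at index 4
      val rest = List("have", "proof", "qed", "show", "qed")
      println(findClose(rest, "proof", "qed"))  // Some(4)
    }
  }

In the real matcher the elements are Text.Info[Token] values produced by the Token_Markup line iterators, so the result carries text ranges rather than list indices.
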
--- a/src/Tools/jEdit/src/token_markup.scala	Tue Oct 21 21:10:44 2014 +0200
+++ b/src/Tools/jEdit/src/token_markup.scala	Tue Oct 21 21:55:45 2014 +0200
@@ -177,6 +177,11 @@
 
   private val context_rules = new ParserRuleSet("isabelle", "MAIN")
 
+  object Line_Context
+  {
+    val init = new Line_Context(Some(Scan.Finished), Outer_Syntax.Line_Structure.init)
+  }
+
   class Line_Context(
       val context: Option[Scan.Line_Context],
       val structure: Outer_Syntax.Line_Structure)
@@ -190,7 +195,7 @@
       }
   }
 
-  def buffer_line_context(buffer: JEditBuffer, line: Int): Option[Line_Context] =
+  def buffer_line_context(buffer: JEditBuffer, line: Int): Line_Context =
     Untyped.get(buffer, "lineMgr") match {
       case line_mgr: LineManager =>
         def context =
@@ -198,18 +203,54 @@
             case c: Line_Context => Some(c)
             case _ => None
           }
-        context orElse {
+        context getOrElse {
           buffer.markTokens(line, DummyTokenHandler.INSTANCE)
-          context
+          context getOrElse Line_Context.init
         }
-      case _ => None
+      case _ => Line_Context.init
     }
 
-  def buffer_line_structure(buffer: JEditBuffer, line: Int): Outer_Syntax.Line_Structure =
-    buffer_line_context(buffer, line) match {
-      case Some(c) => c.structure
-      case _ => Outer_Syntax.Line_Structure.init
-    }
+
+  /* line tokens */
+
+  def try_line_tokens(syntax: Outer_Syntax, buffer: JEditBuffer, line: Int): Option[List[Token]] =
+  {
+    val line_context =
+      if (line == 0) Line_Context.init
+      else buffer_line_context(buffer, line - 1)
+    for {
+      ctxt <- line_context.context
+      text <- JEdit_Lib.try_get_text(buffer, JEdit_Lib.line_range(buffer, line))
+    } yield syntax.scan_line(text, ctxt)._1
+  }
+
+  def line_token_iterator(
+      syntax: Outer_Syntax,
+      buffer: JEditBuffer,
+      start_line: Int,
+      end_line: Int): Iterator[Text.Info[Token]] =
+    for {
+      line <- Range(start_line max 0, end_line min buffer.getLineCount).iterator
+      tokens <- try_line_tokens(syntax, buffer, line).iterator
+      starts =
+        tokens.iterator.scanLeft(buffer.getLineStartOffset(line))(
+          (i, tok) => i + tok.source.length)
+      (i, tok) <- starts zip tokens.iterator
+    } yield Text.Info(Text.Range(i, i + tok.source.length), tok)
+
+  def line_token_reverse_iterator(
+      syntax: Outer_Syntax,
+      buffer: JEditBuffer,
+      start_line: Int,
+      end_line: Int): Iterator[Text.Info[Token]] =
+    for {
+      line <- Range(start_line min (buffer.getLineCount - 1), end_line max -1, -1).iterator
+      tokens <- try_line_tokens(syntax, buffer, line).iterator
+      stops =
+        tokens.reverseIterator.scanLeft(buffer.getLineEndOffset(line) min buffer.getLength)(
+          (i, tok) => i - tok.source.length)
+      (i, tok) <- stops zip tokens.reverseIterator
+    } yield Text.Info(Text.Range(i - tok.source.length, i), tok)
 
 
   /* token marker */
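
The new line_token_iterator recovers token positions by folding source lengths into cumulative offsets with scanLeft; since scanLeft yields one element more than its input, zipping the result back with the token sequence silently drops the trailing end-of-line offset. A stand-alone sketch of just that bookkeeping (TokenOffsetsDemo and the sample strings are made up for illustration):

  // Sketch of the offset bookkeeping in line_token_iterator above.
  object TokenOffsetsDemo
  {
    def main(args: Array[String])
    {
      val line_start = 100
      val tokens = List("lemma", " ", "foo", ":")
      // cumulative start offsets: List(100, 105, 106, 109, 110)
      val starts = tokens.scanLeft(line_start)((i, tok) => i + tok.length)
      // zip truncates to the shorter side, discarding the final offset 110
      val ranges = starts.zip(tokens).map { case (i, tok) => (i, i + tok.length, tok) }
      ranges.foreach(println)  // (100,105,lemma) (105,106, ) (106,109,foo) (109,110,:)
    }
  }

line_token_reverse_iterator does the same from the line end, subtracting token lengths instead of adding them.
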
@@ -219,24 +260,22 @@
     override def markTokens(context: TokenMarker.LineContext,
         handler: TokenHandler, raw_line: Segment): TokenMarker.LineContext =
     {
-      val (opt_ctxt, structure) =
-        context match {
-          case c: Line_Context => (c.context, c.structure)
-          case _ => (Some(Scan.Finished), Outer_Syntax.Line_Structure.init)
-        }
       val line = if (raw_line == null) new Segment else raw_line
+      val line_context = context match { case c: Line_Context => c case _ => Line_Context.init }
+      val structure = line_context.structure
 
       val context1 =
       {
         val (styled_tokens, context1) =
-          (opt_ctxt, Isabelle.mode_syntax(mode)) match {
+          (line_context.context, Isabelle.mode_syntax(mode)) match {
             case (Some(ctxt), _) if mode == "isabelle-ml" || mode == "sml" =>
               val (tokens, ctxt1) = ML_Lex.tokenize_line(mode == "sml", line, ctxt)
               val styled_tokens = tokens.map(tok => (Rendering.ml_token_markup(tok), tok.source))
               (styled_tokens, new Line_Context(Some(ctxt1), structure))
 
             case (Some(ctxt), Some(syntax)) if syntax.has_tokens =>
-              val (tokens, ctxt1, structure1) = syntax.scan_line(line, ctxt, structure)
+              val (tokens, ctxt1) = syntax.scan_line(line, ctxt)
+              val structure1 = syntax.line_structure(tokens, structure)
               val styled_tokens =
                 tokens.map(tok => (Rendering.token_markup(syntax, tok), tok.source))
               (styled_tokens, new Line_Context(Some(ctxt1), structure1))