merged
author haftmann
Mon, 21 Dec 2015 21:34:14 +0100
changeset 61892 5c68d06f97b3
parent 61891 76189756ff65 (current diff)
parent 61889 42d902e074e8 (diff)
child 61894 f5a2aed23206
--- a/Admin/components/bundled-windows	Sat Dec 19 17:03:17 2015 +0100
+++ b/Admin/components/bundled-windows	Mon Dec 21 21:34:14 2015 +0100
@@ -1,3 +1,3 @@
 #additional components to be bundled for release
-cygwin-20151210
+cygwin-20151221
 windows_app-20150821
--- a/Admin/components/components.sha1	Sat Dec 19 17:03:17 2015 +0100
+++ b/Admin/components/components.sha1	Mon Dec 21 21:34:14 2015 +0100
@@ -18,6 +18,7 @@
 629b8fbe35952d1551cd2a7ff08db697f6dff870  cygwin-20141024.tar.gz
 ce93d0b3b2743c4f4e5bba30c2889b3b7bc22f2c  cygwin-20150410.tar.gz
 fa712dd5ec66ad16add1779d68aa171ff5694064  cygwin-20151210.tar.gz
+056b843d5a3b69ecf8a52c06f2ce6e696dd275f9  cygwin-20151221.tar.gz
 0fe549949a025d65d52d6deca30554de8fca3b6e  e-1.5.tar.gz
 2e293256a134eb8e5b1a283361b15eb812fbfbf1  e-1.6-1.tar.gz
 e1919e72416cbd7ac8de5455caba8901acc7b44d  e-1.6-2.tar.gz
--- a/Admin/lib/Tools/makedist_cygwin	Sat Dec 19 17:03:17 2015 +0100
+++ b/Admin/lib/Tools/makedist_cygwin	Mon Dec 21 21:34:14 2015 +0100
@@ -55,7 +55,7 @@
   --site "$CYGWIN_MIRROR" --no-verify \
   --local-package-dir 'C:\temp' \
   --root "$(cygpath -w "$TARGET")" \
-  --packages perl,perl-libwww-perl,rlwrap,unzip,vim \
+  --packages perl,perl-libwww-perl,rlwrap,unzip,nano \
   --no-shortcuts --no-startmenu --no-desktop --quiet-mode
 
 [ "$?" = 0 -a -e "$TARGET/etc" ] || exit 2
@@ -68,8 +68,6 @@
   rm -f "$TARGET/etc/$NAME"
 done
 
-ln -s cygperl5_14.dll "$TARGET/bin/cygperl5_14_2.dll"
-
 rm "$TARGET/Cygwin.bat"
 
 
--- a/NEWS	Sat Dec 19 17:03:17 2015 +0100
+++ b/NEWS	Mon Dec 21 21:34:14 2015 +0100
@@ -672,6 +672,13 @@
 tools, but can also cause INCOMPATIBILITY for tools that don't observe
 the proof context discipline.
 
+* The following combinators for low-level profiling of the ML runtime
+system are available:
+
+  profile_time          (*CPU time*)
+  profile_time_thread   (*CPU time on this thread*)
+  profile_allocations   (*overall heap allocations*)
+
 
 *** System ***
 
--- a/etc/options	Sat Dec 19 17:03:17 2015 +0100
+++ b/etc/options	Mon Dec 21 21:34:14 2015 +0100
@@ -125,7 +125,7 @@
 public option editor_output_delay : real = 0.1
   -- "delay for prover output (markup, common messages etc.)"
 
-public option editor_prune_delay : real = 60.0
+public option editor_prune_delay : real = 30.0
   -- "delay to prune history (delete old versions)"
 
 public option editor_update_delay : real = 0.5
--- a/src/Doc/Isar_Ref/Proof_Script.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Doc/Isar_Ref/Proof_Script.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -102,8 +102,8 @@
   apply} sequences.
 
   The current goal state, which is essentially a hidden part of the Isar/VM
-  configurtation, is turned into a proof context and remaining conclusion.
-  This correponds to @{command fix}~/ @{command assume}~/ @{command show} in
+  configuration, is turned into a proof context and remaining conclusion.
+  This corresponds to @{command fix}~/ @{command assume}~/ @{command show} in
   structured proofs, but the text of the parameters, premises and conclusion
   is not given explicitly.
 
--- a/src/Doc/antiquote_setup.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Doc/antiquote_setup.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -130,13 +130,15 @@
               Output.output
                 (Thy_Output.string_of_margin ctxt
                   (Pretty.indent (Config.get ctxt Thy_Output.indent) p)) ^
-              "\\rulename{" ^ Output.output (Pretty.str_of (Thy_Output.pretty_text ctxt name)) ^ "}")
+              "\\rulename{" ^
+              Output.output (Pretty.unformatted_string_of (Thy_Output.pretty_text ctxt name)) ^ "}")
             #> space_implode "\\par\\smallskip%\n"
             #> Latex.environment "isabelle"
           else
             map (fn (p, name) =>
-              Output.output (Pretty.str_of p) ^
-              "\\rulename{" ^ Output.output (Pretty.str_of (Thy_Output.pretty_text ctxt name)) ^ "}")
+              Output.output (Pretty.unformatted_string_of p) ^
+              "\\rulename{" ^
+              Output.output (Pretty.unformatted_string_of (Thy_Output.pretty_text ctxt name)) ^ "}")
             #> space_implode "\\par\\smallskip%\n"
             #> enclose "\\isa{" "}")));
 
--- a/src/HOL/Hahn_Banach/Function_Norm.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Hahn_Banach/Function_Norm.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -11,8 +11,8 @@
 subsection \<open>Continuous linear forms\<close>
 
 text \<open>
-  A linear form \<open>f\<close> on a normed vector space \<open>(V, \<parallel>\<cdot>\<parallel>)\<close> is \<^emph>\<open>continuous\<close>,
-  iff it is bounded, i.e.
+  A linear form \<open>f\<close> on a normed vector space \<open>(V, \<parallel>\<cdot>\<parallel>)\<close> is \<^emph>\<open>continuous\<close>, iff
+  it is bounded, i.e.
   \begin{center}
   \<open>\<exists>c \<in> R. \<forall>x \<in> V. \<bar>f x\<bar> \<le> c \<cdot> \<parallel>x\<parallel>\<close>
   \end{center}
@@ -52,20 +52,20 @@
   \<open>\<parallel>f\<parallel> = \<sup>x \<noteq> 0. \<bar>f x\<bar> / \<parallel>x\<parallel>\<close>
   \end{center}
 
-  For the case \<open>V = {0}\<close> the supremum would be taken from an empty set.
-  Since \<open>\<real>\<close> is unbounded, there would be no supremum. To avoid this
-  situation it must be guaranteed that there is an element in this set. This
-  element must be \<open>{} \<ge> 0\<close> so that \<open>fn_norm\<close> has the norm properties.
-  Furthermore it does not have to change the norm in all other cases, so it
-  must be \<open>0\<close>, as all other elements are \<open>{} \<ge> 0\<close>.
+  For the case \<open>V = {0}\<close> the supremum would be taken from an empty set. Since
+  \<open>\<real>\<close> is unbounded, there would be no supremum. To avoid this situation it
+  must be guaranteed that there is an element in this set. This element must
+  be \<open>{} \<ge> 0\<close> so that \<open>fn_norm\<close> has the norm properties. Furthermore it does
+  not have to change the norm in all other cases, so it must be \<open>0\<close>, as all
+  other elements are \<open>{} \<ge> 0\<close>.
 
   Thus we define the set \<open>B\<close> where the supremum is taken from as follows:
   \begin{center}
   \<open>{0} \<union> {\<bar>f x\<bar> / \<parallel>x\<parallel>. x \<noteq> 0 \<and> x \<in> F}\<close>
   \end{center}
 
-  \<open>fn_norm\<close> is equal to the supremum of \<open>B\<close>, if the supremum exists
-  (otherwise it is undefined).
+  \<open>fn_norm\<close> is equal to the supremum of \<open>B\<close>, if the supremum exists (otherwise
+  it is undefined).
 \<close>
 
 locale fn_norm =
--- a/src/HOL/Hahn_Banach/Function_Order.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Hahn_Banach/Function_Order.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -11,14 +11,12 @@
 subsection \<open>The graph of a function\<close>
 
 text \<open>
-  We define the \<^emph>\<open>graph\<close> of a (real) function \<open>f\<close> with
-  domain \<open>F\<close> as the set
+  We define the \<^emph>\<open>graph\<close> of a (real) function \<open>f\<close> with domain \<open>F\<close> as the set
   \begin{center}
   \<open>{(x, f x). x \<in> F}\<close>
   \end{center}
-  So we are modeling partial functions by specifying the domain and
-  the mapping function. We use the term ``function'' also for its
-  graph.
+  So we are modeling partial functions by specifying the domain and the
+  mapping function. We use the term ``function'' also for its graph.
 \<close>
 
 type_synonym 'a graph = "('a \<times> real) set"
@@ -41,8 +39,8 @@
 subsection \<open>Functions ordered by domain extension\<close>
 
 text \<open>
-  A function \<open>h'\<close> is an extension of \<open>h\<close>, iff the graph of
-  \<open>h\<close> is a subset of the graph of \<open>h'\<close>.
+  A function \<open>h'\<close> is an extension of \<open>h\<close>, iff the graph of \<open>h\<close> is a subset of
+  the graph of \<open>h'\<close>.
 \<close>
 
 lemma graph_extI:
@@ -93,8 +91,8 @@
 subsection \<open>Norm-preserving extensions of a function\<close>
 
 text \<open>
-  Given a linear form \<open>f\<close> on the space \<open>F\<close> and a seminorm \<open>p\<close> on \<open>E\<close>. The
-  set of all linear extensions of \<open>f\<close>, to superspaces \<open>H\<close> of \<open>F\<close>, which are
+  Given a linear form \<open>f\<close> on the space \<open>F\<close> and a seminorm \<open>p\<close> on \<open>E\<close>. The set
+  of all linear extensions of \<open>f\<close>, to superspaces \<open>H\<close> of \<open>F\<close>, which are
   bounded by \<open>p\<close>, is defined as follows.
 \<close>
 
--- a/src/HOL/Hahn_Banach/Hahn_Banach.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Hahn_Banach/Hahn_Banach.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -18,28 +18,26 @@
 
 paragraph \<open>Hahn-Banach Theorem.\<close>
 text \<open>
-  Let \<open>F\<close> be a subspace of a real vector space \<open>E\<close>, let \<open>p\<close> be a semi-norm
-  on \<open>E\<close>, and \<open>f\<close> be a linear form defined on \<open>F\<close> such that \<open>f\<close> is bounded
-  by \<open>p\<close>, i.e. \<open>\<forall>x \<in> F. f x \<le> p x\<close>. Then \<open>f\<close> can be extended to a linear
-  form \<open>h\<close> on \<open>E\<close> such that \<open>h\<close> is norm-preserving, i.e. \<open>h\<close> is also bounded
-  by \<open>p\<close>.
+  Let \<open>F\<close> be a subspace of a real vector space \<open>E\<close>, let \<open>p\<close> be a semi-norm on
+  \<open>E\<close>, and \<open>f\<close> be a linear form defined on \<open>F\<close> such that \<open>f\<close> is bounded by
+  \<open>p\<close>, i.e. \<open>\<forall>x \<in> F. f x \<le> p x\<close>. Then \<open>f\<close> can be extended to a linear form \<open>h\<close>
+  on \<open>E\<close> such that \<open>h\<close> is norm-preserving, i.e. \<open>h\<close> is also bounded by \<open>p\<close>.
 \<close>
 
 paragraph \<open>Proof Sketch.\<close>
 text \<open>
-  \<^enum> Define \<open>M\<close> as the set of norm-preserving extensions of \<open>f\<close> to subspaces
-  of \<open>E\<close>. The linear forms in \<open>M\<close> are ordered by domain extension.
+  \<^enum> Define \<open>M\<close> as the set of norm-preserving extensions of \<open>f\<close> to subspaces of
+  \<open>E\<close>. The linear forms in \<open>M\<close> are ordered by domain extension.
 
   \<^enum> We show that every non-empty chain in \<open>M\<close> has an upper bound in \<open>M\<close>.
 
-  \<^enum> With Zorn's Lemma we conclude that there is a maximal function \<open>g\<close> in
-  \<open>M\<close>.
+  \<^enum> With Zorn's Lemma we conclude that there is a maximal function \<open>g\<close> in \<open>M\<close>.
 
   \<^enum> The domain \<open>H\<close> of \<open>g\<close> is the whole space \<open>E\<close>, as shown by classical
   contradiction:
 
-    \<^item> Assuming \<open>g\<close> is not defined on whole \<open>E\<close>, it can still be extended in
-    a norm-preserving way to a super-space \<open>H'\<close> of \<open>H\<close>.
+    \<^item> Assuming \<open>g\<close> is not defined on whole \<open>E\<close>, it can still be extended in a
+    norm-preserving way to a super-space \<open>H'\<close> of \<open>H\<close>.
 
     \<^item> Thus \<open>g\<close> can not be maximal. Contradiction!
 \<close>
@@ -292,14 +290,13 @@
 
 text \<open>
   The following alternative formulation of the Hahn-Banach
-  Theorem\label{abs-Hahn-Banach} uses the fact that for a real linear form
-  \<open>f\<close> and a seminorm \<open>p\<close> the following inequality are
-  equivalent:\footnote{This was shown in lemma @{thm [source] abs_ineq_iff}
-  (see page \pageref{abs-ineq-iff}).}
+  Theorem\label{abs-Hahn-Banach} uses the fact that for a real linear form \<open>f\<close>
+  and a seminorm \<open>p\<close> the following inequality are equivalent:\footnote{This
+  was shown in lemma @{thm [source] abs_ineq_iff} (see page
+  \pageref{abs-ineq-iff}).}
   \begin{center}
   \begin{tabular}{lll}
-  \<open>\<forall>x \<in> H. \<bar>h x\<bar> \<le> p x\<close> & and &
-  \<open>\<forall>x \<in> H. h x \<le> p x\<close> \\
+  \<open>\<forall>x \<in> H. \<bar>h x\<bar> \<le> p x\<close> & and & \<open>\<forall>x \<in> H. h x \<le> p x\<close> \\
   \end{tabular}
   \end{center}
 \<close>
@@ -336,9 +333,8 @@
 subsection \<open>The Hahn-Banach Theorem for normed spaces\<close>
 
 text \<open>
-  Every continuous linear form \<open>f\<close> on a subspace \<open>F\<close> of a norm space \<open>E\<close>,
-  can be extended to a continuous linear form \<open>g\<close> on \<open>E\<close> such that \<open>\<parallel>f\<parallel> =
-  \<parallel>g\<parallel>\<close>.
+  Every continuous linear form \<open>f\<close> on a subspace \<open>F\<close> of a norm space \<open>E\<close>, can
+  be extended to a continuous linear form \<open>g\<close> on \<open>E\<close> such that \<open>\<parallel>f\<parallel> = \<parallel>g\<parallel>\<close>.
 \<close>
 
 theorem norm_Hahn_Banach:
@@ -420,10 +416,10 @@
          OF F_norm, folded B_def fn_norm_def])
   qed
 
-  txt \<open>Using the fact that \<open>p\<close> is a seminorm and \<open>f\<close> is bounded by \<open>p\<close> we
-    can apply the Hahn-Banach Theorem for real vector spaces. So \<open>f\<close> can be
-    extended in a norm-preserving way to some function \<open>g\<close> on the whole
-    vector space \<open>E\<close>.\<close>
+  txt \<open>Using the fact that \<open>p\<close> is a seminorm and \<open>f\<close> is bounded by \<open>p\<close> we can
+    apply the Hahn-Banach Theorem for real vector spaces. So \<open>f\<close> can be
+    extended in a norm-preserving way to some function \<open>g\<close> on the whole vector
+    space \<open>E\<close>.\<close>
 
   with E FE linearform q obtain g where
       linearformE: "linearform E g"
--- a/src/HOL/Hahn_Banach/Hahn_Banach_Ext_Lemmas.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Hahn_Banach/Hahn_Banach_Ext_Lemmas.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -9,14 +9,13 @@
 begin
 
 text \<open>
-  In this section the following context is presumed. Let \<open>E\<close> be a real
-  vector space with a seminorm \<open>q\<close> on \<open>E\<close>. \<open>F\<close> is a subspace of \<open>E\<close> and \<open>f\<close>
-  a linear function on \<open>F\<close>. We consider a subspace \<open>H\<close> of \<open>E\<close> that is a
-  superspace of \<open>F\<close> and a linear form \<open>h\<close> on \<open>H\<close>. \<open>H\<close> is a not equal to \<open>E\<close>
-  and \<open>x\<^sub>0\<close> is an element in \<open>E - H\<close>. \<open>H\<close> is extended to the direct sum \<open>H'
-  = H + lin x\<^sub>0\<close>, so for any \<open>x \<in> H'\<close> the decomposition of \<open>x = y + a \<cdot> x\<close>
-  with \<open>y \<in> H\<close> is unique. \<open>h'\<close> is defined on \<open>H'\<close> by \<open>h' x = h y + a \<cdot> \<xi>\<close>
-  for a certain \<open>\<xi>\<close>.
+  In this section the following context is presumed. Let \<open>E\<close> be a real vector
+  space with a seminorm \<open>q\<close> on \<open>E\<close>. \<open>F\<close> is a subspace of \<open>E\<close> and \<open>f\<close> a linear
+  function on \<open>F\<close>. We consider a subspace \<open>H\<close> of \<open>E\<close> that is a superspace of
+  \<open>F\<close> and a linear form \<open>h\<close> on \<open>H\<close>. \<open>H\<close> is a not equal to \<open>E\<close> and \<open>x\<^sub>0\<close> is an
+  element in \<open>E - H\<close>. \<open>H\<close> is extended to the direct sum \<open>H' = H + lin x\<^sub>0\<close>, so
+  for any \<open>x \<in> H'\<close> the decomposition of \<open>x = y + a \<cdot> x\<close> with \<open>y \<in> H\<close> is
+  unique. \<open>h'\<close> is defined on \<open>H'\<close> by \<open>h' x = h y + a \<cdot> \<xi>\<close> for a certain \<open>\<xi>\<close>.
 
   Subsequently we show some properties of this extension \<open>h'\<close> of \<open>h\<close>.
 
@@ -82,8 +81,8 @@
 
 text \<open>
   \<^medskip>
-  The function \<open>h'\<close> is defined as a \<open>h' x = h y + a \<cdot> \<xi>\<close> where
-  \<open>x = y + a \<cdot> \<xi>\<close> is a linear extension of \<open>h\<close> to \<open>H'\<close>.
+  The function \<open>h'\<close> is defined as a \<open>h' x = h y + a \<cdot> \<xi>\<close> where \<open>x = y + a \<cdot> \<xi>\<close>
+  is a linear extension of \<open>h\<close> to \<open>H'\<close>.
 \<close>
 
 lemma h'_lf:
--- a/src/HOL/Hahn_Banach/Hahn_Banach_Sup_Lemmas.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Hahn_Banach/Hahn_Banach_Sup_Lemmas.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -10,17 +10,16 @@
 
 text \<open>
   This section contains some lemmas that will be used in the proof of the
-  Hahn-Banach Theorem. In this section the following context is presumed.
-  Let \<open>E\<close> be a real vector space with a seminorm \<open>p\<close> on \<open>E\<close>. \<open>F\<close> is a
-  subspace of \<open>E\<close> and \<open>f\<close> a linear form on \<open>F\<close>. We consider a chain \<open>c\<close> of
-  norm-preserving extensions of \<open>f\<close>, such that \<open>\<Union>c = graph H h\<close>. We will
-  show some properties about the limit function \<open>h\<close>, i.e.\ the supremum of
-  the chain \<open>c\<close>.
+  Hahn-Banach Theorem. In this section the following context is presumed. Let
+  \<open>E\<close> be a real vector space with a seminorm \<open>p\<close> on \<open>E\<close>. \<open>F\<close> is a subspace of
+  \<open>E\<close> and \<open>f\<close> a linear form on \<open>F\<close>. We consider a chain \<open>c\<close> of norm-preserving
+  extensions of \<open>f\<close>, such that \<open>\<Union>c = graph H h\<close>. We will show some properties
+  about the limit function \<open>h\<close>, i.e.\ the supremum of the chain \<open>c\<close>.
 
   \<^medskip>
-  Let \<open>c\<close> be a chain of norm-preserving extensions of the function \<open>f\<close> and
-  let \<open>graph H h\<close> be the supremum of \<open>c\<close>. Every element in \<open>H\<close> is member of
-  one of the elements of the chain.
+  Let \<open>c\<close> be a chain of norm-preserving extensions of the function \<open>f\<close> and let
+  \<open>graph H h\<close> be the supremum of \<open>c\<close>. Every element in \<open>H\<close> is member of one of
+  the elements of the chain.
 \<close>
 
 lemmas [dest?] = chainsD
@@ -55,10 +54,10 @@
 
 text \<open>
   \<^medskip>
-  Let \<open>c\<close> be a chain of norm-preserving extensions of the function \<open>f\<close> and
-  let \<open>graph H h\<close> be the supremum of \<open>c\<close>. Every element in the domain \<open>H\<close> of
-  the supremum function is member of the domain \<open>H'\<close> of some function \<open>h'\<close>,
-  such that \<open>h\<close> extends \<open>h'\<close>.
+  Let \<open>c\<close> be a chain of norm-preserving extensions of the function \<open>f\<close> and let
+  \<open>graph H h\<close> be the supremum of \<open>c\<close>. Every element in the domain \<open>H\<close> of the
+  supremum function is member of the domain \<open>H'\<close> of some function \<open>h'\<close>, such
+  that \<open>h\<close> extends \<open>h'\<close>.
 \<close>
 
 lemma some_H'h':
@@ -83,9 +82,9 @@
 
 text \<open>
   \<^medskip>
-  Any two elements \<open>x\<close> and \<open>y\<close> in the domain \<open>H\<close> of the supremum function
-  \<open>h\<close> are both in the domain \<open>H'\<close> of some function \<open>h'\<close>, such that \<open>h\<close>
-  extends \<open>h'\<close>.
+  Any two elements \<open>x\<close> and \<open>y\<close> in the domain \<open>H\<close> of the supremum function \<open>h\<close>
+  are both in the domain \<open>H'\<close> of some function \<open>h'\<close>, such that \<open>h\<close> extends
+  \<open>h'\<close>.
 \<close>
 
 lemma some_H'h'2:
@@ -99,8 +98,8 @@
     \<and> linearform H' h' \<and> H' \<unlhd> E \<and> F \<unlhd> H'
     \<and> graph F f \<subseteq> graph H' h' \<and> (\<forall>x \<in> H'. h' x \<le> p x)"
 proof -
-  txt \<open>\<open>y\<close> is in the domain \<open>H''\<close> of some function \<open>h''\<close>,
-  such that \<open>h\<close> extends \<open>h''\<close>.\<close>
+  txt \<open>\<open>y\<close> is in the domain \<open>H''\<close> of some function \<open>h''\<close>, such that \<open>h\<close>
+    extends \<open>h''\<close>.\<close>
 
   from M cM u and y obtain H' h' where
       y_hy: "(y, h y) \<in> graph H' h'"
@@ -121,10 +120,9 @@
       "graph F f \<subseteq> graph H'' h''"  "\<forall>x \<in> H''. h'' x \<le> p x"
     by (rule some_H'h't [elim_format]) blast
 
-  txt \<open>Since both \<open>h'\<close> and \<open>h''\<close> are elements of the chain,
-    \<open>h''\<close> is an extension of \<open>h'\<close> or vice versa. Thus both
-    \<open>x\<close> and \<open>y\<close> are contained in the greater
-    one. \label{cases1}\<close>
+  txt \<open>Since both \<open>h'\<close> and \<open>h''\<close> are elements of the chain, \<open>h''\<close> is an
+    extension of \<open>h'\<close> or vice versa. Thus both \<open>x\<close> and \<open>y\<close> are contained in
+    the greater one. \label{cases1}\<close>
 
   from cM c'' c' consider "graph H'' h'' \<subseteq> graph H' h'" | "graph H' h' \<subseteq> graph H'' h''"
     by (blast dest: chainsD)
@@ -205,11 +203,11 @@
 
 text \<open>
   \<^medskip>
-  The limit function \<open>h\<close> is linear. Every element \<open>x\<close> in the domain of \<open>h\<close>
-  is in the domain of a function \<open>h'\<close> in the chain of norm preserving
-  extensions. Furthermore, \<open>h\<close> is an extension of \<open>h'\<close> so the function
-  values of \<open>x\<close> are identical for \<open>h'\<close> and \<open>h\<close>. Finally, the function \<open>h'\<close>
-  is linear by construction of \<open>M\<close>.
+  The limit function \<open>h\<close> is linear. Every element \<open>x\<close> in the domain of \<open>h\<close> is
+  in the domain of a function \<open>h'\<close> in the chain of norm preserving extensions.
+  Furthermore, \<open>h\<close> is an extension of \<open>h'\<close> so the function values of \<open>x\<close> are
+  identical for \<open>h'\<close> and \<open>h\<close>. Finally, the function \<open>h'\<close> is linear by
+  construction of \<open>M\<close>.
 \<close>
 
 lemma sup_lf:
@@ -286,8 +284,8 @@
 
 text \<open>
   \<^medskip>
-  The domain \<open>H\<close> of the limit function is a superspace of \<open>F\<close>, since \<open>F\<close> is
-  a subset of \<open>H\<close>. The existence of the \<open>0\<close> element in \<open>F\<close> and the closure
+  The domain \<open>H\<close> of the limit function is a superspace of \<open>F\<close>, since \<open>F\<close> is a
+  subset of \<open>H\<close>. The existence of the \<open>0\<close> element in \<open>F\<close> and the closure
   properties follow from the fact that \<open>F\<close> is a vector space.
 \<close>
 
@@ -367,8 +365,8 @@
 
 text \<open>
   \<^medskip>
-  The limit function is bounded by the norm \<open>p\<close> as well, since all elements
-  in the chain are bounded by \<open>p\<close>.
+  The limit function is bounded by the norm \<open>p\<close> as well, since all elements in
+  the chain are bounded by \<open>p\<close>.
 \<close>
 
 lemma sup_norm_pres:
@@ -389,10 +387,10 @@
 
 text \<open>
   \<^medskip>
-  The following lemma is a property of linear forms on real vector spaces.
-  It will be used for the lemma \<open>abs_Hahn_Banach\<close> (see page
-  \pageref{abs-Hahn-Banach}). \label{abs-ineq-iff} For real vector spaces
-  the following inequality are equivalent:
+  The following lemma is a property of linear forms on real vector spaces. It
+  will be used for the lemma \<open>abs_Hahn_Banach\<close> (see page
+  \pageref{abs-Hahn-Banach}). \label{abs-ineq-iff} For real vector spaces the
+  following inequality are equivalent:
   \begin{center}
   \begin{tabular}{lll}
   \<open>\<forall>x \<in> H. \<bar>h x\<bar> \<le> p x\<close> & and &
--- a/src/HOL/Hahn_Banach/Normed_Space.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Hahn_Banach/Normed_Space.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -11,9 +11,9 @@
 subsection \<open>Quasinorms\<close>
 
 text \<open>
-  A \<^emph>\<open>seminorm\<close> \<open>\<parallel>\<cdot>\<parallel>\<close> is a function on a real vector space into the reals
-  that has the following properties: it is positive definite, absolute
-  homogeneous and subadditive.
+  A \<^emph>\<open>seminorm\<close> \<open>\<parallel>\<cdot>\<parallel>\<close> is a function on a real vector space into the reals that
+  has the following properties: it is positive definite, absolute homogeneous
+  and subadditive.
 \<close>
 
 locale seminorm =
@@ -57,8 +57,7 @@
 subsection \<open>Norms\<close>
 
 text \<open>
-  A \<^emph>\<open>norm\<close> \<open>\<parallel>\<cdot>\<parallel>\<close> is a seminorm that maps only the
-  \<open>0\<close> vector to \<open>0\<close>.
+  A \<^emph>\<open>norm\<close> \<open>\<parallel>\<cdot>\<parallel>\<close> is a seminorm that maps only the \<open>0\<close> vector to \<open>0\<close>.
 \<close>
 
 locale norm = seminorm +
@@ -68,8 +67,7 @@
 subsection \<open>Normed vector spaces\<close>
 
 text \<open>
-  A vector space together with a norm is called a \<^emph>\<open>normed
-  space\<close>.
+  A vector space together with a norm is called a \<^emph>\<open>normed space\<close>.
 \<close>
 
 locale normed_vectorspace = vectorspace + norm
--- a/src/HOL/Hahn_Banach/Subspace.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Hahn_Banach/Subspace.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -139,8 +139,8 @@
 subsection \<open>Linear closure\<close>
 
 text \<open>
-  The \<^emph>\<open>linear closure\<close> of a vector \<open>x\<close> is the set of all scalar multiples
-  of \<open>x\<close>.
+  The \<^emph>\<open>linear closure\<close> of a vector \<open>x\<close> is the set of all scalar multiples of
+  \<open>x\<close>.
 \<close>
 
 definition lin :: "('a::{minus,plus,zero}) \<Rightarrow> 'a set"
@@ -318,7 +318,7 @@
   qed
 qed
 
-text\<open>The sum of two subspaces is a vectorspace.\<close>
+text \<open>The sum of two subspaces is a vectorspace.\<close>
 
 lemma sum_vs [intro?]:
     "U \<unlhd> E \<Longrightarrow> V \<unlhd> E \<Longrightarrow> vectorspace E \<Longrightarrow> vectorspace (U + V)"
@@ -328,10 +328,10 @@
 subsection \<open>Direct sums\<close>
 
 text \<open>
-  The sum of \<open>U\<close> and \<open>V\<close> is called \<^emph>\<open>direct\<close>, iff the zero element is the
-  only common element of \<open>U\<close> and \<open>V\<close>. For every element \<open>x\<close> of the direct
-  sum of \<open>U\<close> and \<open>V\<close> the decomposition in \<open>x = u + v\<close> with \<open>u \<in> U\<close> and
-  \<open>v \<in> V\<close> is unique.
+  The sum of \<open>U\<close> and \<open>V\<close> is called \<^emph>\<open>direct\<close>, iff the zero element is the only
+  common element of \<open>U\<close> and \<open>V\<close>. For every element \<open>x\<close> of the direct sum of
+  \<open>U\<close> and \<open>V\<close> the decomposition in \<open>x = u + v\<close> with \<open>u \<in> U\<close> and \<open>v \<in> V\<close> is
+  unique.
 \<close>
 
 lemma decomp:
@@ -434,8 +434,8 @@
 
 text \<open>
   Since for any element \<open>y + a \<cdot> x'\<close> of the direct sum of a vectorspace \<open>H\<close>
-  and the linear closure of \<open>x'\<close> the components \<open>y \<in> H\<close> and \<open>a\<close> are unique,
-  it follows from \<open>y \<in> H\<close> that \<open>a = 0\<close>.
+  and the linear closure of \<open>x'\<close> the components \<open>y \<in> H\<close> and \<open>a\<close> are unique, it
+  follows from \<open>y \<in> H\<close> that \<open>a = 0\<close>.
 \<close>
 
 lemma decomp_H'_H:
--- a/src/HOL/Hahn_Banach/Vector_Space.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Hahn_Banach/Vector_Space.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -23,13 +23,13 @@
 subsection \<open>Vector space laws\<close>
 
 text \<open>
-  A \<^emph>\<open>vector space\<close> is a non-empty set \<open>V\<close> of elements from @{typ 'a} with
-  the following vector space laws: The set \<open>V\<close> is closed under addition and
-  scalar multiplication, addition is associative and commutative; \<open>- x\<close> is
-  the inverse of \<open>x\<close> wrt.\ addition and \<open>0\<close> is the neutral element of
-  addition. Addition and multiplication are distributive; scalar
-  multiplication is associative and the real number \<open>1\<close> is the neutral
-  element of scalar multiplication.
+  A \<^emph>\<open>vector space\<close> is a non-empty set \<open>V\<close> of elements from @{typ 'a} with the
+  following vector space laws: The set \<open>V\<close> is closed under addition and scalar
+  multiplication, addition is associative and commutative; \<open>- x\<close> is the
+  inverse of \<open>x\<close> wrt.\ addition and \<open>0\<close> is the neutral element of addition.
+  Addition and multiplication are distributive; scalar multiplication is
+  associative and the real number \<open>1\<close> is the neutral element of scalar
+  multiplication.
 \<close>
 
 locale vectorspace =
@@ -78,8 +78,10 @@
 lemmas add_ac = add_assoc add_commute add_left_commute
 
 
-text \<open>The existence of the zero element of a vector space follows from the
-  non-emptiness of carrier set.\<close>
+text \<open>
+  The existence of the zero element of a vector space follows from the
+  non-emptiness of carrier set.
+\<close>
 
 lemma zero [iff]: "0 \<in> V"
 proof -
--- a/src/HOL/Hahn_Banach/Zorn_Lemma.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Hahn_Banach/Zorn_Lemma.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -9,13 +9,13 @@
 begin
 
 text \<open>
-  Zorn's Lemmas states: if every linear ordered subset of an ordered
-  set \<open>S\<close> has an upper bound in \<open>S\<close>, then there exists a
-  maximal element in \<open>S\<close>.  In our application, \<open>S\<close> is a
-  set of sets ordered by set inclusion. Since the union of a chain of
-  sets is an upper bound for all elements of the chain, the conditions
-  of Zorn's lemma can be modified: if \<open>S\<close> is non-empty, it
-  suffices to show that for every non-empty chain \<open>c\<close> in \<open>S\<close> the union of \<open>c\<close> also lies in \<open>S\<close>.
+  Zorn's Lemmas states: if every linear ordered subset of an ordered set \<open>S\<close>
+  has an upper bound in \<open>S\<close>, then there exists a maximal element in \<open>S\<close>. In
+  our application, \<open>S\<close> is a set of sets ordered by set inclusion. Since the
+  union of a chain of sets is an upper bound for all elements of the chain,
+  the conditions of Zorn's lemma can be modified: if \<open>S\<close> is non-empty, it
+  suffices to show that for every non-empty chain \<open>c\<close> in \<open>S\<close> the union of \<open>c\<close>
+  also lies in \<open>S\<close>.
 \<close>
 
 theorem Zorn's_Lemma:
@@ -28,16 +28,14 @@
     fix c assume "c \<in> chains S"
     show "\<exists>y \<in> S. \<forall>z \<in> c. z \<subseteq> y"
     proof cases
-
-      txt \<open>If \<open>c\<close> is an empty chain, then every element in
-        \<open>S\<close> is an upper bound of \<open>c\<close>.\<close>
+      txt \<open>If \<open>c\<close> is an empty chain, then every element in \<open>S\<close> is an upper
+        bound of \<open>c\<close>.\<close>
 
       assume "c = {}"
       with aS show ?thesis by fast
 
-      txt \<open>If \<open>c\<close> is non-empty, then \<open>\<Union>c\<close> is an upper
-        bound of \<open>c\<close>, lying in \<open>S\<close>.\<close>
-
+      txt \<open>If \<open>c\<close> is non-empty, then \<open>\<Union>c\<close> is an upper bound of \<open>c\<close>, lying in
+        \<open>S\<close>.\<close>
     next
       assume "c \<noteq> {}"
       show ?thesis
--- a/src/HOL/Hilbert_Choice.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Hilbert_Choice.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -6,7 +6,7 @@
 section \<open>Hilbert's Epsilon-Operator and the Axiom of Choice\<close>
 
 theory Hilbert_Choice
-imports Nat Wellfounded
+imports Wellfounded
 keywords "specification" :: thy_goal
 begin
 
--- a/src/HOL/Library/ContNotDenum.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Library/ContNotDenum.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -143,4 +143,20 @@
   using uncountable_open_interval [of a b]
   by (metis countable_Un_iff ivl_disj_un_singleton(4))
 
+lemma real_interval_avoid_countable_set:
+  fixes a b :: real and A :: "real set"
+  assumes "a < b" and "countable A"
+  shows "\<exists>x\<in>{a<..<b}. x \<notin> A"
+proof -
+  from `countable A` have "countable (A \<inter> {a<..<b})" by auto
+  moreover with `a < b` have "\<not> countable {a<..<b}" 
+    by (simp add: uncountable_open_interval)
+  ultimately have "A \<inter> {a<..<b} \<noteq> {a<..<b}" by auto
+  hence "A \<inter> {a<..<b} \<subset> {a<..<b}" 
+    by (intro psubsetI, auto)
+  hence "\<exists>x. x \<in> {a<..<b} - A \<inter> {a<..<b}"
+    by (rule psubset_imp_ex_mem)
+  thus ?thesis by auto
+qed
+
 end
--- a/src/HOL/Library/Extended_Real.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Library/Extended_Real.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -2450,6 +2450,16 @@
 lemma lim_ereal[simp]: "((\<lambda>n. ereal (f n)) ---> ereal x) net \<longleftrightarrow> (f ---> x) net"
   by (auto dest!: lim_real_of_ereal)
 
+lemma convergent_real_imp_convergent_ereal:
+  assumes "convergent a"
+  shows "convergent (\<lambda>n. ereal (a n))" and "lim (\<lambda>n. ereal (a n)) = ereal (lim a)"
+proof -
+  from assms obtain L where L: "a ----> L" unfolding convergent_def ..
+  hence lim: "(\<lambda>n. ereal (a n)) ----> ereal L" using lim_ereal by auto
+  thus "convergent (\<lambda>n. ereal (a n))" unfolding convergent_def ..
+  thus "lim (\<lambda>n. ereal (a n)) = ereal (lim a)" using lim L limI by metis
+qed
+
 lemma tendsto_PInfty: "(f ---> \<infinity>) F \<longleftrightarrow> (\<forall>r. eventually (\<lambda>x. ereal r < f x) F)"
 proof -
   {
@@ -3223,7 +3233,6 @@
   thus "(\<Sum>x. ereal (f x)) \<noteq> -\<infinity>" by simp_all
 qed
 
-
 lemma SUP_ereal_add_directed:
   fixes f g :: "'a \<Rightarrow> ereal"
   assumes nonneg: "\<And>i. i \<in> I \<Longrightarrow> 0 \<le> f i" "\<And>i. i \<in> I \<Longrightarrow> 0 \<le> g i"
@@ -3293,50 +3302,6 @@
     done
 qed
 
-subsection \<open>More Limits\<close>
-
-lemma convergent_limsup_cl:
-  fixes X :: "nat \<Rightarrow> 'a::{complete_linorder,linorder_topology}"
-  shows "convergent X \<Longrightarrow> limsup X = lim X"
-  by (auto simp: convergent_def limI lim_imp_Limsup)
-
-lemma lim_increasing_cl:
-  assumes "\<And>n m. n \<ge> m \<Longrightarrow> f n \<ge> f m"
-  obtains l where "f ----> (l::'a::{complete_linorder,linorder_topology})"
-proof
-  show "f ----> (SUP n. f n)"
-    using assms
-    by (intro increasing_tendsto)
-       (auto simp: SUP_upper eventually_sequentially less_SUP_iff intro: less_le_trans)
-qed
-
-lemma lim_decreasing_cl:
-  assumes "\<And>n m. n \<ge> m \<Longrightarrow> f n \<le> f m"
-  obtains l where "f ----> (l::'a::{complete_linorder,linorder_topology})"
-proof
-  show "f ----> (INF n. f n)"
-    using assms
-    by (intro decreasing_tendsto)
-       (auto simp: INF_lower eventually_sequentially INF_less_iff intro: le_less_trans)
-qed
-
-lemma compact_complete_linorder:
-  fixes X :: "nat \<Rightarrow> 'a::{complete_linorder,linorder_topology}"
-  shows "\<exists>l r. subseq r \<and> (X \<circ> r) ----> l"
-proof -
-  obtain r where "subseq r" and mono: "monoseq (X \<circ> r)"
-    using seq_monosub[of X]
-    unfolding comp_def
-    by auto
-  then have "(\<forall>n m. m \<le> n \<longrightarrow> (X \<circ> r) m \<le> (X \<circ> r) n) \<or> (\<forall>n m. m \<le> n \<longrightarrow> (X \<circ> r) n \<le> (X \<circ> r) m)"
-    by (auto simp add: monoseq_def)
-  then obtain l where "(X \<circ> r) ----> l"
-     using lim_increasing_cl[of "X \<circ> r"] lim_decreasing_cl[of "X \<circ> r"]
-     by auto
-  then show ?thesis
-    using \<open>subseq r\<close> by auto
-qed
-
 lemma ereal_dense3:
   fixes x y :: ereal
   shows "x < y \<Longrightarrow> \<exists>r::rat. x < real_of_rat r \<and> real_of_rat r < y"
--- a/src/HOL/Library/Liminf_Limsup.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Library/Liminf_Limsup.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -351,5 +351,53 @@
   finally show ?thesis
     by (auto simp: Liminf_def)
 qed
+subsection \<open>More Limits\<close>
+
+lemma convergent_limsup_cl:
+  fixes X :: "nat \<Rightarrow> 'a::{complete_linorder,linorder_topology}"
+  shows "convergent X \<Longrightarrow> limsup X = lim X"
+  by (auto simp: convergent_def limI lim_imp_Limsup)
+
+lemma convergent_liminf_cl:
+  fixes X :: "nat \<Rightarrow> 'a::{complete_linorder,linorder_topology}"
+  shows "convergent X \<Longrightarrow> liminf X = lim X"
+  by (auto simp: convergent_def limI lim_imp_Liminf)
+
+lemma lim_increasing_cl:
+  assumes "\<And>n m. n \<ge> m \<Longrightarrow> f n \<ge> f m"
+  obtains l where "f ----> (l::'a::{complete_linorder,linorder_topology})"
+proof
+  show "f ----> (SUP n. f n)"
+    using assms
+    by (intro increasing_tendsto)
+       (auto simp: SUP_upper eventually_sequentially less_SUP_iff intro: less_le_trans)
+qed
+
+lemma lim_decreasing_cl:
+  assumes "\<And>n m. n \<ge> m \<Longrightarrow> f n \<le> f m"
+  obtains l where "f ----> (l::'a::{complete_linorder,linorder_topology})"
+proof
+  show "f ----> (INF n. f n)"
+    using assms
+    by (intro decreasing_tendsto)
+       (auto simp: INF_lower eventually_sequentially INF_less_iff intro: le_less_trans)
+qed
+
+lemma compact_complete_linorder:
+  fixes X :: "nat \<Rightarrow> 'a::{complete_linorder,linorder_topology}"
+  shows "\<exists>l r. subseq r \<and> (X \<circ> r) ----> l"
+proof -
+  obtain r where "subseq r" and mono: "monoseq (X \<circ> r)"
+    using seq_monosub[of X]
+    unfolding comp_def
+    by auto
+  then have "(\<forall>n m. m \<le> n \<longrightarrow> (X \<circ> r) m \<le> (X \<circ> r) n) \<or> (\<forall>n m. m \<le> n \<longrightarrow> (X \<circ> r) n \<le> (X \<circ> r) m)"
+    by (auto simp add: monoseq_def)
+  then obtain l where "(X \<circ> r) ----> l"
+     using lim_increasing_cl[of "X \<circ> r"] lim_decreasing_cl[of "X \<circ> r"]
+     by auto
+  then show ?thesis
+    using \<open>subseq r\<close> by auto
+qed
 
 end
--- a/src/HOL/Multivariate_Analysis/Convex_Euclidean_Space.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Multivariate_Analysis/Convex_Euclidean_Space.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -3391,6 +3391,60 @@
   ultimately show ?thesis by auto
 qed
 
+lemma continuous_ge_on_Ioo:
+  assumes "continuous_on {c..d} g" "\<And>x. x \<in> {c<..<d} \<Longrightarrow> g x \<ge> a" "c < d" "x \<in> {c..d}"
+  shows "g (x::real) \<ge> (a::real)"
+proof-
+  from assms(3) have "{c..d} = closure {c<..<d}" by (rule closure_greaterThanLessThan[symmetric])
+  also from assms(2) have "{c<..<d} \<subseteq> (g -` {a..} \<inter> {c..d})" by auto
+  hence "closure {c<..<d} \<subseteq> closure (g -` {a..} \<inter> {c..d})" by (rule closure_mono)
+  also from assms(1) have "closed (g -` {a..} \<inter> {c..d})"
+    by (auto simp: continuous_on_closed_vimage)
+  hence "closure (g -` {a..} \<inter> {c..d}) = g -` {a..} \<inter> {c..d}" by simp
+  finally show ?thesis using \<open>x \<in> {c..d}\<close> by auto 
+qed 
+
+lemma interior_real_semiline':
+  fixes a :: real
+  shows "interior {..a} = {..<a}"
+proof -
+  {
+    fix y
+    assume "a > y"
+    then have "y \<in> interior {..a}"
+      apply (simp add: mem_interior)
+      apply (rule_tac x="(a-y)" in exI)
+      apply (auto simp add: dist_norm)
+      done
+  }
+  moreover
+  {
+    fix y
+    assume "y \<in> interior {..a}"
+    then obtain e where e: "e > 0" "cball y e \<subseteq> {..a}"
+      using mem_interior_cball[of y "{..a}"] by auto
+    moreover from e have "y + e \<in> cball y e"
+      by (auto simp add: cball_def dist_norm)
+    ultimately have "a \<ge> y + e" by auto
+    then have "a > y" using e by auto
+  }
+  ultimately show ?thesis by auto
+qed
+
+lemma interior_atLeastAtMost_real: "interior {a..b} = {a<..<b :: real}"
+proof-
+  have "{a..b} = {a..} \<inter> {..b}" by auto
+  also have "interior ... = {a<..} \<inter> {..<b}" 
+    by (simp add: interior_real_semiline interior_real_semiline')
+  also have "... = {a<..<b}" by auto
+  finally show ?thesis .
+qed
+
+lemma frontier_real_Iic:
+  fixes a :: real
+  shows "frontier {..a} = {a}"
+  unfolding frontier_def by (auto simp add: interior_real_semiline')
+
 lemma rel_interior_real_box:
   fixes a b :: real
   assumes "a < b"
--- a/src/HOL/Multivariate_Analysis/Derivative.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Multivariate_Analysis/Derivative.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -2451,6 +2451,20 @@
   unfolding has_vector_derivative_def
   by (rule has_derivative_at_within)
 
+lemma has_vector_derivative_weaken:
+  fixes x D and f g s t
+  assumes f: "(f has_vector_derivative D) (at x within t)"
+    and "x \<in> s" "s \<subseteq> t" 
+    and "\<And>x. x \<in> s \<Longrightarrow> f x = g x"
+  shows "(g has_vector_derivative D) (at x within s)"
+proof -
+  have "(f has_vector_derivative D) (at x within s) \<longleftrightarrow> (g has_vector_derivative D) (at x within s)"
+    unfolding has_vector_derivative_def has_derivative_iff_norm
+    using assms by (intro conj_cong Lim_cong_within refl) auto
+  then show ?thesis
+    using has_vector_derivative_within_subset[OF f `s \<subseteq> t`] by simp
+qed
+
 lemma has_vector_derivative_transform_within:
   assumes "0 < d"
     and "x \<in> s"
--- a/src/HOL/Multivariate_Analysis/Topology_Euclidean_Space.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Multivariate_Analysis/Topology_Euclidean_Space.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -8,11 +8,12 @@
 
 theory Topology_Euclidean_Space
 imports
-  Complex_Main
+  "~~/src/HOL/Library/Indicator_Function"
   "~~/src/HOL/Library/Countable_Set"
   "~~/src/HOL/Library/FuncSet"
   Linear_Algebra
   Norm_Arith
+  
 begin
 
 lemma image_affinity_interval:
@@ -5736,6 +5737,60 @@
     done
 qed
 
+lemma isCont_indicator: 
+  fixes x :: "'a::t2_space"
+  shows "isCont (indicator A :: 'a \<Rightarrow> real) x = (x \<notin> frontier A)"
+proof auto
+  fix x
+  assume cts_at: "isCont (indicator A :: 'a \<Rightarrow> real) x" and fr: "x \<in> frontier A"
+  with continuous_at_open have 1: "\<forall>V::real set. open V \<and> indicator A x \<in> V \<longrightarrow>
+    (\<exists>U::'a set. open U \<and> x \<in> U \<and> (\<forall>y\<in>U. indicator A y \<in> V))" by auto
+  show False
+  proof (cases "x \<in> A")
+    assume x: "x \<in> A"
+    hence "indicator A x \<in> ({0<..<2} :: real set)" by simp
+    hence "\<exists>U. open U \<and> x \<in> U \<and> (\<forall>y\<in>U. indicator A y \<in> ({0<..<2} :: real set))"
+      using 1 open_greaterThanLessThan by blast
+    then guess U .. note U = this
+    hence "\<forall>y\<in>U. indicator A y > (0::real)"
+      unfolding greaterThanLessThan_def by auto
+    hence "U \<subseteq> A" using indicator_eq_0_iff by force
+    hence "x \<in> interior A" using U interiorI by auto
+    thus ?thesis using fr unfolding frontier_def by simp
+  next
+    assume x: "x \<notin> A"
+    hence "indicator A x \<in> ({-1<..<1} :: real set)" by simp
+    hence "\<exists>U. open U \<and> x \<in> U \<and> (\<forall>y\<in>U. indicator A y \<in> ({-1<..<1} :: real set))"
+      using 1 open_greaterThanLessThan by blast
+    then guess U .. note U = this
+    hence "\<forall>y\<in>U. indicator A y < (1::real)"
+      unfolding greaterThanLessThan_def by auto
+    hence "U \<subseteq> -A" by auto
+    hence "x \<in> interior (-A)" using U interiorI by auto
+    thus ?thesis using fr interior_complement unfolding frontier_def by auto
+  qed
+next
+  assume nfr: "x \<notin> frontier A"
+  hence "x \<in> interior A \<or> x \<in> interior (-A)"
+    by (auto simp: frontier_def closure_interior)
+  thus "isCont ((indicator A)::'a \<Rightarrow> real) x"
+  proof
+    assume int: "x \<in> interior A"
+    hence "\<exists>U. open U \<and> x \<in> U \<and> U \<subseteq> A" unfolding interior_def by auto
+    then guess U .. note U = this
+    hence "\<forall>y\<in>U. indicator A y = (1::real)" unfolding indicator_def by auto
+    hence "continuous_on U (indicator A)" by (simp add: continuous_on_const indicator_eq_1_iff)
+    thus ?thesis using U continuous_on_eq_continuous_at by auto
+  next
+    assume ext: "x \<in> interior (-A)"
+    hence "\<exists>U. open U \<and> x \<in> U \<and> U \<subseteq> -A" unfolding interior_def by auto
+    then guess U .. note U = this
+    hence "\<forall>y\<in>U. indicator A y = (0::real)" unfolding indicator_def by auto
+    hence "continuous_on U (indicator A)" by (smt U continuous_on_topological indicator_def)
+    thus ?thesis using U continuous_on_eq_continuous_at by auto
+  qed
+qed
+
 text \<open>Making a continuous function avoid some value in a neighbourhood.\<close>
 
 lemma continuous_within_avoid:
--- a/src/HOL/Probability/Bochner_Integration.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Probability/Bochner_Integration.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -904,6 +904,12 @@
 translations
   "\<integral> x. f \<partial>M" == "CONST lebesgue_integral M (\<lambda>x. f)"
 
+syntax
+  "_ascii_lebesgue_integral" :: "pttrn \<Rightarrow> 'a measure \<Rightarrow> real \<Rightarrow> real" ("(3LINT (1_)/|(_)./ _)" [0,110,60] 60)
+
+translations
+  "LINT x|M. f" == "CONST lebesgue_integral M (\<lambda>x. f)"
+
 lemma has_bochner_integral_integral_eq: "has_bochner_integral M f x \<Longrightarrow> integral\<^sup>L M f = x"
   by (metis the_equality has_bochner_integral_eq lebesgue_integral_def)
 
@@ -2581,6 +2587,33 @@
     by (simp add: integral_distr[symmetric, OF measurable_pair_swap' f] distr_pair_swap[symmetric])
 qed
 
+lemma (in pair_sigma_finite) Fubini_integrable:
+  fixes f :: "_ \<Rightarrow> _::{banach, second_countable_topology}"
+  assumes f[measurable]: "f \<in> borel_measurable (M1 \<Otimes>\<^sub>M M2)"
+    and integ1: "integrable M1 (\<lambda>x. \<integral> y. norm (f (x, y)) \<partial>M2)"
+    and integ2: "AE x in M1. integrable M2 (\<lambda>y. f (x, y))"
+  shows "integrable (M1 \<Otimes>\<^sub>M M2) f"
+proof (rule integrableI_bounded)
+  have "(\<integral>\<^sup>+ p. norm (f p) \<partial>(M1 \<Otimes>\<^sub>M M2)) = (\<integral>\<^sup>+ x. (\<integral>\<^sup>+ y. norm (f (x, y)) \<partial>M2) \<partial>M1)"
+    by (simp add: M2.nn_integral_fst [symmetric])
+  also have "\<dots> = (\<integral>\<^sup>+ x. \<bar>\<integral>y. norm (f (x, y)) \<partial>M2\<bar> \<partial>M1)"
+    apply (intro nn_integral_cong_AE)
+    using integ2
+  proof eventually_elim
+    fix x assume "integrable M2 (\<lambda>y. f (x, y))"
+    then have f: "integrable M2 (\<lambda>y. norm (f (x, y)))"
+      by simp
+    then have "(\<integral>\<^sup>+y. ereal (norm (f (x, y))) \<partial>M2) = ereal (LINT y|M2. norm (f (x, y)))"
+      by (rule nn_integral_eq_integral) simp
+    also have "\<dots> = ereal \<bar>LINT y|M2. norm (f (x, y))\<bar>"
+      using f by (auto intro!: abs_of_nonneg[symmetric] integral_nonneg_AE)
+    finally show "(\<integral>\<^sup>+y. ereal (norm (f (x, y))) \<partial>M2) = ereal \<bar>LINT y|M2. norm (f (x, y))\<bar>" .
+  qed
+  also have "\<dots> < \<infinity>"
+    using integ1 by (simp add: integrable_iff_bounded integral_nonneg_AE)
+  finally show "(\<integral>\<^sup>+ p. norm (f p) \<partial>(M1 \<Otimes>\<^sub>M M2)) < \<infinity>" .
+qed fact
+
 lemma (in pair_sigma_finite) emeasure_pair_measure_finite:
   assumes A: "A \<in> sets (M1 \<Otimes>\<^sub>M M2)" and finite: "emeasure (M1 \<Otimes>\<^sub>M M2) A < \<infinity>"
   shows "AE x in M1. emeasure M2 {y\<in>space M2. (x, y) \<in> A} < \<infinity>"
--- a/src/HOL/Probability/Borel_Space.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Probability/Borel_Space.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -1467,6 +1467,35 @@
     by simp
 qed
 
+lemma is_real_interval:
+  assumes S: "is_interval S"
+  shows "\<exists>a b::real. S = {} \<or> S = UNIV \<or> S = {..<b} \<or> S = {..b} \<or> S = {a<..} \<or> S = {a..} \<or>
+    S = {a<..<b} \<or> S = {a<..b} \<or> S = {a..<b} \<or> S = {a..b}"
+  using S unfolding is_interval_1 by (blast intro: interval_cases)
+
+lemma real_interval_borel_measurable:
+  assumes "is_interval (S::real set)"
+  shows "S \<in> sets borel"
+proof -
+  from assms is_real_interval have "\<exists>a b::real. S = {} \<or> S = UNIV \<or> S = {..<b} \<or> S = {..b} \<or>
+    S = {a<..} \<or> S = {a..} \<or> S = {a<..<b} \<or> S = {a<..b} \<or> S = {a..<b} \<or> S = {a..b}" by auto
+  then guess a ..
+  then guess b ..
+  thus ?thesis
+    by auto
+qed
+
+lemma borel_measurable_mono:
+  fixes f :: "real \<Rightarrow> real"
+  assumes "mono f"
+  shows "f \<in> borel_measurable borel"
+proof (subst borel_measurable_iff_ge, auto simp add:)
+  fix a :: real
+  have "is_interval {w. a \<le> f w}"
+    unfolding is_interval_1 using assms by (auto dest: monoD intro: order.trans)
+  thus "{w. a \<le> f w} \<in> sets borel" using real_interval_borel_measurable by auto  
+qed
+
 no_notation
   eucl_less (infix "<e" 50)
 
--- a/src/HOL/Probability/Giry_Monad.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Probability/Giry_Monad.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -91,9 +91,9 @@
   from mono have inj: "inj_on g {a..b}" by (rule strict_mono_on_imp_inj_on)
   from mono have mono': "mono_on g {a..b}" by (rule strict_mono_on_imp_mono_on)
   from mono' derivg have "\<And>x. x \<in> {a<..<b} \<Longrightarrow> g' x \<ge> 0"
-    by (rule mono_on_imp_deriv_nonneg) (auto simp: interior_atLeastAtMost_real)
+    by (rule mono_on_imp_deriv_nonneg) auto
   from contg' this have derivg_nonneg: "\<And>x. x \<in> {a..b} \<Longrightarrow> g' x \<ge> 0"
-    by (rule continuous_ge_on_Iii) (simp_all add: \<open>a < b\<close>)
+    by (rule continuous_ge_on_Ioo) (simp_all add: \<open>a < b\<close>)
 
   from derivg have contg: "continuous_on {a..b} g" by (rule has_real_derivative_imp_continuous_on)
   have A: "h -` {a..b} = {g a..g b}"
--- a/src/HOL/Probability/Interval_Integral.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Probability/Interval_Integral.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -389,11 +389,11 @@
            simp add: interval_lebesgue_integral_def einterval_iff)
 
 lemma interval_integral_Ioi:
-  "\<bar>a\<bar> < \<infinity> \<Longrightarrow> (LBINT x=a..\<infinity>. f x) = (LBINT x : {real a <..}. f x)"
+  "\<bar>a\<bar> < \<infinity> \<Longrightarrow> (LBINT x=a..\<infinity>. f x) = (LBINT x : {real_of_ereal a <..}. f x)"
   by (auto simp add: interval_lebesgue_integral_def einterval_iff)
 
 lemma interval_integral_Ioo:
-  "a \<le> b \<Longrightarrow> \<bar>a\<bar> < \<infinity> ==> \<bar>b\<bar> < \<infinity> \<Longrightarrow> (LBINT x=a..b. f x) = (LBINT x : {real a <..< real b}. f x)"
+  "a \<le> b \<Longrightarrow> \<bar>a\<bar> < \<infinity> ==> \<bar>b\<bar> < \<infinity> \<Longrightarrow> (LBINT x=a..b. f x) = (LBINT x : {real_of_ereal a <..< real_of_ereal b}. f x)"
   by (auto simp add: interval_lebesgue_integral_def einterval_iff)
 
 lemma interval_integral_discrete_difference:
--- a/src/HOL/Probability/Lebesgue_Integral_Substitution.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Probability/Lebesgue_Integral_Substitution.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -16,65 +16,6 @@
     "\<lbrakk>f \<in> measurable borel M; A \<in> sets M\<rbrakk> \<Longrightarrow> f -` A \<in> sets borel"
   by (drule (1) measurable_sets) simp
 
-lemma closure_Iii: 
-  assumes "a < b"
-  shows "closure {a<..<b::real} = {a..b}"
-proof-
-  have "{a<..<b} = ball ((a+b)/2) ((b-a)/2)" by (auto simp: dist_real_def field_simps not_less)
-  also from assms have "closure ... = cball ((a+b)/2) ((b-a)/2)" by (intro closure_ball) simp
-  also have "... = {a..b}" by (auto simp: dist_real_def field_simps not_less)
-  finally show ?thesis .
-qed
-
-lemma continuous_ge_on_Iii:
-  assumes "continuous_on {c..d} g" "\<And>x. x \<in> {c<..<d} \<Longrightarrow> g x \<ge> a" "c < d" "x \<in> {c..d}"
-  shows "g (x::real) \<ge> (a::real)"
-proof-
-  from assms(3) have "{c..d} = closure {c<..<d}" by (rule closure_Iii[symmetric])
-  also from assms(2) have "{c<..<d} \<subseteq> (g -` {a..} \<inter> {c..d})" by auto
-  hence "closure {c<..<d} \<subseteq> closure (g -` {a..} \<inter> {c..d})" by (rule closure_mono)
-  also from assms(1) have "closed (g -` {a..} \<inter> {c..d})"
-    by (auto simp: continuous_on_closed_vimage)
-  hence "closure (g -` {a..} \<inter> {c..d}) = g -` {a..} \<inter> {c..d}" by simp
-  finally show ?thesis using \<open>x \<in> {c..d}\<close> by auto 
-qed 
-
-lemma interior_real_semiline':
-  fixes a :: real
-  shows "interior {..a} = {..<a}"
-proof -
-  {
-    fix y
-    assume "a > y"
-    then have "y \<in> interior {..a}"
-      apply (simp add: mem_interior)
-      apply (rule_tac x="(a-y)" in exI)
-      apply (auto simp add: dist_norm)
-      done
-  }
-  moreover
-  {
-    fix y
-    assume "y \<in> interior {..a}"
-    then obtain e where e: "e > 0" "cball y e \<subseteq> {..a}"
-      using mem_interior_cball[of y "{..a}"] by auto
-    moreover from e have "y + e \<in> cball y e"
-      by (auto simp add: cball_def dist_norm)
-    ultimately have "a \<ge> y + e" by auto
-    then have "a > y" using e by auto
-  }
-  ultimately show ?thesis by auto
-qed
-
-lemma interior_atLeastAtMost_real: "interior {a..b} = {a<..<b :: real}"
-proof-
-  have "{a..b} = {a..} \<inter> {..b}" by auto
-  also have "interior ... = {a<..} \<inter> {..<b}" 
-    by (simp add: interior_real_semiline interior_real_semiline')
-  also have "... = {a<..<b}" by auto
-  finally show ?thesis .
-qed
-
 lemma nn_integral_indicator_singleton[simp]:
   assumes [measurable]: "{y} \<in> sets M"
   shows "(\<integral>\<^sup>+x. f x * indicator {y} x \<partial>M) = max 0 (f y) * emeasure M {y}"
--- a/src/HOL/Probability/Measure_Space.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Probability/Measure_Space.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -1410,6 +1410,9 @@
 lemma measure_nonneg: "0 \<le> measure M A"
   using emeasure_nonneg[of M A] unfolding measure_def by (auto intro: real_of_ereal_pos)
 
+lemma zero_less_measure_iff: "0 < measure M A \<longleftrightarrow> measure M A \<noteq> 0"
+  using measure_nonneg[of M A] by (auto simp add: le_less)
+
 lemma measure_le_0_iff: "measure M X \<le> 0 \<longleftrightarrow> measure M X = 0"
   using measure_nonneg[of M X] by auto
 
--- a/src/HOL/Probability/Set_Integral.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Probability/Set_Integral.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -14,13 +14,6 @@
     Notation
 *)
 
-syntax
-"_ascii_lebesgue_integral" :: "pttrn \<Rightarrow> 'a measure \<Rightarrow> real \<Rightarrow> real"
-("(3LINT (1_)/|(_)./ _)" [0,110,60] 60)
-
-translations
-"LINT x|M. f" == "CONST lebesgue_integral M (\<lambda>x. f)"
-
 abbreviation "set_borel_measurable M A f \<equiv> (\<lambda>x. indicator A x *\<^sub>R f x) \<in> borel_measurable M"
 
 abbreviation "set_integrable M A f \<equiv> integrable M (\<lambda>x. indicator A x *\<^sub>R f x)"
--- a/src/HOL/Probability/measurable.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Probability/measurable.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -204,7 +204,7 @@
 fun measurable_tac ctxt facts =
   let
     fun debug_fact msg thm () =
-      msg ^ " " ^ Pretty.str_of (Syntax.pretty_term ctxt (Thm.prop_of thm))
+      msg ^ " " ^ Pretty.unformatted_string_of (Syntax.pretty_term ctxt (Thm.prop_of thm))
 
     fun IF' c t i = COND (c i) (t i) no_tac
 
--- a/src/HOL/TPTP/atp_problem_import.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/TPTP/atp_problem_import.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -315,6 +315,7 @@
       | "DFG" => (DFG Monomorphic, "mono_native", liftingN)
       | _ => error ("Unknown format " ^ quote format_str ^
         " (expected \"FOF\", \"TF0\", \"TH0\", or \"DFG\")"))
+    val generate_info = false
     val uncurried_aliases = false
     val readable_names = true
     val presimp = true
@@ -322,7 +323,8 @@
       map (apfst (rpair (Global, Non_Rec_Def))) defs @
       map (apfst (rpair (Global, General))) nondefs
     val (atp_problem, _, _, _) =
-      generate_atp_problem ctxt format Hypothesis (type_enc_of_string Strict type_enc) Translator
+      generate_atp_problem ctxt generate_info format Hypothesis (type_enc_of_string Strict type_enc)
+        Translator
         lam_trans uncurried_aliases readable_names presimp [] conj facts
 
     val ord = effective_term_order ctxt spassN
--- a/src/HOL/TPTP/atp_theory_export.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/TPTP/atp_theory_export.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -75,12 +75,11 @@
                 | SOME failure => string_of_atp_failure failure))
   in outcome end
 
-fun is_problem_line_reprovable ctxt format prelude axioms deps
-                               (Formula (ident', _, phi, _, _)) =
+fun is_problem_line_reprovable ctxt format prelude axioms deps (Formula (ident', _, phi, _, _)) =
     is_none (run_atp ctxt format
-        ((factsN, Formula (ident', Conjecture, phi, NONE, []) ::
-                  map_filter (Symtab.lookup axioms) deps) ::
-         prelude))
+      ((factsN,
+        Formula (ident', Conjecture, phi, NONE, []) :: map_filter (Symtab.lookup axioms) deps) ::
+       prelude))
   | is_problem_line_reprovable _ _ _ _ _ _ = false
 
 fun inference_term _ [] = NONE
@@ -93,7 +92,7 @@
     |> SOME
 
 fun add_inferences_to_problem_line ctxt format check_infs prelude axioms infers
-        (line as Formula ((ident, alt), Axiom, phi, NONE, tms)) =
+      (line as Formula ((ident, alt), Axiom, phi, NONE, info)) =
     let
       val deps =
         case these (AList.lookup (op =) infers ident) of
@@ -106,7 +105,7 @@
           else
             deps
     in
-      Formula ((ident, alt), Lemma, phi, inference_term check_infs deps, tms)
+      Formula ((ident, alt), Lemma, phi, inference_term check_infs deps, info)
     end
   | add_inferences_to_problem_line _ _ _ _ _ _ line = line
 
@@ -170,7 +169,7 @@
       facts
       |> map (fn ((_, loc), th) =>
         ((Thm.get_name_hint th, loc), th |> Thm.prop_of |> mono ? monomorphize_term ctxt))
-      |> generate_atp_problem ctxt format Axiom type_enc Exporter combsN false false true []
+      |> generate_atp_problem ctxt true format Axiom type_enc Exporter combsN false false true []
         @{prop False}
       |> #1 |> sort_by (heading_sort_key o fst)
     val prelude = fst (split_last problem)
@@ -269,13 +268,13 @@
 val hol_base_name = encode_meta "HOL"
 
 fun should_generate_problem thy base_name (Formula ((_, alt), _, _, _, _)) =
-  case try (Global_Theory.get_thm thy) alt of
+  (case try (Global_Theory.get_thm thy) alt of
     SOME th =>
-    (* This is a crude hack to detect theorems stated and proved by the user (as
-       opposed to those derived by various packages). In addition, we leave out
-       everything in "HOL" as too basic to be interesting. *)
+    (* This is a crude hack to detect theorems stated and proved by the user (as opposed to those
+       derived by various packages). In addition, we leave out everything in "HOL" as too basic to
+       be interesting. *)
     Thm.legacy_get_kind th <> "" andalso base_name <> hol_base_name
-  | NONE => false
+  | NONE => false)
 
 (* Convention: theoryname__goalname *)
 fun problem_name_of base_name n alt =
@@ -342,13 +341,12 @@
       | write_problem_files n seen_facts includes seen_ss
           ((base_name, fact_line :: fact_lines) :: groups) =
         let
-          val (ident, alt, pname, sname, conj) =
+          val (alt, pname, sname, conj) =
             (case fact_line of
               Formula ((ident, alt), _, phi, _, _) =>
-              (ident, alt, problem_name_of base_name n (encode_meta alt),
+              (alt, problem_name_of base_name n (encode_meta alt),
                suggestion_name_of base_name n (encode_meta alt),
                Formula ((ident, alt), Conjecture, phi, NONE, [])))
-
           val fact = the (Symtab.lookup fact_tab alt)
           val fact_s = tptp_string_of_line format fact_line
         in
--- a/src/HOL/Tools/ATP/atp_problem.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/ATP/atp_problem.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -98,7 +98,7 @@
   val tptp_empty_list : string
   val dfg_individual_type : string
   val isabelle_info_prefix : string
-  val isabelle_info : string -> int -> (string, 'a) atp_term list
+  val isabelle_info : bool -> string -> int -> (string, 'a) atp_term list
   val extract_isabelle_status : (string, 'a) atp_term list -> string option
   val extract_isabelle_rank : (string, 'a) atp_term list -> int
   val inductionN : string
@@ -138,6 +138,7 @@
     ('c -> 'e) -> ('a, 'b, 'c, 'd) atp_formula -> ('a, 'b, 'e, 'd) atp_formula
   val strip_atype : 'a atp_type -> 'a list * ('a atp_type list * 'a atp_type)
   val is_format_higher_order : atp_format -> bool
+  val tptp_string_of_format : atp_format -> string
   val tptp_string_of_line : atp_format -> string atp_problem_line -> string
   val lines_of_atp_problem :
     atp_format -> term_order -> (unit -> (string * int) list)
@@ -271,12 +272,16 @@
 val default_rank = 1000
 val default_term_order_weight = 1
 
-(* Currently, only SPASS 3.8ds can process Isabelle metainformation. *)
-fun isabelle_info status rank =
-  [] |> rank <> default_rank
-        ? cons (ATerm ((isabelle_info_prefix ^ rankN, []),
-                       [ATerm ((string_of_int rank, []), [])]))
-     |> status <> "" ? cons (ATerm ((isabelle_info_prefix ^ status, []), []))
+(* Currently, only SPASS 3.8ds and (to a lesser extent) Metis can process Isabelle
+   metainformation. *)
+fun isabelle_info generate_info status rank =
+  if generate_info then
+    [] |> rank <> default_rank
+          ? cons (ATerm ((isabelle_info_prefix ^ rankN, []),
+                         [ATerm ((string_of_int rank, []), [])]))
+       |> status <> "" ? cons (ATerm ((isabelle_info_prefix ^ status, []), []))
+  else
+    []
 
 fun extract_isabelle_status (ATerm ((s, []), []) :: _) =
     try (unprefix isabelle_info_prefix) s
@@ -542,13 +547,16 @@
   | tptp_string_of_line format (Sym_Decl (ident, sym, ty)) =
     tptp_string_of_format format ^ "(" ^ ident ^ ", type,\n    " ^ sym ^
     " : " ^ string_of_type format ty ^ ").\n"
-  | tptp_string_of_line format (Formula ((ident, alt), kind, phi, source, _)) =
+  | tptp_string_of_line format (Formula ((ident, alt), kind, phi, source, info)) =
     tptp_string_of_format format ^ "(" ^ ident ^ ", " ^
     tptp_string_of_role kind ^ "," ^ "\n    (" ^
     tptp_string_of_formula format phi ^ ")" ^
     (case source of
       SOME tm => ", " ^ tptp_string_of_term format tm
-    | NONE => "") ^
+    | NONE => if null info then "" else ", []") ^
+    (case info of
+      [] => ""
+    | tms => ", [" ^ commas (map (tptp_string_of_term format) tms) ^ "]") ^
     ")." ^ maybe_alt alt ^ "\n"
 
 fun tptp_lines format =
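
A minimal Isabelle/ML sketch of the effect of the new generate_info flag, assuming the
definitions in the hunks above (isabelle_info, isabelle_info_prefix, rankN, default_rank);
the status string "simp" is purely illustrative:

  isabelle_info true "simp" default_rank;
    (* = [ATerm ((isabelle_info_prefix ^ "simp", []), [])]
       -- the rank equals default_rank, so only the status term is emitted *)
  isabelle_info true "simp" 1;
    (* = [ATerm ((isabelle_info_prefix ^ "simp", []), []),
          ATerm ((isabelle_info_prefix ^ rankN, []), [ATerm (("1", []), [])])] *)
  isabelle_info false "simp" 1;
    (* = [] -- with generate_info = false, all Isabelle metainformation is suppressed *)
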
--- a/src/HOL/Tools/ATP/atp_problem_generate.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/ATP/atp_problem_generate.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -112,9 +112,10 @@
     Proof.context -> type_enc -> string -> term list -> term list * term list
   val string_of_status : status -> string
   val factsN : string
-  val generate_atp_problem : Proof.context -> atp_format -> atp_formula_role -> type_enc -> mode ->
-    string -> bool -> bool -> bool -> term list -> term -> ((string * stature) * term) list ->
-    string atp_problem * string Symtab.table * (string * term) list * int Symtab.table
+  val generate_atp_problem : Proof.context -> bool -> atp_format -> atp_formula_role -> type_enc ->
+    mode -> string -> bool -> bool -> bool -> term list -> term ->
+    ((string * stature) * term) list -> string atp_problem * string Symtab.table
+    * (string * term) list * int Symtab.table
   val atp_problem_selection_weights : string atp_problem -> (string * real) list
   val atp_problem_term_order_info : string atp_problem -> (string * int) list
 end;
@@ -2017,7 +2018,7 @@
 (* Each fact is given a unique fact number to avoid name clashes (e.g., because
    of monomorphization). The TPTP forbids name clashes, and some of the remote
    provers might care. *)
-fun line_of_fact ctxt prefix encode alt freshen pos mono type_enc rank
+fun line_of_fact ctxt generate_info prefix encode alt freshen pos mono type_enc rank
         (j, {name, stature = (_, status), role, iformula, atomic_types}) =
   Formula ((prefix ^ (if freshen then string_of_int j ^ "_" else "") ^
             encode name, alt name),
@@ -2027,23 +2028,22 @@
                   should_guard_var_in_formula (if pos then SOME true else NONE)
            |> close_formula_universally
            |> bound_tvars type_enc true atomic_types,
-           NONE, isabelle_info (string_of_status status) (rank j))
+           NONE, isabelle_info generate_info (string_of_status status) (rank j))
 
-fun lines_of_subclass type_enc sub super =
+fun lines_of_subclass generate_info type_enc sub super =
   Formula ((subclass_prefix ^ ascii_of sub ^ "___" ^ ascii_of super, ""), Axiom,
            AConn (AImplies,
                   [sub, super] |> map (fn s => class_atom type_enc (s, tvar_a)))
            |> bound_tvars type_enc false [tvar_a],
-           NONE, isabelle_info inductiveN helper_rank)
+           NONE, isabelle_info generate_info inductiveN helper_rank)
 
-fun lines_of_subclass_pair type_enc (sub, supers) =
+fun lines_of_subclass_pair generate_info type_enc (sub, supers) =
   if polymorphism_of_type_enc type_enc = Type_Class_Polymorphic then
-    [Class_Decl (class_decl_prefix ^ ascii_of sub, `make_class sub,
-                 map (`make_class) supers)]
+    [Class_Decl (class_decl_prefix ^ ascii_of sub, `make_class sub, map (`make_class) supers)]
   else
-    map (lines_of_subclass type_enc sub) supers
+    map (lines_of_subclass generate_info type_enc sub) supers
 
-fun line_of_tcon_clause type_enc (name, prems, (cl, T)) =
+fun line_of_tcon_clause generate_info type_enc (name, prems, (cl, T)) =
   if polymorphism_of_type_enc type_enc = Type_Class_Polymorphic then
     Class_Memb (class_memb_prefix ^ name,
       map (fn (cls, T) => (T |> dest_TVar |> tvar_name, map (`make_class) cls)) prems,
@@ -2053,14 +2053,12 @@
              mk_ahorn (maps (class_atoms type_enc) prems)
                       (class_atom type_enc (cl, T))
              |> bound_tvars type_enc true (snd (dest_Type T)),
-             NONE, isabelle_info inductiveN helper_rank)
+             NONE, isabelle_info generate_info inductiveN helper_rank)
 
-fun line_of_conjecture ctxt mono type_enc
-                       ({name, role, iformula, atomic_types, ...} : ifact) =
+fun line_of_conjecture ctxt mono type_enc ({name, role, iformula, atomic_types, ...} : ifact) =
   Formula ((conjecture_prefix ^ name, ""), role,
            iformula
-           |> formula_of_iformula ctxt mono type_enc
-                  should_guard_var_in_formula (SOME false)
+           |> formula_of_iformula ctxt mono type_enc should_guard_var_in_formula (SOME false)
            |> close_formula_universally
            |> bound_tvars type_enc true atomic_types, NONE, [])
 
@@ -2230,7 +2228,7 @@
           ? fold (add_fact_monotonic_types ctxt mono type_enc) facts
   end
 
-fun line_of_guards_mono_type ctxt mono type_enc T =
+fun line_of_guards_mono_type ctxt generate_info mono type_enc T =
   Formula ((guards_sym_formula_prefix ^ ascii_of (mangled_type type_enc T), ""),
            Axiom,
            IConst (`make_bound_var "X", T, [])
@@ -2240,21 +2238,21 @@
                                   (SOME true)
            |> close_formula_universally
            |> bound_tvars type_enc true (atomic_types_of T),
-           NONE, isabelle_info inductiveN helper_rank)
+           NONE, isabelle_info generate_info inductiveN helper_rank)
 
-fun line_of_tags_mono_type ctxt mono type_enc T =
+fun line_of_tags_mono_type ctxt generate_info mono type_enc T =
   let val x_var = ATerm ((`make_bound_var "X", []), []) in
     Formula ((tags_sym_formula_prefix ^ ascii_of (mangled_type type_enc T), ""), Axiom,
              eq_formula type_enc (atomic_types_of T) [] false
                   (tag_with_type ctxt mono type_enc NONE T x_var) x_var,
-             NONE, isabelle_info non_rec_defN helper_rank)
+             NONE, isabelle_info generate_info non_rec_defN helper_rank)
   end
 
-fun lines_of_mono_types ctxt mono type_enc =
+fun lines_of_mono_types ctxt generate_info mono type_enc =
   (case type_enc of
     Native _ => K []
-  | Guards _ => map (line_of_guards_mono_type ctxt mono type_enc)
-  | Tags _ => map (line_of_tags_mono_type ctxt mono type_enc))
+  | Guards _ => map (line_of_guards_mono_type ctxt generate_info mono type_enc)
+  | Tags _ => map (line_of_tags_mono_type ctxt generate_info mono type_enc))
 
 fun decl_line_of_sym ctxt mono type_enc s (s', T_args, T, pred_sym, ary, _) =
   let
@@ -2280,8 +2278,8 @@
 
 fun honor_conj_sym_role in_conj = (if in_conj then Hypothesis else Axiom, I)
 
-fun line_of_guards_sym_decl ctxt mono type_enc n s j
-                            (s', T_args, T, _, ary, in_conj) =
+fun line_of_guards_sym_decl ctxt generate_info mono type_enc n s j
+    (s', T_args, T, _, ary, in_conj) =
   let
     val thy = Proof_Context.theory_of ctxt
     val (role, maybe_negate) = honor_conj_sym_role in_conj
@@ -2310,11 +2308,11 @@
              |> close_formula_universally
              |> bound_tvars type_enc (n > 1) (atomic_types_of T)
              |> maybe_negate,
-             NONE, isabelle_info inductiveN helper_rank)
+             NONE, isabelle_info generate_info inductiveN helper_rank)
   end
 
-fun lines_of_tags_sym_decl ctxt mono type_enc n s
-                           (j, (s', T_args, T, pred_sym, ary, in_conj)) =
+fun lines_of_tags_sym_decl ctxt generate_info mono type_enc n s
+    (j, (s', T_args, T, pred_sym, ary, in_conj)) =
   let
     val thy = Proof_Context.theory_of ctxt
     val level = level_of_type_enc type_enc
@@ -2330,7 +2328,7 @@
     val tag_with = tag_with_type ctxt mono type_enc NONE
     fun formula c =
       [Formula ((ident, ""), role, eq (tag_with res_T c) c, NONE,
-                isabelle_info non_rec_defN helper_rank)]
+                isabelle_info generate_info non_rec_defN helper_rank)]
   in
     if pred_sym orelse not (should_encode_type ctxt mono level res_T) then
       []
@@ -2355,7 +2353,7 @@
     end
   | rationalize_decls _ decls = decls
 
-fun lines_of_sym_decls ctxt mono type_enc (s, decls) =
+fun lines_of_sym_decls ctxt generate_info mono type_enc (s, decls) =
   (case type_enc of
     Native _ => [decl_line_of_sym ctxt mono type_enc s (hd decls)]
   | Guards (_, level) =>
@@ -2365,21 +2363,23 @@
       val n = length decls
       val decls = decls |> filter (should_encode_type ctxt mono level o result_type_of_decl)
     in
-      (0 upto length decls - 1, decls) |-> map2 (line_of_guards_sym_decl ctxt mono type_enc n s)
+      (0 upto length decls - 1, decls)
+      |-> map2 (line_of_guards_sym_decl ctxt generate_info mono type_enc n s)
     end
   | Tags (_, level) =>
     if is_type_level_uniform level then
       []
     else
       let val n = length decls in
-        (0 upto n - 1 ~~ decls) |> maps (lines_of_tags_sym_decl ctxt mono type_enc n s)
+        (0 upto n - 1 ~~ decls)
+        |> maps (lines_of_tags_sym_decl ctxt generate_info mono type_enc n s)
       end)
 
-fun lines_of_sym_decl_table ctxt mono type_enc mono_Ts sym_decl_tab =
+fun lines_of_sym_decl_table ctxt generate_info mono type_enc mono_Ts sym_decl_tab =
   let
     val syms = sym_decl_tab |> Symtab.dest |> sort_by fst
-    val mono_lines = lines_of_mono_types ctxt mono type_enc mono_Ts
-    val decl_lines = maps (lines_of_sym_decls ctxt mono type_enc) syms
+    val mono_lines = lines_of_mono_types ctxt generate_info mono type_enc mono_Ts
+    val decl_lines = maps (lines_of_sym_decls ctxt generate_info mono type_enc) syms
   in mono_lines @ decl_lines end
 
 fun datatypes_of_sym_table ctxt ctrss (DFG Polymorphic) (type_enc as Native _) uncurried_aliases
@@ -2426,8 +2426,8 @@
 
 fun pair_append (xs1, xs2) (ys1, ys2) = (xs1 @ ys1, xs2 @ ys2)
 
-fun do_uncurried_alias_lines_of_sym ctxt ctrss mono type_enc sym_tab0 sym_tab
-                                    base_s0 types in_conj =
+fun do_uncurried_alias_lines_of_sym ctxt generate_info ctrss mono type_enc sym_tab0 sym_tab base_s0
+    types in_conj =
   let
     fun do_alias ary =
       let
@@ -2464,31 +2464,32 @@
       in
         ([tm1, tm2],
          [Formula ((uncurried_alias_eq_prefix ^ s2, ""), role, eq |> maybe_negate, NONE,
-            isabelle_info non_rec_defN helper_rank)])
+            isabelle_info generate_info non_rec_defN helper_rank)])
         |> (if ary - 1 = base_ary orelse Symtab.defined sym_tab s1 then I
             else pair_append (do_alias (ary - 1)))
       end
   in do_alias end
 
-fun uncurried_alias_lines_of_sym ctxt ctrss mono type_enc sym_tab0 sym_tab
+fun uncurried_alias_lines_of_sym ctxt generate_info ctrss mono type_enc sym_tab0 sym_tab
         (s, {min_ary, types, in_conj, ...} : sym_info) =
   (case unprefix_and_unascii const_prefix s of
     SOME mangled_s =>
     if String.isSubstring uncurried_alias_sep mangled_s then
       let val base_s0 = mangled_s |> unmangled_invert_const in
-        do_uncurried_alias_lines_of_sym ctxt ctrss mono type_enc sym_tab0 sym_tab base_s0 types
-          in_conj min_ary
+        do_uncurried_alias_lines_of_sym ctxt generate_info ctrss mono type_enc sym_tab0 sym_tab
+          base_s0 types in_conj min_ary
       end
     else
       ([], [])
   | NONE => ([], []))
 
-fun uncurried_alias_lines_of_sym_table ctxt ctrss mono type_enc uncurried_aliases sym_tab0 sym_tab =
+fun uncurried_alias_lines_of_sym_table ctxt generate_info ctrss mono type_enc uncurried_aliases
+    sym_tab0 sym_tab =
   ([], [])
   |> uncurried_aliases
-    ? Symtab.fold_rev
-        (pair_append o uncurried_alias_lines_of_sym ctxt ctrss mono type_enc sym_tab0 sym_tab)
-        sym_tab
+    ? Symtab.fold_rev (pair_append
+        o uncurried_alias_lines_of_sym ctxt generate_info ctrss mono type_enc sym_tab0 sym_tab)
+      sym_tab
 
 val implicit_declsN = "Could-be-implicit typings"
 val explicit_declsN = "Explicit typings"
@@ -2563,8 +2564,8 @@
 
 val app_op_and_predicator_threshold = 45
 
-fun generate_atp_problem ctxt format prem_role type_enc mode lam_trans uncurried_aliases
-    readable_names presimp hyp_ts concl_t facts =
+fun generate_atp_problem ctxt generate_info format prem_role type_enc mode lam_trans
+    uncurried_aliases readable_names presimp hyp_ts concl_t facts =
   let
     val thy = Proof_Context.theory_of ctxt
     val type_enc = type_enc |> adjust_type_enc format
@@ -2595,7 +2596,8 @@
     val (ho_stuff, sym_tab) =
       sym_table_of_facts ctxt type_enc Min_App_Op conjs facts
     val (uncurried_alias_eq_tms, uncurried_alias_eq_lines) =
-      uncurried_alias_lines_of_sym_table ctxt ctrss mono type_enc uncurried_aliases sym_tab0 sym_tab
+      uncurried_alias_lines_of_sym_table ctxt generate_info ctrss mono type_enc uncurried_aliases
+        sym_tab0 sym_tab
     val (_, sym_tab) =
       (ho_stuff, sym_tab)
       |> fold (add_iterm_syms_to_sym_table ctxt Min_App_Op false false)
@@ -2610,7 +2612,7 @@
     val sym_decl_lines =
       (conjs, helpers @ facts, uncurried_alias_eq_tms)
       |> sym_decl_table_of_facts thy type_enc sym_tab
-      |> lines_of_sym_decl_table ctxt mono type_enc mono_Ts
+      |> lines_of_sym_decl_table ctxt generate_info mono type_enc mono_Ts
     val datatype_decl_lines = map decl_line_of_datatype datatypes
     val decl_lines = class_decl_lines @ sym_decl_lines @ datatype_decl_lines
     val num_facts = length facts
@@ -2618,13 +2620,14 @@
     val pos = mode <> Exporter
     val rank_of = rank_of_fact_num num_facts
     val fact_lines =
-      map (line_of_fact ctxt fact_prefix ascii_of I freshen pos mono type_enc rank_of)
+      map (line_of_fact ctxt generate_info fact_prefix ascii_of I freshen pos mono type_enc rank_of)
         (0 upto num_facts - 1 ~~ facts)
-    val subclass_lines = maps (lines_of_subclass_pair type_enc) subclass_pairs
-    val tcon_lines = map (line_of_tcon_clause type_enc) tcon_clauses
+    val subclass_lines = maps (lines_of_subclass_pair generate_info type_enc) subclass_pairs
+    val tcon_lines = map (line_of_tcon_clause generate_info type_enc) tcon_clauses
     val helper_lines =
       0 upto length helpers - 1 ~~ helpers
-      |> map (line_of_fact ctxt helper_prefix I (K "") false true mono type_enc (K default_rank))
+      |> map (line_of_fact ctxt generate_info helper_prefix I (K "") false true mono type_enc
+        (K default_rank))
     val free_type_lines = lines_of_free_types type_enc (facts @ conjs)
     val conj_lines = map (line_of_conjecture ctxt mono type_enc) conjs
     (* Reordering these might confuse the proof reconstruction code. *)
--- a/src/HOL/Tools/Lifting/lifting_setup.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -368,16 +368,13 @@
             in
               if null non_trivial_assms then ()
               else
-                let
-                  val pretty_msg = Pretty.block ([Pretty.str "Non-trivial assumptions in ",
-                    Pretty.str thm_name,
-                    Pretty.str " transfer rule found:",
-                    Pretty.brk 1] @ 
-                    ((Pretty.commas o map (Syntax.pretty_term ctxt)) non_trivial_assms) @
-                                       [Pretty.str "."])
-                in
-                  warning (Pretty.str_of pretty_msg)
-                end
+                Pretty.block ([Pretty.str "Non-trivial assumptions in ",
+                  Pretty.str thm_name,
+                  Pretty.str " transfer rule found:",
+                  Pretty.brk 1] @ 
+                  Pretty.commas (map (Syntax.pretty_term ctxt) non_trivial_assms))
+                |> Pretty.string_of
+                |> warning
             end
         end
   
--- a/src/HOL/Tools/Metis/metis_generate.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/Metis/metis_generate.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -227,7 +227,7 @@
     *)
     val lam_trans = if lam_trans = combsN then no_lamsN else lam_trans
     val (atp_problem, _, lifted, sym_tab) =
-      generate_atp_problem ctxt CNF Hypothesis type_enc Metis lam_trans false false false []
+      generate_atp_problem ctxt true CNF Hypothesis type_enc Metis lam_trans false false false []
         @{prop False} props
     (*
     val _ = tracing ("ATP PROBLEM: " ^
--- a/src/HOL/Tools/Nitpick/nitpick_hol.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick_hol.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -575,11 +575,6 @@
 fun dest_n_tuple 1 t = [t]
   | dest_n_tuple n t = HOLogic.dest_prod t ||> dest_n_tuple (n - 1) |> op ::
 
-type typedef_info =
-  {rep_type: typ, abs_type: typ, Rep_name: string, Abs_name: string,
-   prop_of_Rep: thm, set_name: string option, Abs_inverse: thm option,
-   Rep_inverse: thm option}
-
 fun typedef_info ctxt s =
   if is_frac_type ctxt (Type (s, [])) then
     SOME {abs_type = Type (s, []), rep_type = @{typ "int * int"},
--- a/src/HOL/Tools/Nitpick/nitpick_util.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick_util.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -280,9 +280,8 @@
 val maybe_quote = ATP_Util.maybe_quote
 
 fun pretty_maybe_quote keywords pretty =
-  let val s = Pretty.str_of pretty in
-    if maybe_quote keywords s = s then pretty else Pretty.quote pretty
-  end
+  let val s = Pretty.unformatted_string_of pretty
+  in if maybe_quote keywords s = s then pretty else Pretty.quote pretty end
 
 val hashw = ATP_Util.hashw
 val hashw_string = ATP_Util.hashw_string
--- a/src/HOL/Tools/SMT/smtlib.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/SMT/smtlib.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -186,6 +186,6 @@
   | pretty_tree (S trees) =
       Pretty.enclose "(" ")" (Pretty.separate "" (map pretty_tree trees))
 
-val str_of = Pretty.str_of o pretty_tree
+val str_of = Pretty.unformatted_string_of o pretty_tree
 
 end;
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -163,7 +163,8 @@
 fun fact_of_ref ctxt keywords chained css (xthm as (xref, args)) =
   let
     val ths = Attrib.eval_thms ctxt [xthm]
-    val bracket = implode (map (enclose "[" "]" o Pretty.str_of o Token.pretty_src ctxt) args)
+    val bracket =
+      implode (map (enclose "[" "]" o Pretty.unformatted_string_of o Token.pretty_src ctxt) args)
 
     fun nth_name j =
       (case xref of
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_prover_atp.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_prover_atp.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -235,6 +235,7 @@
             val num_facts =
               Real.ceil (max_fact_factor * Real.fromInt best_max_facts) + max_fact_factor_fudge
               |> Integer.min (length facts)
+            val generate_info = (case format of DFG _ => true | _ => false)
             val strictness = if strict then Strict else Non_Strict
             val type_enc = type_enc |> choose_type_enc strictness best_type_enc format
             val sound = is_type_enc_sound type_enc
@@ -273,8 +274,8 @@
                       generate_waldmeister_problem ctxt hyp_ts concl_t
                       #> (fn (a, b, c, d, e) => (a, b, c, d, SOME e))
                     else
-                      generate_atp_problem ctxt format prem_role type_enc atp_mode lam_trans
-                        uncurried_aliases readable_names true hyp_ts concl_t
+                      generate_atp_problem ctxt generate_info format prem_role type_enc atp_mode
+                        lam_trans uncurried_aliases readable_names true hyp_ts concl_t
                       #> (fn (a, b, c, d) => (a, b, c, d, NONE)))
 
             fun sel_weights () = atp_problem_selection_weights atp_problem
--- a/src/HOL/Tools/record.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/record.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -1805,6 +1805,13 @@
      distinct_discsss = [], exhaust_discs = [], exhaust_sels = [], collapses = [], expands = [],
      split_sels = [], split_sel_asms = [], case_eq_ifs = []};
 
+fun lhs_of_equation (Const (@{const_name Pure.eq}, _) $ t $ _) = t
+  | lhs_of_equation (@{const Trueprop} $ (Const (@{const_name HOL.eq}, _) $ t $ _)) = t;
+
+fun add_spec_rule rule =
+  let val head = head_of (lhs_of_equation (Thm.prop_of rule)) in
+    Spec_Rules.add_global Spec_Rules.Equational ([head], [rule])
+  end;
 
 (* definition *)
 
@@ -2040,7 +2047,7 @@
                 map (Thm.no_attributes o apfst (Binding.concealed o Binding.name))) upd_specs
           ||>> (Global_Theory.add_defs false o
                 map (rpair [Code.add_default_eqn_attribute]
-                o apfst (Binding.concealed o Binding.name))) (*FIXME Spec_Rules (?)*)
+                o apfst (Binding.concealed o Binding.name)))
             [make_spec, fields_spec, extend_spec, truncate_spec]);
     val defs_ctxt = Proof_Context.init_global defs_thy;
 
@@ -2052,7 +2059,7 @@
 
     (*selectors*)
     val sel_conv_props =
-       map (fn (c, x as Free (_, T)) => mk_sel r_rec0 (c, T) === x) named_vars_more;
+      map (fn (c, x as Free (_, T)) => mk_sel r_rec0 (c, T) === x) named_vars_more;
 
     (*updates*)
     fun mk_upd_prop i (c, T) =
@@ -2239,8 +2246,6 @@
         sel_convs' upd_convs' sel_defs' upd_defs' fold_congs' unfold_congs' splits' derived_defs'
         surjective' equality' induct_scheme' induct' cases_scheme' cases' simps' iffs';
 
-    val sels = map (fst o Logic.dest_equals o Thm.prop_of) sel_defs;
-
     val final_thy =
       thms_thy'
       |> put_record name info
@@ -2253,6 +2258,7 @@
       |> add_fieldext (extension_name, snd extension) names
       |> add_code ext_tyco vs extT ext simps' ext_inject
       |> add_ctr_sugar (Const ext) cases_scheme' ext_inject sel_convs'
+      |> fold add_spec_rule (sel_convs' @ upd_convs' @ derived_defs')
       |> Sign.restore_naming thy0;
 
   in final_thy end;
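
A hedged sketch of what the new add_spec_rule registers, using the two equation shapes that
lhs_of_equation handles; f, g, x below are arbitrary frees for illustration, not record
constants, and head_of is the usual Pure term operation:

  lhs_of_equation @{term "f x \<equiv> g x"};         (* Pure equality: returns the term f x *)
  lhs_of_equation @{prop "f x = g x"};              (* HOL equality under Trueprop: also f x *)
  head_of (lhs_of_equation @{prop "f x = g x"});    (* the head f, i.e. what Spec_Rules records
                                                       for each selector/update/derived equation *)
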
--- a/src/HOL/Tools/try0.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Tools/try0.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -163,7 +163,7 @@
 
 fun string_of_xthm (xref, args) =
   Facts.string_of_ref xref ^
-  implode (map (enclose "[" "]" o Pretty.str_of o Token.pretty_src @{context}) args);
+  implode (map (enclose "[" "]" o Pretty.unformatted_string_of o Token.pretty_src @{context}) args);
 
 val parse_fact_refs =
   Scan.repeat1 (Scan.unless (Parse.name -- Args.colon) (Parse.xthm >> string_of_xthm));
--- a/src/HOL/Transcendental.thy	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/HOL/Transcendental.thy	Mon Dec 21 21:34:14 2015 +0100
@@ -4680,7 +4680,7 @@
   DERIV_arctan[THEN DERIV_chain2, derivative_intros]
   DERIV_arctan[THEN DERIV_chain2, unfolded has_field_derivative_def, derivative_intros]
 
-lemma filterlim_tan_at_right: "filterlim tan at_bot (at_right (- pi/2))"
+lemma filterlim_tan_at_right: "filterlim tan at_bot (at_right (- (pi/2)))"
   by (rule filterlim_at_bot_at_right[where Q="\<lambda>x. - pi/2 < x \<and> x < pi/2" and P="\<lambda>x. True" and g=arctan])
      (auto simp: arctan le_less eventually_at dist_real_def simp del: less_divide_eq_numeral1
            intro!: tan_monotone exI[of _ "pi/2"])
--- a/src/Pure/General/binding.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/General/binding.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -156,7 +156,7 @@
       [Pretty.str (Long_Name.implode (map #1 (prefix @ qualifier) @ [name]))]
     |> Pretty.quote;
 
-val print = Pretty.str_of o pretty;
+val print = Pretty.unformatted_string_of o pretty;
 
 val pp = pretty o set_pos Position.none;
 
--- a/src/Pure/General/output.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/General/output.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -10,6 +10,9 @@
   val tracing: string -> unit
   val warning: string -> unit
   val legacy_feature: string -> unit
+  val profile_time: ('a -> 'b) -> 'a -> 'b
+  val profile_time_thread: ('a -> 'b) -> 'a -> 'b
+  val profile_allocations: ('a -> 'b) -> 'a -> 'b
 end;
 
 signature OUTPUT =
@@ -131,6 +134,40 @@
 fun protocol_message props ss = ! protocol_message_fn props (map output ss);
 fun try_protocol_message props ss = protocol_message props ss handle Protocol_Message _ => ();
 
+
+(* profiling *)
+
+local
+
+fun output_profile title entries =
+  let
+    val body = entries
+      |> sort (int_ord o apply2 fst)
+      |> map (fn (count, name) =>
+          let
+            val c = string_of_int count;
+            val prefix = replicate_string (Int.max (0, 10 - size c)) " ";
+          in prefix ^ c ^ " " ^ name end);
+    val total = fold (curry (op +) o fst) entries 0;
+  in
+    if total = 0 then ()
+    else warning (cat_lines (title :: (body @ ["total: " ^ string_of_int total])))
+  end;
+
+in
+
+fun profile_time f x =
+  ML_Profiling.profile_time (output_profile "time profile:") f x;
+
+fun profile_time_thread f x =
+  ML_Profiling.profile_time_thread (output_profile "time profile (this thread):") f x;
+
+fun profile_allocations f x =
+  ML_Profiling.profile_allocations (output_profile "allocations profile:") f x;
+
+end;
+
+
 end;
 
 structure Basic_Output: BASIC_OUTPUT = Output;
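
A hedged usage sketch of the profiling combinators added above; the computation being
profiled is invented for the example, and the printed report follows output_profile:

  val _ =
    Output.profile_time
      (fn k => fold (fn i => fn acc => acc + i * i) (1 upto k) 0) 100000;
  (* prints a warning of the form
       time profile:
         <count> <ML function name>
         ...
       total: <sum of counts>
     profile_time_thread restricts sampling to the current thread, and
     profile_allocations counts heap allocations instead of time samples *)
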
--- a/src/Pure/General/path.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/General/path.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -171,7 +171,7 @@
   let val s = implode_path path
   in Pretty.mark (Markup.path s) (Pretty.str (quote s)) end;
 
-val print = Pretty.str_of o pretty;
+val print = Pretty.unformatted_string_of o pretty;
 
 
 (* base element *)
--- a/src/Pure/General/pretty.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/General/pretty.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -12,28 +12,27 @@
 breaking information.  A "break" inserts a newline if the text until
 the next break is too long to fit on the current line.  After the newline,
 text is indented to the level of the enclosing block.  Normally, if a block
-is broken then all enclosing blocks will also be broken.  Only "inconsistent
-breaks" are provided.
+is broken then all enclosing blocks will also be broken.
 
-The stored length of a block is used in breakdist (to treat each inner block as
+The stored length of a block is used in break_dist (to treat each inner block as
 a unit for breaking).
 *)
 
 signature PRETTY =
 sig
-  val spaces: int -> string
   val default_indent: string -> int -> Output.output
   val add_mode: string -> (string -> int -> Output.output) -> unit
   type T
+  val make_block: Output.output * Output.output -> bool -> int -> T list -> T
   val str: string -> T
   val brk: int -> T
+  val brk_indent: int -> int -> T
   val fbrk: T
   val breaks: T list -> T list
   val fbreaks: T list -> T list
   val blk: int * T list -> T
   val block: T list -> T
   val strs: string list -> T
-  val raw_markup: Output.output * Output.output -> int * T list -> T
   val markup: Markup.T -> T list -> T
   val mark: Markup.T -> T -> T
   val mark_str: Markup.T * string -> T
@@ -67,7 +66,7 @@
   val writeln: T -> unit
   val symbolic_output: T -> Output.output
   val symbolic_string_of: T -> string
-  val str_of: T -> string
+  val unformatted_string_of: T -> string
   val markup_chunks: Markup.T -> T list -> T
   val chunks: T list -> T
   val chunks2: T list -> T
@@ -81,23 +80,9 @@
 structure Pretty: PRETTY =
 struct
 
-(** spaces **)
-
-local
-  val small_spaces = Vector.tabulate (65, fn i => replicate_string i Symbol.space);
-in
-  fun spaces k =
-    if k < 64 then Vector.sub (small_spaces, k)
-    else
-      replicate_string (k div 64) (Vector.sub (small_spaces, 64)) ^
-        Vector.sub (small_spaces, k mod 64);
-end;
-
-
-
 (** print mode operations **)
 
-fun default_indent (_: string) = spaces;
+fun default_indent (_: string) = Symbol.spaces;
 
 local
   val default = {indent = default_indent};
@@ -115,7 +100,7 @@
 
 fun mode_indent x y = #indent (get_mode ()) x y;
 
-val output_spaces = Output.output o spaces;
+val output_spaces = Output.output o Symbol.spaces;
 val add_indent = Buffer.add o output_spaces;
 
 
@@ -123,41 +108,51 @@
 (** printing items: compound phrases, strings, and breaks **)
 
 abstype T =
-    Block of (Output.output * Output.output) * T list * int * int
-      (*markup output, body, indentation, length*)
-  | String of Output.output * int  (*text, length*)
-  | Break of bool * int  (*mandatory flag, width if not taken*)
+    Block of (Output.output * Output.output) * bool * int * T list * int
+      (*markup output, consistent, indentation, body, length*)
+  | Break of bool * int * int  (*mandatory flag, width if not taken, extra indentation if taken*)
+  | Str of Output.output * int  (*text, length*)
 with
 
-fun length (Block (_, _, _, len)) = len
-  | length (String (_, len)) = len
-  | length (Break (_, wd)) = wd;
+fun length (Block (_, _, _, _, len)) = len
+  | length (Break (_, wd, _)) = wd
+  | length (Str (_, len)) = len;
+
+fun make_block markup consistent indent body =
+  let
+    fun body_length prts len =
+      let
+        val (line, rest) = take_prefix (fn Break (true, _, _) => false | _ => true) prts;
+        val len' = Int.max (fold (Integer.add o length) line 0, len);
+      in
+        (case rest of
+          Break (true, _, ind) :: rest' =>
+            body_length (Break (false, indent + ind, 0) :: rest') len'
+        | [] => len')
+      end;
+  in Block (markup, consistent, indent, body, body_length body 0) end;
+
+fun markup_block markup indent es =
+  make_block (Markup.output markup) false indent es;
 
 
 
 (** derived operations to create formatting expressions **)
 
-val str = String o Output.output_width;
+val str = Str o Output.output_width;
 
-fun brk wd = Break (false, wd);
-val fbrk = Break (true, 1);
+fun brk wd = Break (false, wd, 0);
+fun brk_indent wd ind = Break (false, wd, ind);
+val fbrk = Break (true, 1, 0);
 
 fun breaks prts = Library.separate (brk 1) prts;
 fun fbreaks prts = Library.separate fbrk prts;
 
-fun raw_markup m (indent, es) =
-  let
-    fun sum [] k = k
-      | sum (e :: es) k = sum es (length e + k);
-  in Block (m, es, indent, sum es 0) end;
-
-fun markup_block m arg = raw_markup (Markup.output m) arg;
-
-val blk = markup_block Markup.empty;
+fun blk (ind, es) = markup_block Markup.empty ind es;
 fun block prts = blk (2, prts);
 val strs = block o breaks o map str;
 
-fun markup m prts = markup_block m (0, prts);
+fun markup m = markup_block m 0;
 fun mark m prt = if m = Markup.empty then prt else markup m [prt];
 fun mark_str (m, s) = mark m (str s);
 fun marks_str (ms, s) = fold_rev mark ms (str s);
@@ -195,11 +190,12 @@
 fun big_list name prts = block (fbreaks (str name :: prts));
 
 fun indent 0 prt = prt
-  | indent n prt = blk (0, [str (spaces n), prt]);
+  | indent n prt = blk (0, [str (Symbol.spaces n), prt]);
 
-fun unbreakable (Break (_, wd)) = String (output_spaces wd, wd)
-  | unbreakable (Block (m, es, indent, wd)) = Block (m, map unbreakable es, indent, wd)
-  | unbreakable (e as String _) = e;
+fun unbreakable (Block (m, consistent, indent, es, len)) =
+      Block (m, consistent, indent, map unbreakable es, len)
+  | unbreakable (Break (_, wd, _)) = Str (output_spaces wd, wd)
+  | unbreakable (e as Str _) = e;
 
 
 
@@ -247,15 +243,17 @@
 
 (*Add the lengths of the expressions until the next Break; if no Break then
   include "after", to account for text following this block.*)
-fun breakdist (Break _ :: _, _) = 0
-  | breakdist (Block (_, _, _, len) :: es, after) = len + breakdist (es, after)
-  | breakdist (String (_, len) :: es, after) = len + breakdist (es, after)
-  | breakdist ([], after) = after;
+fun break_dist (Break _ :: _, _) = 0
+  | break_dist (e :: es, after) = length e + break_dist (es, after)
+  | break_dist ([], after) = after;
+
+val force_break = fn Break (false, wd, ind) => Break (true, wd, ind) | e => e;
+val force_all = map force_break;
 
 (*Search for the next break (at this or higher levels) and force it to occur.*)
-fun forcenext [] = []
-  | forcenext (Break _ :: es) = fbrk :: es
-  | forcenext (e :: es) = e :: forcenext es;
+fun force_next [] = []
+  | force_next ((e as Break _) :: es) = force_break e :: es
+  | force_next (e :: es) = e :: force_next es;
 
 in
 
@@ -270,29 +268,31 @@
     fun format ([], _, _) text = text
       | format (e :: es, block as (_, blockin), after) (text as {ind, pos, nl, ...}) =
           (case e of
-            Block ((bg, en), bes, indent, _) =>
+            Block ((bg, en), consistent, indent, bes, blen) =>
               let
                 val pos' = pos + indent;
                 val pos'' = pos' mod emergencypos;
                 val block' =
                   if pos' < emergencypos then (ind |> add_indent indent, pos')
                   else (add_indent pos'' Buffer.empty, pos'');
+                val d = break_dist (es, after)
+                val bes' = if consistent andalso pos + blen > margin - d then force_all bes else bes;
                 val btext: text = text
                   |> control bg
-                  |> format (bes, block', breakdist (es, after))
+                  |> format (bes', block', d)
                   |> control en;
                 (*if this block was broken then force the next break*)
-                val es' = if nl < #nl btext then forcenext es else es;
+                val es' = if nl < #nl btext then force_next es else es;
               in format (es', block, after) btext end
-          | Break (force, wd) =>
+          | Break (force, wd, ind) =>
               (*no break if text to next break fits on this line
                 or if breaking would add only breakgain to space*)
               format (es, block, after)
                 (if not force andalso
-                    pos + wd <= Int.max (margin - breakdist (es, after), blockin + breakgain)
-                  then text |> blanks wd  (*just insert wd blanks*)
-                  else text |> newline |> indentation block)
-          | String str => format (es, block, after) (string str text));
+                    pos + wd <= Int.max (margin - break_dist (es, after), blockin + breakgain)
+                 then text |> blanks wd  (*just insert wd blanks*)
+                 else text |> newline |> indentation block |> blanks ind)
+          | Str str => format (es, block, after) (string str text));
   in
     #tx (format ([input], (Buffer.empty, 0), 0) empty)
   end;
@@ -305,23 +305,23 @@
 (*symbolic markup -- no formatting*)
 fun symbolic prt =
   let
-    fun out (Block ((bg, en), [], _, _)) = Buffer.add bg #> Buffer.add en
-      | out (Block ((bg, en), prts, indent, _)) =
+    fun out (Block ((bg, en), _, _, [], _)) = Buffer.add bg #> Buffer.add en
+      | out (Block ((bg, en), consistent, indent, prts, _)) =
           Buffer.add bg #>
-          Buffer.markup (Markup.block indent) (fold out prts) #>
+          Buffer.markup (Markup.block consistent indent) (fold out prts) #>
           Buffer.add en
-      | out (String (s, _)) = Buffer.add s
-      | out (Break (false, wd)) =
-          Buffer.markup (Markup.break wd) (Buffer.add (output_spaces wd))
-      | out (Break (true, _)) = Buffer.add (Output.output "\n");
+      | out (Break (false, wd, ind)) =
+          Buffer.markup (Markup.break wd ind) (Buffer.add (output_spaces wd))
+      | out (Break (true, _, _)) = Buffer.add (Output.output "\n")
+      | out (Str (s, _)) = Buffer.add s;
   in out prt Buffer.empty end;
 
 (*unformatted output*)
 fun unformatted prt =
   let
-    fun fmt (Block ((bg, en), prts, _, _)) = Buffer.add bg #> fold fmt prts #> Buffer.add en
-      | fmt (String (s, _)) = Buffer.add s
-      | fmt (Break (_, wd)) = Buffer.add (output_spaces wd);
+    fun fmt (Block ((bg, en), _, _, prts, _)) = Buffer.add bg #> fold fmt prts #> Buffer.add en
+      | fmt (Break (_, wd, _)) = Buffer.add (output_spaces wd)
+      | fmt (Str (s, _)) = Buffer.add s;
   in fmt prt Buffer.empty end;
 
 
@@ -343,7 +343,7 @@
 val symbolic_output = Buffer.content o symbolic;
 val symbolic_string_of = Output.escape o symbolic_output;
 
-val str_of = Output.escape o Buffer.content o unformatted;
+val unformatted_string_of = Output.escape o Buffer.content o unformatted;
 
 
 (* chunks *)
@@ -376,13 +376,15 @@
 
 (** ML toplevel pretty printing **)
 
-fun to_ML (Block (m, prts, ind, _)) = ML_Pretty.Block (m, map to_ML prts, ind)
-  | to_ML (String s) = ML_Pretty.String s
-  | to_ML (Break b) = ML_Pretty.Break b;
+fun to_ML (Block (m, consistent, indent, prts, _)) =
+      ML_Pretty.Block (m, consistent, indent, map to_ML prts)
+  | to_ML (Break b) = ML_Pretty.Break b
+  | to_ML (Str s) = ML_Pretty.String s;
 
-fun from_ML (ML_Pretty.Block (m, prts, ind)) = raw_markup m (ind, map from_ML prts)
-  | from_ML (ML_Pretty.String s) = String s
-  | from_ML (ML_Pretty.Break b) = Break b;
+fun from_ML (ML_Pretty.Block (m, consistent, indent, prts)) =
+      make_block m consistent indent (map from_ML prts)
+  | from_ML (ML_Pretty.Break b) = Break b
+  | from_ML (ML_Pretty.String s) = Str s;
 
 end;
 
--- a/src/Pure/General/pretty.scala	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/General/pretty.scala	Mon Dec 21 21:34:14 2015 +0100
@@ -9,18 +9,26 @@
 
 object Pretty
 {
-  /* spaces */
+  /* XML constructors */
 
-  val space = " "
-
-  private val static_spaces = space * 4000
+  val space: XML.Body = List(XML.Text(Symbol.space))
+  def spaces(n: Int): XML.Body =
+    if (n == 0) Nil
+    else if (n == 1) space
+    else List(XML.Text(Symbol.spaces(n)))
 
-  def spaces(k: Int): String =
-  {
-    require(k >= 0)
-    if (k < static_spaces.length) static_spaces.substring(0, k)
-    else space * k
-  }
+  def block(consistent: Boolean, indent: Int, body: XML.Body): XML.Tree =
+    XML.Elem(Markup.Block(consistent, indent), body)
+  def block(indent: Int, body: XML.Body): XML.Tree = block(false, indent, body)
+  def block(body: XML.Body): XML.Tree = block(2, body)
+
+  def brk(width: Int, indent: Int = 0): XML.Tree =
+    XML.Elem(Markup.Break(width, indent), spaces(width))
+
+  val fbrk: XML.Tree = XML.Text("\n")
+
+  val Separator: XML.Body = List(XML.elem(Markup.SEPARATOR, space), fbrk)
+  def separate(ts: List[XML.Tree]): XML.Body = Library.separate(Separator, ts.map(List(_))).flatten
 
 
   /* text metric -- standardized to width of space */
@@ -40,150 +48,131 @@
 
   /* markup trees with physical blocks and breaks */
 
-  def block(body: XML.Body): XML.Tree = Block(2, body)
-
-  object Block
-  {
-    def apply(i: Int, body: XML.Body): XML.Tree =
-      XML.Elem(Markup.Block(i), body)
+  private sealed abstract class Tree { def length: Double }
+  private case class Block(
+    markup: Option[(Markup, Option[XML.Body])],
+    consistent: Boolean, indent: Int, body: List[Tree], length: Double) extends Tree
+  private case class Break(force: Boolean, width: Int, indent: Int) extends Tree
+  { def length: Double = width.toDouble }
+  private case class Str(string: String, length: Double) extends Tree
 
-    def unapply(tree: XML.Tree): Option[(Int, XML.Body)] =
-      tree match {
-        case XML.Elem(Markup.Block(i), body) => Some((i, body))
-        case _ => None
-      }
-  }
-
-  object Break
-  {
-    def apply(w: Int): XML.Tree =
-      XML.Elem(Markup.Break(w), List(XML.Text(spaces(w))))
+  private val FBreak = Break(true, 1, 0)
 
-    def unapply(tree: XML.Tree): Option[Int] =
-      tree match {
-        case XML.Elem(Markup.Break(w), _) => Some(w)
-        case _ => None
+  private def make_block(
+      markup: Option[(Markup, Option[XML.Body])],
+      consistent: Boolean,
+      indent: Int,
+      body: List[Tree]): Tree =
+  {
+    def body_length(prts: List[Tree], len: Double): Double =
+    {
+      val (line, rest) =
+        Library.take_prefix[Tree]({ case Break(true, _, _) => false case _ => true }, prts)
+      val len1 = ((0.0 /: line) { case (l, t) => l + t.length }) max len
+      rest match {
+        case Break(true, _, ind) :: rest1 =>
+          body_length(Break(false, indent + ind, 0) :: rest1, len1)
+        case Nil => len1
       }
+    }
+    Block(markup, consistent, indent, body, body_length(body, 0.0))
   }
 
-  val FBreak = XML.Text("\n")
-
-  def item(body: XML.Body): XML.Tree =
-    Block(2, XML.elem(Markup.BULLET, List(XML.Text(space))) :: XML.Text(space) :: body)
-
-  val Separator = List(XML.elem(Markup.SEPARATOR, List(XML.Text(space))), FBreak)
-  def separate(ts: List[XML.Tree]): XML.Body = Library.separate(Separator, ts.map(List(_))).flatten
-
-
-  /* standard form */
-
-  def standard_form(body: XML.Body): XML.Body =
-    body flatMap {
-      case XML.Wrapped_Elem(markup, body1, body2) =>
-        List(XML.Wrapped_Elem(markup, body1, standard_form(body2)))
-      case XML.Elem(markup, body) =>
-        if (markup.name == Markup.ITEM) List(item(standard_form(body)))
-        else List(XML.Elem(markup, standard_form(body)))
-      case XML.Text(text) => Library.separate(FBreak, split_lines(text).map(XML.Text))
-    }
-
 
   /* formatted output */
 
+  private sealed case class Text(tx: XML.Body = Nil, pos: Double = 0.0, nl: Int = 0)
+  {
+    def newline: Text = copy(tx = fbrk :: tx, pos = 0.0, nl = nl + 1)
+    def string(s: String, len: Double): Text =
+      copy(tx = if (s == "") tx else XML.Text(s) :: tx, pos = pos + len)
+    def blanks(wd: Int): Text = string(Symbol.spaces(wd), wd.toDouble)
+    def content: XML.Body = tx.reverse
+  }
+
+  private def break_dist(trees: List[Tree], after: Double): Double =
+    trees match {
+      case (_: Break) :: _ => 0.0
+      case t :: ts => t.length + break_dist(ts, after)
+      case Nil => after
+    }
+
+  private def force_break(tree: Tree): Tree =
+    tree match { case Break(false, wd, ind) => Break(true, wd, ind) case _ => tree }
+  private def force_all(trees: List[Tree]): List[Tree] = trees.map(force_break(_))
+
+  private def force_next(trees: List[Tree]): List[Tree] =
+    trees match {
+      case Nil => Nil
+      case (t: Break) :: ts => force_break(t) :: ts
+      case t :: ts => t :: force_next(ts)
+    }
+
   private val margin_default = 76.0
 
   def formatted(input: XML.Body, margin: Double = margin_default,
     metric: Metric = Metric_Default): XML.Body =
   {
-    sealed case class Text(tx: XML.Body = Nil, pos: Double = 0.0, nl: Int = 0)
-    {
-      def newline: Text = copy(tx = FBreak :: tx, pos = 0.0, nl = nl + 1)
-      def string(s: String): Text = copy(tx = XML.Text(s) :: tx, pos = pos + metric(s))
-      def blanks(wd: Int): Text = string(spaces(wd))
-      def content: XML.Body = tx.reverse
-    }
-
     val breakgain = margin / 20
     val emergencypos = (margin / 2).round.toInt
 
-    def content_length(tree: XML.Tree): Double =
-      XML.traverse_text(List(tree))(0.0)(_ + metric(_))
-
-    def breakdist(trees: XML.Body, after: Double): Double =
-      trees match {
-        case Break(_) :: _ => 0.0
-        case FBreak :: _ => 0.0
-        case t :: ts => content_length(t) + breakdist(ts, after)
-        case Nil => after
+    def make_tree(inp: XML.Body): List[Tree] =
+      inp flatMap {
+        case XML.Wrapped_Elem(markup, body1, body2) =>
+          List(make_block(Some(markup, Some(body1)), false, 0, make_tree(body2)))
+        case XML.Elem(markup, body) =>
+          markup match {
+            case Markup.Block(consistent, indent) =>
+              List(make_block(None, consistent, indent, make_tree(body)))
+            case Markup.Break(width, indent) =>
+              List(Break(false, width, indent))
+            case Markup(Markup.ITEM, _) =>
+              List(make_block(None, false, 2,
+                make_tree(XML.elem(Markup.BULLET, space) :: space ::: body)))
+            case _ =>
+              List(make_block(Some((markup, None)), false, 0, make_tree(body)))
+          }
+        case XML.Text(text) =>
+          Library.separate(FBreak, split_lines(text).map(s => Str(s, metric(s))))
       }
 
-    def forcenext(trees: XML.Body): XML.Body =
-      trees match {
-        case Nil => Nil
-        case FBreak :: _ => trees
-        case Break(_) :: ts => FBreak :: ts
-        case t :: ts => t :: forcenext(ts)
-      }
-
-    def format(trees: XML.Body, blockin: Int, after: Double, text: Text): Text =
+    def format(trees: List[Tree], blockin: Int, after: Double, text: Text): Text =
       trees match {
         case Nil => text
 
-        case Block(indent, body) :: ts =>
+        case Block(markup, consistent, indent, body, blen) :: ts =>
           val pos1 = (text.pos + indent).ceil.toInt
           val pos2 = pos1 % emergencypos
-          val blockin1 =
-            if (pos1 < emergencypos) pos1
-            else pos2
-          val btext = format(body, blockin1, breakdist(ts, after), text)
-          val ts1 = if (text.nl < btext.nl) forcenext(ts) else ts
+          val blockin1 = if (pos1 < emergencypos) pos1 else pos2
+          val d = break_dist(ts, after)
+          val body1 = if (consistent && text.pos + blen > margin - d) force_all(body) else body
+          val btext =
+            markup match {
+              case None => format(body1, blockin1, d, text)
+              case Some((m, markup_body)) =>
+                val btext0 = format(body1, blockin1, d, text.copy(tx = Nil))
+                val elem =
+                  markup_body match {
+                    case None => XML.Elem(m, btext0.content)
+                    case Some(b) => XML.Wrapped_Elem(m, b, btext0.content)
+                  }
+                btext0.copy(tx = elem :: text.tx)
+            }
+          val ts1 = if (text.nl < btext.nl) force_next(ts) else ts
           format(ts1, blockin, after, btext)
 
-        case Break(wd) :: ts =>
-          if (text.pos + wd <= ((margin - breakdist(ts, after)) max (blockin + breakgain)))
+        case Break(force, wd, ind) :: ts =>
+          if (!force &&
+              text.pos + wd <= ((margin - break_dist(ts, after)) max (blockin + breakgain)))
             format(ts, blockin, after, text.blanks(wd))
-          else format(ts, blockin, after, text.newline.blanks(blockin))
-        case FBreak :: ts => format(ts, blockin, after, text.newline.blanks(blockin))
+          else format(ts, blockin, after, text.newline.blanks(blockin + ind))
 
-        case XML.Wrapped_Elem(markup, body1, body2) :: ts =>
-          val btext = format(body2, blockin, breakdist(ts, after), text.copy(tx = Nil))
-          val ts1 = if (text.nl < btext.nl) forcenext(ts) else ts
-          val btext1 = btext.copy(tx = XML.Wrapped_Elem(markup, body1, btext.content) :: text.tx)
-          format(ts1, blockin, after, btext1)
-
-        case XML.Elem(markup, body) :: ts =>
-          val btext = format(body, blockin, breakdist(ts, after), text.copy(tx = Nil))
-          val ts1 = if (text.nl < btext.nl) forcenext(ts) else ts
-          val btext1 = btext.copy(tx = XML.Elem(markup, btext.content) :: text.tx)
-          format(ts1, blockin, after, btext1)
-
-        case XML.Text(s) :: ts => format(ts, blockin, after, text.string(s))
+        case Str(s, len) :: ts => format(ts, blockin, after, text.string(s, len))
       }
-
-    format(standard_form(input), 0, 0.0, Text()).content
+    format(make_tree(input), 0, 0.0, Text()).content
   }
 
   def string_of(input: XML.Body, margin: Double = margin_default,
       metric: Metric = Metric_Default): String =
     XML.content(formatted(input, margin, metric))
-
-
-  /* unformatted output */
-
-  def unformatted(input: XML.Body): XML.Body =
-  {
-    def fmt(tree: XML.Tree): XML.Body =
-      tree match {
-        case Block(_, body) => body.flatMap(fmt)
-        case Break(wd) => List(XML.Text(spaces(wd)))
-        case FBreak => List(XML.Text(space))
-        case XML.Wrapped_Elem(markup, body1, body2) =>
-          List(XML.Wrapped_Elem(markup, body1, body2.flatMap(fmt)))
-        case XML.Elem(markup, body) => List(XML.Elem(markup, body.flatMap(fmt)))
-        case XML.Text(_) => List(tree)
-      }
-    standard_form(input).flatMap(fmt)
-  }
-
-  def str_of(input: XML.Body): String = XML.content(unformatted(input))
 }
--- a/src/Pure/General/symbol.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/General/symbol.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -7,6 +7,7 @@
 signature SYMBOL =
 sig
   type symbol = string
+  val spaces: int -> string
   val STX: symbol
   val DEL: symbol
   val space: symbol
@@ -94,6 +95,16 @@
 
 val space = chr 32;
 
+local
+  val small_spaces = Vector.tabulate (65, fn i => replicate_string i space);
+in
+  fun spaces n =
+    if n < 64 then Vector.sub (small_spaces, n)
+    else
+      replicate_string (n div 64) (Vector.sub (small_spaces, 64)) ^
+        Vector.sub (small_spaces, n mod 64);
+end;
+
 val comment = "\\<comment>";
 
 fun is_char s = size s = 1;
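
A quick illustrative check of the cached Symbol.spaces above; both expressions evaluate to
true by the definition in the hunk:

  Symbol.spaces 3 = "   ";           (* small widths come from the precomputed vector *)
  size (Symbol.spaces 200) = 200;    (* larger widths are assembled from 64-space chunks *)
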
--- a/src/Pure/General/symbol.scala	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/General/symbol.scala	Mon Dec 21 21:34:14 2015 +0100
@@ -21,6 +21,20 @@
   type Range = Text.Range
 
 
+  /* spaces */
+
+  val space = " "
+
+  private val static_spaces = space * 4000
+
+  def spaces(n: Int): String =
+  {
+    require(n >= 0)
+    if (n < static_spaces.length) static_spaces.substring(0, n)
+    else space * n
+  }
+
+
   /* ASCII characters */
 
   def is_ascii_letter(c: Char): Boolean = 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z'
@@ -425,7 +439,7 @@
       "\\<Xi>", "\\<Pi>", "\\<Sigma>", "\\<Upsilon>", "\\<Phi>",
       "\\<Psi>", "\\<Omega>")
 
-    val blanks = recode_set(" ", "\t", "\n", "\u000B", "\f", "\r", "\r\n")
+    val blanks = recode_set(space, "\t", "\n", "\u000B", "\f", "\r", "\r\n")
 
     val sym_chars =
       Set("!", "#", "$", "%", "&", "*", "+", "-", "/", "<", "=", ">", "?", "@", "^", "_", "|", "~")
--- a/src/Pure/General/url.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/General/url.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -81,7 +81,7 @@
 
 val pretty = Pretty.mark_str o `Markup.url o implode_url;
 
-val print = Pretty.str_of o pretty;
+val print = Pretty.unformatted_string_of o pretty;
 
 
 (*final declarations of this structure!*)
--- a/src/Pure/Isar/runtime.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/Isar/runtime.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -76,6 +76,8 @@
         SOME exns => maps (flatten context) exns
       | NONE => [(context, identify exn)]);
 
+val print_thy = Pretty.unformatted_string_of o Context.pretty_abbrev_thy;
+
 in
 
 fun exn_messages_ids e =
@@ -104,8 +106,7 @@
             | ERROR "" => "Error"
             | ERROR msg => msg
             | Fail msg => raised exn "Fail" [msg]
-            | THEORY (msg, thys) =>
-                raised exn "THEORY" (msg :: map (robust Context.str_of_thy) thys)
+            | THEORY (msg, thys) => raised exn "THEORY" (msg :: map (robust print_thy) thys)
             | Ast.AST (msg, asts) =>
                 raised exn "AST" (msg :: map (robust (Pretty.string_of o Ast.pretty_ast)) asts)
             | TYPE (msg, Ts, ts) =>
--- a/src/Pure/Isar/toplevel.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/Isar/toplevel.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -26,7 +26,6 @@
   val pretty_state: state -> Pretty.T list
   val string_of_state: state -> string
   val pretty_abstract: state -> Pretty.T
-  val profiling: int Unsynchronized.ref
   type transition
   val empty: transition
   val name_of: transition -> string
@@ -204,9 +203,6 @@
 
 (** toplevel transitions **)
 
-val profiling = Unsynchronized.ref 0;
-
-
 (* node transactions -- maintaining stable checkpoints *)
 
 exception FAILURE of state * exn;
@@ -568,10 +564,7 @@
   setmp_thread_position tr (fn state =>
     let
       val timing_start = Timing.start ();
-
-      val (result, opt_err) =
-         state |> (apply_trans int trans |> ! profiling > 0 ? profile (! profiling));
-
+      val (result, opt_err) = apply_trans int trans state;
       val timing_result = Timing.result timing_start;
       val timing_props =
         Markup.command_timing :: (Markup.nameN, name_of tr) :: Position.properties_of (pos_of tr);
--- a/src/Pure/ML-Systems/ml_debugger.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/ML-Systems/ml_debugger.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -21,6 +21,7 @@
   val debug_function_result: state -> ML_Name_Space.valueVal
   val debug_location: state -> 'location
   val debug_name_space: state -> ML_Name_Space.T
+  val debug_local_name_space: state -> ML_Name_Space.T
 end;
 
 structure ML_Debugger: ML_DEBUGGER =
@@ -58,5 +59,6 @@
 fun debug_function_result () = fail ();
 fun debug_location () = fail ();
 fun debug_name_space () = fail ();
+fun debug_local_name_space () = fail ();
 
 end;
--- a/src/Pure/ML-Systems/ml_debugger_polyml-5.6.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/ML-Systems/ml_debugger_polyml-5.6.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -22,6 +22,7 @@
   val debug_function_result: state -> ML_Name_Space.valueVal
   val debug_location: state -> location
   val debug_name_space: state -> ML_Name_Space.T
+  val debug_local_name_space: state -> ML_Name_Space.T
 end;
 
 structure ML_Debugger: ML_DEBUGGER =
@@ -66,5 +67,6 @@
 val debug_function_result = PolyML.DebuggerInterface.debugFunctionResult;
 val debug_location = PolyML.DebuggerInterface.debugLocation;
 val debug_name_space = PolyML.DebuggerInterface.debugNameSpace;
+val debug_local_name_space = PolyML.DebuggerInterface.debugLocalNameSpace;
 
 end;
--- a/src/Pure/ML-Systems/ml_pretty.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/ML-Systems/ml_pretty.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -8,13 +8,13 @@
 struct
 
 datatype pretty =
-  Block of (string * string) * pretty list * int |
+  Block of (string * string) * bool * int * pretty list |
   String of string * int |
-  Break of bool * int;
+  Break of bool * int * int;
 
-fun block prts = Block (("", ""), prts, 2);
+fun block prts = Block (("", ""), false, 2, prts);
 fun str s = String (s, size s);
-fun brk wd = Break (false, wd);
+fun brk width = Break (false, width, 0);
 
 fun pair pretty1 pretty2 ((x, y), depth: int) =
   block [str "(", pretty1 (x, depth), str ",", brk 1, pretty2 (y, depth - 1), str ")"];
@@ -28,4 +28,3 @@
   in block (str lpar :: (elems (Int.max (depth, 0)) args @ [str rpar])) end;
 
 end;
-
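
ML_Pretty.Block now carries a consistent-break flag and an explicit indentation before the body list, and ML_Pretty.Break gains an offset field; the block/str/brk constructors above keep the previous defaults. A minimal sketch (not part of the changeset) of two equivalent ways to build a small tree under the revised datatype:

  (* smart constructors: default block, single break of width 1 *)
  val example = ML_Pretty.block [ML_Pretty.str "f", ML_Pretty.brk 1, ML_Pretty.str "x"];

  (* the same tree spelled out: consistent = false, indent = 2, break offset = 0 *)
  val example' =
    ML_Pretty.Block (("", ""), false, 2,
      [ML_Pretty.String ("f", 1), ML_Pretty.Break (false, 1, 0), ML_Pretty.String ("x", 1)]);
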
--- a/src/Pure/ML-Systems/ml_profiling_polyml-5.6.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/ML-Systems/ml_profiling_polyml-5.6.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -4,14 +4,16 @@
 Profiling for Poly/ML 5.6.
 *)
 
-fun profile 0 f x = f x
-  | profile n f x =
-      let
-        val mode =
-          (case n of
-            1 => PolyML.Profiling.ProfileTime
-          | 2 => PolyML.Profiling.ProfileAllocations
-          | 3 => PolyML.Profiling.ProfileLongIntEmulation
-          | 6 => PolyML.Profiling.ProfileTimeThisThread
-          | _ => raise Fail ("Bad profile mode: " ^ Int.toString n));
-      in PolyML.Profiling.profile mode f x end;
+structure ML_Profiling =
+struct
+
+fun profile_time pr f x =
+  PolyML.Profiling.profileStream pr PolyML.Profiling.ProfileTime f x;
+
+fun profile_time_thread pr f x =
+  PolyML.Profiling.profileStream pr PolyML.Profiling.ProfileTimeThisThread f x;
+
+fun profile_allocations pr f x =
+  PolyML.Profiling.profileStream pr PolyML.Profiling.ProfileAllocations f x;
+
+end;
--- a/src/Pure/ML-Systems/ml_profiling_polyml.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/ML-Systems/ml_profiling_polyml.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -4,10 +4,24 @@
 Profiling for Poly/ML.
 *)
 
-fun profile 0 f x = f x
-  | profile n f x =
-      let
-        val _ = RunCall.run_call1 RuntimeCalls.POLY_SYS_profiler n;
-        val res = Exn.capture f x;
-        val _ = RunCall.run_call1 RuntimeCalls.POLY_SYS_profiler 0;
-      in Exn.release res end;
+structure ML_Profiling =
+struct
+
+local
+
+fun profile n f x =
+  let
+    val _ = RunCall.run_call1 RuntimeCalls.POLY_SYS_profiler n;
+    val res = Exn.capture f x;
+    val _ = RunCall.run_call1 RuntimeCalls.POLY_SYS_profiler 0;
+  in Exn.release res end;
+
+in
+
+fun profile_time (_: (int * string) list -> unit) f x = profile 1 f x;
+fun profile_time_thread (_: (int * string) list -> unit) f x = profile 6 f x;
+fun profile_allocations (_: (int * string) list -> unit) f x = profile 2 f x;
+
+end;
+
+end;
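
Both Poly/ML variants now expose the same ML_Profiling interface: each combinator takes a report function over (count, name) entries, the function to run, and its argument. The Poly/ML 5.6 version feeds actual profiling output to the report function, while the fallback above ignores it. A minimal, hypothetical usage sketch (the report printer is illustrative, not part of the changeset):

  (* print each profile entry as "count  name" *)
  fun report entries =
    List.app (fn (count, name) => writeln (Int.toString count ^ "  " ^ name)) entries;

  (* wrap an arbitrary computation f applied to x, e.g.
       val y = ML_Profiling.profile_time report f x;
       val y = ML_Profiling.profile_allocations report f x; *)
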
--- a/src/Pure/ML-Systems/polyml.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/ML-Systems/polyml.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -132,11 +132,11 @@
 
 val pretty_ml =
   let
-    fun convert _ (PolyML.PrettyBreak (wd, _)) = ML_Pretty.Break (false, wd)
+    fun convert _ (PolyML.PrettyBreak (width, offset)) = ML_Pretty.Break (false, width, offset)
       | convert _ (PolyML.PrettyBlock (_, _,
             [PolyML.ContextProperty ("fbrk", _)], [PolyML.PrettyString " "])) =
-          ML_Pretty.Break (true, 1)
-      | convert len (PolyML.PrettyBlock (ind, _, context, prts)) =
+          ML_Pretty.Break (true, 1, 0)
+      | convert len (PolyML.PrettyBlock (ind, consistent, context, prts)) =
           let
             fun property name default =
               (case List.find (fn PolyML.ContextProperty (a, _) => name = a | _ => false) context of
@@ -145,20 +145,20 @@
             val bg = property "begin" "";
             val en = property "end" "";
             val len' = property "length" len;
-          in ML_Pretty.Block ((bg, en), map (convert len') prts, ind) end
+          in ML_Pretty.Block ((bg, en), consistent, ind, map (convert len') prts) end
       | convert len (PolyML.PrettyString s) =
           ML_Pretty.String (s, case Int.fromString len of SOME i => i | NONE => size s)
   in convert "" end;
 
-fun ml_pretty (ML_Pretty.Break (false, wd)) = PolyML.PrettyBreak (wd, 0)
-  | ml_pretty (ML_Pretty.Break (true, _)) =
+fun ml_pretty (ML_Pretty.Break (false, width, offset)) = PolyML.PrettyBreak (width, offset)
+  | ml_pretty (ML_Pretty.Break (true, _, _)) =
       PolyML.PrettyBlock (0, false, [PolyML.ContextProperty ("fbrk", "")],
         [PolyML.PrettyString " "])
-  | ml_pretty (ML_Pretty.Block ((bg, en), prts, ind)) =
+  | ml_pretty (ML_Pretty.Block ((bg, en), consistent, ind, prts)) =
       let val context =
         (if bg = "" then [] else [PolyML.ContextProperty ("begin", bg)]) @
         (if en = "" then [] else [PolyML.ContextProperty ("end", en)])
-      in PolyML.PrettyBlock (ind, false, context, map ml_pretty prts) end
+      in PolyML.PrettyBlock (ind, consistent, context, map ml_pretty prts) end
   | ml_pretty (ML_Pretty.String (s, len)) =
       if len = size s then PolyML.PrettyString s
       else PolyML.PrettyBlock
--- a/src/Pure/ML-Systems/smlnj.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/ML-Systems/smlnj.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -74,7 +74,12 @@
   (Control.primaryPrompt := p1; Control.secondaryPrompt := p2);
 
 (*dummy implementation*)
-fun profile (n: int) f x = f x;
+structure ML_Profiling =
+struct
+  fun profile_time (_: (int * string) list -> unit) f x = f x;
+  fun profile_time_thread (_: (int * string) list -> unit) f x = f x;
+  fun profile_allocations (_: (int * string) list -> unit) f x = f x;
+end;
 
 (*dummy implementation*)
 fun print_exception_trace (_: exn -> string) (_: string -> unit) f = f ();
@@ -115,12 +120,16 @@
   let
     fun str "" = ()
       | str s = PrettyPrint.string pps s;
-    fun pprint (ML_Pretty.Block ((bg, en), prts, ind)) =
-          (str bg; PrettyPrint.openHOVBox pps (PrettyPrint.Rel (dest_int ind));
-            List.app pprint prts; PrettyPrint.closeBox pps; str en)
+    fun pprint (ML_Pretty.Block ((bg, en), consistent, ind, prts)) =
+         (str bg;
+          (if consistent then PrettyPrint.openHVBox else PrettyPrint.openHOVBox) pps
+            (PrettyPrint.Rel (dest_int ind));
+          List.app pprint prts; PrettyPrint.closeBox pps;
+          str en)
       | pprint (ML_Pretty.String (s, _)) = str s
-      | pprint (ML_Pretty.Break (false, wd)) = PrettyPrint.break pps {nsp = dest_int wd, offset = 0}
-      | pprint (ML_Pretty.Break (true, _)) = PrettyPrint.newline pps;
+      | pprint (ML_Pretty.Break (false, width, offset)) =
+          PrettyPrint.break pps {nsp = dest_int width, offset = dest_int offset}
+      | pprint (ML_Pretty.Break (true, _, _)) = PrettyPrint.newline pps;
   in pprint end;
 
 fun toplevel_pp context path pp =
--- a/src/Pure/PIDE/markup.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/PIDE/markup.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -64,9 +64,9 @@
   val urlN: string val url: string -> T
   val docN: string val doc: string -> T
   val indentN: string
-  val blockN: string val block: int -> T
   val widthN: string
-  val breakN: string val break: int -> T
+  val blockN: string val block: bool -> int -> T
+  val breakN: string val break: int -> int -> T
   val fbreakN: string val fbreak: T
   val itemN: string val item: T
   val wordsN: string val words: T
@@ -377,11 +377,21 @@
 
 (* pretty printing *)
 
+val consistentN = "consistent";
 val indentN = "indent";
-val (blockN, block) = markup_int "block" indentN;
+val widthN = "width";
 
-val widthN = "width";
-val (breakN, break) = markup_int "break" widthN;
+val blockN = "block";
+fun block c i =
+  (blockN,
+    (if c then [(consistentN, print_bool c)] else []) @
+    (if i <> 0 then [(indentN, print_int i)] else []));
+
+val breakN = "break";
+fun break w i =
+  (breakN,
+    (if w <> 0 then [(widthN, print_int w)] else []) @
+    (if i <> 0 then [(indentN, print_int i)] else []));
 
 val (fbreakN, fbreak) = markup_elem "fbreak";
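
The revised Markup.block and Markup.break take their parameters explicitly and omit default-valued properties. A minimal sketch of the resulting values (not part of the changeset; assumes the usual true/false and decimal rendering of print_bool/print_int):

  Markup.block false 0 = ("block", []);
  Markup.block true 2 = ("block", [("consistent", "true"), ("indent", "2")]);
  Markup.break 1 0 = ("break", [("width", "1")]);
  Markup.break 1 3 = ("break", [("width", "1"), ("indent", "3")]);
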
 
--- a/src/Pure/PIDE/markup.scala	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/PIDE/markup.scala	Mon Dec 21 21:34:14 2015 +0100
@@ -180,8 +180,41 @@
 
   /* pretty printing */
 
-  val Block = new Markup_Int("block", "indent")
-  val Break = new Markup_Int("break", "width")
+  val Consistent = new Properties.Boolean("consistent")
+  val Indent = new Properties.Int("indent")
+  val Width = new Properties.Int("width")
+
+  object Block
+  {
+    val name = "block"
+    def apply(c: Boolean, i: Int): Markup =
+      Markup(name,
+        (if (c) Consistent(c) else Nil) :::
+        (if (i != 0) Indent(i) else Nil))
+    def unapply(markup: Markup): Option[(Boolean, Int)] =
+      if (markup.name == name) {
+        val c = Consistent.unapply(markup.properties).getOrElse(false)
+        val i = Indent.unapply(markup.properties).getOrElse(0)
+        Some((c, i))
+      }
+      else None
+  }
+
+  object Break
+  {
+    val name = "break"
+    def apply(w: Int, i: Int): Markup =
+      Markup(name,
+        (if (w != 0) Width(w) else Nil) :::
+        (if (i != 0) Indent(i) else Nil))
+    def unapply(markup: Markup): Option[(Int, Int)] =
+      if (markup.name == name) {
+        val w = Width.unapply(markup.properties).getOrElse(0)
+        val i = Indent.unapply(markup.properties).getOrElse(0)
+        Some((w, i))
+      }
+      else None
+  }
 
   val ITEM = "item"
   val BULLET = "bullet"
--- a/src/Pure/Proof/extraction.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/Proof/extraction.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -121,7 +121,7 @@
 fun extr_name s vs = Long_Name.append "extr" (space_implode "_" (s :: vs));
 fun corr_name s vs = extr_name s vs ^ "_correctness";
 
-fun msg d s = writeln (Pretty.spaces d ^ s);
+fun msg d s = writeln (Symbol.spaces d ^ s);
 
 fun vars_of t = map Var (rev (Term.add_vars t []));
 fun frees_of t = map Free (rev (Term.add_frees t []));
--- a/src/Pure/Syntax/syntax_phases.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/Syntax/syntax_phases.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -750,7 +750,7 @@
         let
           val ((bg1, bg2), en) = typing_elem;
           val bg = bg1 ^ Pretty.symbolic_output (pretty_typ_ast Markup.empty ty) ^ bg2;
-        in SOME (Pretty.raw_markup (bg, en) (0, [pretty_ast Markup.empty t])) end
+        in SOME (Pretty.make_block (bg, en) false 0 [pretty_ast Markup.empty t]) end
       else NONE
 
     and ofsort_trans ty s =
@@ -758,7 +758,7 @@
         let
           val ((bg1, bg2), en) = sorting_elem;
           val bg = bg1 ^ Pretty.symbolic_output (pretty_typ_ast Markup.empty s) ^ bg2;
-        in SOME (Pretty.raw_markup (bg, en) (0, [pretty_typ_ast Markup.empty ty])) end
+        in SOME (Pretty.make_block (bg, en) false 0 [pretty_typ_ast Markup.empty ty]) end
       else NONE
 
     and pretty_typ_ast m ast = ast
--- a/src/Pure/Thy/thy_output.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/Thy/thy_output.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -592,8 +592,9 @@
         #> space_implode "\\isasep\\isanewline%\n"
         #> Latex.environment "isabelle"
       else
-        map ((if Config.get ctxt break then string_of_margin ctxt else Pretty.str_of) #>
-          Output.output)
+        map
+          ((if Config.get ctxt break then string_of_margin ctxt else Pretty.unformatted_string_of)
+            #> Output.output)
         #> space_implode "\\isasep\\isanewline%\n"
         #> enclose "\\isa{" "}");
 
@@ -602,7 +603,7 @@
 
 fun verbatim_text ctxt =
   if Config.get ctxt display then
-    split_lines #> map (prefix (Pretty.spaces (Config.get ctxt indent))) #> cat_lines #>
+    split_lines #> map (prefix (Symbol.spaces (Config.get ctxt indent))) #> cat_lines #>
     Latex.output_ascii #> Latex.environment "isabellett"
   else
     split_lines #>
--- a/src/Pure/Tools/debugger.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/Tools/debugger.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -191,7 +191,7 @@
     fun print x =
       Pretty.from_ML (ML_Name_Space.display_val (x, ML_Options.get_print_depth (), space));
     fun print_all () =
-      #allVal (ML_Debugger.debug_name_space (the_debug_state thread_name index)) ()
+      #allVal (ML_Debugger.debug_local_name_space (the_debug_state thread_name index)) ()
       |> sort_by #1 |> map (Pretty.item o single o print o #2)
       |> Pretty.chunks |> Pretty.string_of |> writeln_message;
   in Context.setmp_thread_data (SOME context) print_all () end;
--- a/src/Pure/context.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/context.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -37,9 +37,7 @@
   val theory_name: theory -> string
   val PureN: string
   val pretty_thy: theory -> Pretty.T
-  val string_of_thy: theory -> string
   val pretty_abbrev_thy: theory -> Pretty.T
-  val str_of_thy: theory -> string
   val get_theory: theory -> string -> theory
   val this_theory: theory -> string -> theory
   val eq_thy_id: theory_id * theory_id -> bool
@@ -170,7 +168,6 @@
   in rev (name :: ancestor_names) end;
 
 val pretty_thy = Pretty.str_list "{" "}" o display_names;
-val string_of_thy = Pretty.string_of o pretty_thy;
 
 fun pretty_abbrev_thy thy =
   let
@@ -179,8 +176,6 @@
     val abbrev = if n > 5 then "..." :: List.drop (names, n - 5) else names;
   in Pretty.str_list "{" "}" abbrev end;
 
-val str_of_thy = Pretty.str_of o pretty_abbrev_thy;
-
 fun get_theory thy name =
   if theory_name thy <> name then
     (case find_first (fn thy' => theory_name thy' = name) (ancestors_of thy) of
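
With string_of_thy and str_of_thy removed, callers compose the remaining pretty operations with a string conversion themselves, as runtime.ML does above with its local print_thy. A minimal sketch of the replacement idiom (not part of the changeset):

  (* former string_of_thy / str_of_thy, reconstructed from the surviving interface *)
  val string_of_thy = Pretty.string_of o Context.pretty_thy;
  val str_of_thy = Pretty.unformatted_string_of o Context.pretty_abbrev_thy;
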
--- a/src/Pure/defs.ML	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/defs.ML	Mon Dec 21 21:34:14 2015 +0100
@@ -147,7 +147,8 @@
 fun disjoint_specs context c (i, {description = a, pos = pos_a, lhs = Ts, ...}: spec) =
   Inttab.forall (fn (j, {description = b, pos = pos_b, lhs = Us, ...}: spec) =>
     i = j orelse disjoint_args (Ts, Us) orelse
-      error ("Clash of specifications for " ^ Pretty.str_of (pretty_item context c) ^ ":\n" ^
+      error ("Clash of specifications for " ^
+        Pretty.unformatted_string_of (pretty_item context c) ^ ":\n" ^
         "  " ^ quote a ^ Position.here pos_a ^ "\n" ^
         "  " ^ quote b ^ Position.here pos_b));
 
--- a/src/Pure/library.scala	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Pure/library.scala	Mon Dec 21 21:34:14 2015 +0100
@@ -205,7 +205,10 @@
     try { Some(new Regex(s)) } catch { case ERROR(_) => None }
 
 
-  /* canonical list operations */
+  /* lists */
+
+  def take_prefix[A](pred: A => Boolean, xs: List[A]): (List[A], List[A]) =
+    (xs.takeWhile(pred), xs.dropWhile(pred))
 
   def member[A, B](xs: List[A])(x: B): Boolean = xs.contains(x)
   def insert[A](x: A)(xs: List[A]): List[A] = if (xs.contains(x)) xs else x :: xs
--- a/src/Tools/jEdit/src/isabelle_sidekick.scala	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Tools/jEdit/src/isabelle_sidekick.scala	Mon Dec 21 21:34:14 2015 +0100
@@ -179,7 +179,7 @@
                 val range = info.range + command_start
                 val content = command.source(info.range).replace('\n', ' ')
                 val info_text =
-                  Pretty.formatted(Library.separate(Pretty.FBreak, info.info), margin = 40.0)
+                  Pretty.formatted(Library.separate(Pretty.fbrk, info.info), margin = 40.0)
                     .mkString
 
                 new DefaultMutableTreeNode(
--- a/src/Tools/jEdit/src/jedit_lib.scala	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Tools/jEdit/src/jedit_lib.scala	Mon Dec 21 21:34:14 2015 +0100
@@ -299,7 +299,7 @@
       def string_width(s: String): Double =
         painter.getFont.getStringBounds(s, painter.getFontRenderContext).getWidth
 
-      val unit = string_width(Pretty.space) max 1.0
+      val unit = string_width(Symbol.space) max 1.0
       val average = string_width("mix") / (3 * unit)
       def apply(s: String): Double = if (s == "\n") 1.0 else string_width(s) / unit
     }
--- a/src/Tools/jEdit/src/rendering.scala	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Tools/jEdit/src/rendering.scala	Mon Dec 21 21:34:14 2015 +0100
@@ -506,7 +506,7 @@
   /* tooltips */
 
   private def pretty_typing(kind: String, body: XML.Body): XML.Tree =
-    Pretty.block(XML.Text(kind) :: Pretty.Break(1) :: body)
+    Pretty.block(XML.Text(kind) :: Pretty.brk(1) :: body)
 
   private def timing_threshold: Double = options.real("jedit_timing_threshold")
 
@@ -585,7 +585,7 @@
       case tips =>
         val r = Text.Range(results.head.range.start, results.last.range.stop)
         val all_tips = (tips.filter(_._1) ++ tips.filter(!_._1).lastOption.toList).map(_._2)
-        Some(Text.Info(r, Library.separate(Pretty.FBreak, all_tips)))
+        Some(Text.Info(r, Library.separate(Pretty.fbrk, all_tips)))
     }
   }
 
--- a/src/Tools/jEdit/src/state_dockable.scala	Sat Dec 19 17:03:17 2015 +0100
+++ b/src/Tools/jEdit/src/state_dockable.scala	Mon Dec 21 21:34:14 2015 +0100
@@ -108,8 +108,6 @@
       locate_button, pretty_text_area.search_label, pretty_text_area.search_field, zoom)
   add(controls.peer, BorderLayout.NORTH)
 
-  override def focusOnDefaultComponent { update_button.requestFocus }
-
 
   /* main */