merged
authordesharna
Fri, 10 Dec 2021 08:58:09 +0100
changeset 74903 d969474ddc45
parent 74902 ece4f07ebb04 (current diff)
parent 74889 7dbac7d3cdab (diff)
child 74904 cab76af373e7
child 74905 246e22068141
merged
src/HOL/ex/Rewrite_Examples.thy
--- a/.hgtags	Fri Dec 10 08:39:34 2021 +0100
+++ b/.hgtags	Fri Dec 10 08:58:09 2021 +0100
@@ -42,3 +42,5 @@
 81cc8f2ea9e720a68f0ba96e2b8d8e98a5ff3152 Isabelle2021-1-RC1
 b92b5a57521b27cf592b835caa8e8d73e05070d2 Isabelle2021-1-RC2
 2b212c8138a57096487461fa353386f753ff7a11 Isabelle2021-1-RC3
+2336356d4180b948eb9070f3f9f8986cda7e8f76 Isabelle2021-1-RC4
+8baf2e8b16e2218edaeb6dee402b21f97a49a505 Isabelle2021-1-RC5
--- a/Admin/Release/CHECKLIST	Fri Dec 10 08:39:34 2021 +0100
+++ b/Admin/Release/CHECKLIST	Fri Dec 10 08:58:09 2021 +0100
@@ -73,7 +73,7 @@
 
 - fully-automated packaging (e.g. on lxcisa0):
 
-  hg up -r DISTNAME && Admin/build_release -J .../java11 -D /p/home/isabelle/dist -b HOL -l -R DISTNAME
+  hg up -r DISTNAME && Admin/build_release -D /p/home/isabelle/dist -b HOL -l -R DISTNAME
 
 - Docker image:
 
--- a/Admin/components/components.sha1	Fri Dec 10 08:39:34 2021 +0100
+++ b/Admin/components/components.sha1	Fri Dec 10 08:58:09 2021 +0100
@@ -96,6 +96,7 @@
 d94e6da7299004890c04a7b395a3f2d381a3281e  flatlaf-1.0-rc3.tar.gz
 7ca3e6a8c9bd837990e64d89e7fa07a7e7cf78ff  flatlaf-1.0.tar.gz
 9908e5ab721f1c0035c0ab04dc7ad0bd00a8db27  flatlaf-1.2.tar.gz
+9534b721b7b78344f3225067ee4df28a5440b87e  flatlaf-1.6.4.tar.gz
 212a0f1f867511722024cc60156fd71872a16f92  flatlaf-1.6.tar.gz
 f339234ec18369679be0095264e0c0af7762f351  gnu-utils-20210414.tar.gz
 71259aa46134e6cf2c6473b4fc408051b3336490  gnu-utils-20211030.tar.gz
@@ -273,6 +274,7 @@
 edcb517b7578db4eec1b6573b624f291776e11f6  naproche-20210124.tar.gz
 d858eb0ede6aea6b8cc40de63bd3a17f8f9f5300  naproche-20210129.tar.gz
 810ee0f35adada9bf970c33fd80b986ab2255bf3  naproche-20210201.tar.gz
+d098dd0873b1720a77dc4e060267f9a6c93f341a  naproche-2d99afe5c349.tar.gz
 4a4e56fd03b7ba4edd38046f853873a90cf55d1a  naproche-4ad61140062f.tar.gz
 77252e0b40f89825b9b5935f9f0c4cd5d4e7012a  naproche-6d0d76ce2f2a.tar.gz
 9c02ecf93863c3289002c5e5ac45a83e2505984c  naproche-755224402e36.tar.gz
@@ -330,8 +332,10 @@
 cb8e85387315f62dcfc6b21ec378186e58068f76  polyml-5.8.2.tar.gz
 d1fd6eced69dc1df7226432fcb824568e0994ff2  polyml-5.8.tar.gz
 fb40145228f84513a9b083b54678a7d61b9c34c4  polyml-5.9-5d4caa8f7148.tar.gz
+0f1c903b043acf7b221821d8b6374b3f943a122b  polyml-5.9-610a153b941d.tar.gz
 5f00a47b8f5180b33e68fcc6c343b061957a0a98  polyml-5.9-960de0cd0795.tar.gz
 7056b285af67902b32f5049349a064f073f05860  polyml-5.9-cc80e2b43c38.tar.gz
+0c396bd6b46ff11a2432b91aab2be0248bd9b0a4  polyml-5.9.tar.gz
 49f1adfacdd6d29fa9f72035d94a31eaac411a97  polyml-test-0a6ebca445fc.tar.gz
 2a8c4421e0a03c0d6ad556b3c36c34eb11568adb  polyml-test-1236652ebd55.tar.gz
 8e83fb5088cf265902b8da753a8eac5fe3f6a14b  polyml-test-159dc81efc3b.tar.gz
@@ -435,6 +439,7 @@
 d33e1e36139e86b9e9a48d8b46a6f90d7863a51c  verit-2021.06-rmx-1.tar.gz
 c11d1120fcefaec79f099fe2be05b03cd2aed8b9  verit-2021.06-rmx.tar.gz
 b576fd5d89767c1067541d4839fb749c6a68d22c  verit-2021.06.1-rmx.tar.gz
+19c6e5677b0a26cbc5805da79d00d06a66b7a671  verit-2021.06.2-rmx.tar.gz
 81d21dfd0ea5c58f375301f5166be9dbf8921a7a  windows_app-20130716.tar.gz
 fe15e1079cf5ad86f3cbab4553722a0d20002d11  windows_app-20130905.tar.gz
 e6a43b7b3b21295853bd2a63b27ea20bd6102f5f  windows_app-20130906.tar.gz
@@ -459,10 +464,12 @@
 86e721296c400ada440e4a9ce11b9e845eec9e25  z3-4.3.0.tar.gz
 a8917c31b31c182edeec0aaa48870844960c8a61  z3-4.3.2pre-1.tar.gz
 06b30757ff23aefbc30479785c212685ffd39f4d  z3-4.3.2pre.tar.gz
+ed37c451b9b748901295898bf713b24d22cc8c17  z3-4.4.0_4.4.1.tar.gz
 93e7e4bddc6afcf87fe2b6656cfcb1b1acd0a4f8  z3-4.4.0pre-1.tar.gz
 b1bc411c2083fc01577070b56b94514676f53854  z3-4.4.0pre-2.tar.gz
 4c366ab255d2e9343fb635d44d4d55ddd24c76d0  z3-4.4.0pre-3.tar.gz
 517ba7b94c1985416c5b411c8ae84456367eb231  z3-4.4.0pre.tar.gz
+6e5d7a65757cac970eb5ad28cd62130c99f42c23  z3-4.4.1.tar.gz
 aa20745f0b03e606b1a4149598e0c7572b63c657  z3-4.8.3.tar.gz
 9dfeb39c87393af7b6a34118507637aa53aca05e  zipperposition-2.0-1.tar.gz
 b884c60653002a7811e3b652ae0515e825d98667  zipperposition-2.0.tar.gz
--- a/Admin/components/main	Fri Dec 10 08:39:34 2021 +0100
+++ b/Admin/components/main	Fri Dec 10 08:58:09 2021 +0100
@@ -5,7 +5,7 @@
 csdp-6.1.1
 cvc4-1.8
 e-2.6-1
-flatlaf-1.6
+flatlaf-1.6.4
 idea-icons-20210508
 isabelle_fonts-20211004
 isabelle_setup-20211109
@@ -17,7 +17,7 @@
 minisat-2.2.1-1
 nunchaku-0.5
 opam-2.0.7
-polyml-5.9-cc80e2b43c38
+polyml-5.9
 postgresql-42.2.24
 scala-2.13.5
 smbc-0.4.1
@@ -26,7 +26,7 @@
 ssh-java-20190323
 stack-2.7.3
 vampire-4.6
-verit-2021.06.1-rmx
+verit-2021.06.2-rmx
 xz-java-1.9
-z3-4.4.0pre-3
+z3-4.4.0_4.4.1
 zipperposition-2.1-1
--- a/Admin/polyml/README	Fri Dec 10 08:39:34 2021 +0100
+++ b/Admin/polyml/README	Fri Dec 10 08:58:09 2021 +0100
@@ -3,8 +3,8 @@
 
 This compilation of Poly/ML (https://www.polyml.org) is based on the
 source distribution from
-https://github.com/polyml/polyml/commit/cc80e2b43c38 (shortly before
-official version 5.9).
+https://github.com/polyml/polyml/commit/39d96a2def90 (official release
+5.9 with minimal additions fixes-5.9).
 
 The Isabelle repository provides an administrative tool "isabelle
 build_polyml", which can be used in the polyml component directory as
@@ -55,4 +55,4 @@
 
 
         Makarius
-        12-Nov-2021
+        26-Nov-2021
--- a/NEWS	Fri Dec 10 08:39:34 2021 +0100
+++ b/NEWS	Fri Dec 10 08:58:09 2021 +0100
@@ -7,6 +7,32 @@
 New in this Isabelle version
 ----------------------------
 
+*** General ***
+
+* Old-style {* verbatim *} tokens have been discontinued (legacy feature
+since Isabelle2019). INCOMPATIBILITY, use \<open>cartouche\<close> syntax instead.
+
+
+*** HOL ***
+
+* Theory "HOL.Relation": Added lemmas asymp_less and asymp_greater to
+  type class preorder.
+
+* Theory "HOL-Library.Multiset":
+  - Consolidated operation and fact names.
+        multp ~> multp_code
+        multeqp ~> multeqp_code
+        multp_cancel_add_mset ~> multp_cancel_add_mset0
+        multp_cancel_add_mset0[simplified] ~> multp_cancel_add_mset
+        multp_code_iff ~> multp_code_iff_mult
+        multeqp_code_iff ~> multeqp_code_iff_reflcl_mult
+    Minor INCOMPATIBILITY.
+  - Moved mult1_lessE out of preorder type class and add explicit
+    assumption. Minor INCOMPATIBILITY.
+  - Added predicate multp equivalent to set mult. Reuse name previously
+    used for what is now called multp_code. Minor INCOMPATIBILITY.
+  - Lifted multiple lemmas from mult to multp.
+  - Redefined less_multiset to be based on multp. INCOMPATIBILITY.
 
 
 New in Isabelle2021-1 (December 2021)
@@ -116,9 +142,18 @@
 explicitly requires document_build=build. Minor INCOMPATIBILITY, need to
 adjust session ROOT options.
 
+* Option "document_comment_latex" enables regular LaTeX comment.sty,
+instead of the historic version for plain TeX (default). The latter is
+much faster, but in conflict with LaTeX classes like Dagstuhl LIPIcs.
+
 * Option "document_echo" informs about document file names during
 session presentation.
 
+* Option "document_heading_prefix" specifies a prefix for the LaTeX
+macro names generated from document heading commands like 'chapter',
+'section' etc. The default is "isamarkup", so 'section' becomes
+"\isamarkupsection" for example.
+
 * The command-line tool "isabelle latex" has been discontinued,
 INCOMPATIBILITY for old document build scripts.
 
@@ -204,12 +239,32 @@
 min.absorb4, max.absorb1, max.absorb2, max.absorb3, max.absorb4. Minor
 INCOMPATIBILITY.
 
+* The Mirabelle testing tool is now part of Main HOL, and accessible via
+the command-line tool "isabelle mirabelle" (implemented in
+Isabelle/Scala). It has become more robust and supports parallelism
+within Isabelle/ML.
+
+* Nitpick: External solver "MiniSat" is available for all supported
+Isabelle platforms (including 64bit Windows and ARM); while
+"MiniSat_JNI" only works for Intel Linux and macOS.
+
+* Nitpick/Kodkod: default is back to external Java process (option
+kodkod_scala = false), both for PIDE and batch builds. This reduces
+confusion and increases robustness of timeouts, despite substantial
+overhead to run an external JVM. For more fine-grained control, the
+kodkod_scala option can be modified within the formal theory context
+like this:
+
+  declare [[kodkod_scala = false]]
+
 * Sledgehammer:
   - Update of bundled provers:
-      E 2.6
-      Vampire 4.6 (with Open Source license)
-      veriT 2021.06-rmx
-      Zipperposition 2.1
+      . E 2.6
+      . Vampire 4.6 (with Open Source license)
+      . veriT 2021.06.1-rmx
+      . Zipperposition 2.1
+      . Z3 4.4.1 for arm64-linux, which approximates Z3 4.4.0pre,
+        but sometimes fails or crashes
   - Adjusted default provers:
       cvc4 vampire verit e spass z3 zipperposition
   - Adjusted Zipperposition's slicing.
@@ -227,10 +282,6 @@
     version 2.4 (release 20200713). The new version fixes one
     implementation defect. Very slight INCOMPATIBILITY.
 
-* Nitpick: External solver "MiniSat" is available for all supported
-Isabelle platforms (including Windows and ARM); while "MiniSat_JNI" only
-works for Intel Linux and macOS.
-
 * Theory HOL-Library.Lattice_Syntax has been superseded by bundle
 "lattice_syntax": it can be used in a local context via 'include' or in
 a global theory via 'unbundle'. The opposite declarations are bundled as
@@ -393,11 +444,11 @@
 syntactic order. The original order of occurrences may be recovered as
 well, e.g. via TFrees.list_set.
 
-* Thm.instantiate, Thm.generalize and related operations require
-scalable datastructures from structure TVars, Vars, Names etc.
-INCOMPATIBILITY: e.g. use TVars.empty and TVars.make for immediate
-adoption; better use TVars.add, TVars.add_tfrees etc. for scalable
-accumulation of items.
+* Thm.instantiate, Thm.generalize and related operations (e.g.
+Variable.import) now use scalable data structures from structure TVars,
+Vars, Names etc. INCOMPATIBILITY: e.g. use TVars.empty and TVars.make
+for immediate adoption; better use TVars.add, TVars.add_tfrees etc. for
+scalable accumulation of items.
 
 * Thm.instantiate_beta applies newly emerging abstractions to their
 arguments in the term, but leaves other beta-redexes unchanged --- in
@@ -509,6 +560,9 @@
 
 *** System ***
 
+* Almost complete support for arm64-linux platform. The reference
+platform is Raspberry Pi 4 with 8 GB RAM running Pi OS (64 bit).
+
 * Update to OpenJDK 17: the current long-term support version of Java.
 
 * Update to Poly/ML 5.9 with improved support for ARM on Linux. On
@@ -546,17 +600,21 @@
 INCOMPATIBILITY: HTTP proxy configuration now works via JVM properties
 https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/net/doc-files/net-properties.html
 
+* System options may declare an implicit standard value, which is used
+when the option is activated without providing an explicit value, e.g.
+"isabelle build -o document -o document_output" instead of
+"isabelle build -o document=true -o document_output=output". For options
+of type "bool", the standard is always "true" and cannot be specified
+differently.
+
+* System option "document=true" is an alias for "document=pdf", and
+"document=false" is an alias for "document=" (empty string).
+
 * System option "system_log" specifies an optional log file for internal
-messages produced by Output.system_message in Isabelle/ML; the value
-"true" refers to console progress of the build job. This works for
+messages produced by Output.system_message in Isabelle/ML; the standard
+value "-" refers to console progress of the build job. This works for
 "isabelle build" or any derivative of it.
 
-* System options of type string may be set to "true" using the short
-notation of type bool. E.g. "isabelle build -o system_log".
-
-* System option "document=true" is an alias for "document=pdf" and thus
-can be used in the short form. E.g. "isabelle build -o document".
-
 * Command-line tool "isabelle version" supports repository archives
 (without full .hg directory). It also provides more options.
 
@@ -567,11 +625,11 @@
 (native Windows) or ISABELLE_APPLE_PLATFORM64 (Apple Silicon).
 
 * Timeouts for Isabelle/ML tools are subject to system option
-"timeout_scale" --- this already used for the overall session build
-process before, and allows to adapt to slow machines. The underlying
-Timeout.apply in Isabelle/ML treats an original timeout specification 0
-as no timeout; before it meant immediate timeout. Rare INCOMPATIBILITY
-in boundary cases.
+"timeout_scale", to support adjustments to slow machines. Before,
+timeout_scale was only used for the overall session build process, now
+it affects the underlying Timeout.apply in Isabelle/ML as well. It
+treats a timeout specification 0 as "no timeout", instead of "immediate
+timeout". Rare INCOMPATIBILITY in boundary cases.
 
 
 
--- a/etc/options	Fri Dec 10 08:39:34 2021 +0100
+++ b/etc/options	Fri Dec 10 08:58:09 2021 +0100
@@ -5,9 +5,9 @@
 option browser_info : bool = false
   -- "generate theory browser information"
 
-option document : string = ""
+option document : string = "" (standard "true")
   -- "build PDF document (enabled for \"pdf\" or \"true\", disabled for \"\" or \"false\")"
-option document_output : string = ""
+option document_output : string = "" (standard "output")
   -- "document output directory"
 option document_echo : bool = false
   -- "inform about document file names during session presentation"
@@ -17,10 +17,14 @@
   -- "default command tags (separated by commas)"
 option document_bibliography : bool = false
   -- "explicitly enable use of bibtex (default: according to presence of root.bib)"
-option document_build : string = "lualatex"
-  -- "document build engine (e.g. lualatex, pdflatex, build)"
+option document_build : string = "lualatex" (standard "build")
+  -- "document build engine (e.g. build, lualatex, pdflatex)"
 option document_logo : string = ""
   -- "generate named instance of Isabelle logo (underscore means unnamed variant)"
+option document_heading_prefix : string = "isamarkup" (standard)
+  -- "prefix for LaTeX macros generated from 'chapter', 'section' etc."
+option document_comment_latex : bool = false
+  -- "use regular LaTeX version of comment.sty, instead of historic plain TeX version"
 
 option thy_output_display : bool = false
   -- "indicate output as multi-line display-style material"
@@ -128,11 +132,11 @@
 option process_output_tail : int = 40
   -- "build process output tail shown to user (in lines, 0 = unlimited)"
 
-option profiling : string = ""
+option profiling : string = "" (standard "time")
   -- "ML profiling (possible values: time, allocations)"
 
-option system_log : string = ""
-  -- "output for system messages (log file or \"true\" for console progress)"
+option system_log : string = "" (standard "-")
+  -- "output for system messages (log file or \"-\" for console progress)"
 
 option system_heaps : bool = false
   -- "store session heaps in $ISABELLE_HEAPS_SYSTEM, not $ISABELLE_HEAPS"
--- a/lib/texinputs/isabelle.sty	Fri Dec 10 08:39:34 2021 +0100
+++ b/lib/texinputs/isabelle.sty	Fri Dec 10 08:58:09 2021 +0100
@@ -140,8 +140,6 @@
 \chardef\isacharbar=`\|%
 \chardef\isacharbraceright=`\}%
 \chardef\isachartilde=`\~%
-\def\isacharverbatimopen{\isacharbraceleft\isacharasterisk}%
-\def\isacharverbatimclose{\isacharasterisk\isacharbraceright}%
 \def\isacartoucheopen{\isatext{\guilsinglleft}}%
 \def\isacartoucheclose{\isatext{\guilsinglright}}%
 }
@@ -275,29 +273,8 @@
 \newcommand{\isamarkupcancel}[1]{\isa{\xout{#1}}}
 
 
-% tagged regions
-
-%plain TeX version of comment package -- much faster!
-\let\isafmtname\fmtname\def\fmtname{plain}
-\usepackage{comment}
-\let\fmtname\isafmtname
+% tags
 
 \newcommand{\isafold}[1]{\emph{$\langle\mathord{\mathit{#1}}\rangle$}}
 
-\newcommand{\isakeeptag}[1]%
-{\includecomment{isadelim#1}\includecomment{isatag#1}\csarg\def{isafold#1}{}}
-\newcommand{\isadroptag}[1]%
-{\excludecomment{isadelim#1}\excludecomment{isatag#1}\csarg\def{isafold#1}{}}
-\newcommand{\isafoldtag}[1]%
-{\includecomment{isadelim#1}\excludecomment{isatag#1}\csarg\def{isafold#1}{\isafold{#1}}}
-
-\isakeeptag{document}
-\isakeeptag{theory}
-\isakeeptag{proof}
-\isakeeptag{ML}
-\isakeeptag{visible}
-\isadroptag{invisible}
-\isakeeptag{important}
-\isakeeptag{unimportant}
-
 \IfFileExists{isabelletags.sty}{\usepackage{isabelletags}}{}
--- a/src/Doc/Implementation/Logic.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Doc/Implementation/Logic.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -168,6 +168,8 @@
   @{ML_antiquotation_def "type_abbrev"} & : & \<open>ML_antiquotation\<close> \\
   @{ML_antiquotation_def "nonterminal"} & : & \<open>ML_antiquotation\<close> \\
   @{ML_antiquotation_def "typ"} & : & \<open>ML_antiquotation\<close> \\
+  @{ML_antiquotation_def "Type"} & : & \<open>ML_antiquotation\<close> \\
+  @{ML_antiquotation_def "Type_fn"} & : & \<open>ML_antiquotation\<close> \\
   \end{matharray}
 
   \<^rail>\<open>
@@ -180,6 +182,8 @@
    @@{ML_antiquotation nonterminal}) embedded
   ;
   @@{ML_antiquotation typ} type
+  ;
+  (@@{ML_antiquotation Type} | @@{ML_antiquotation Type_fn}) embedded
   \<close>
 
   \<^descr> \<open>@{class c}\<close> inlines the internalized class \<open>c\<close> --- as \<^ML_type>\<open>string\<close>
@@ -199,6 +203,51 @@
 
   \<^descr> \<open>@{typ \<tau>}\<close> inlines the internalized type \<open>\<tau>\<close> --- as constructor term for
   datatype \<^ML_type>\<open>typ\<close>.
+
+  \<^descr> \<open>@{Type source}\<close> refers to embedded source text to produce a type
+  constructor with name (formally checked) and arguments (inlined ML text).
+  The embedded \<open>source\<close> follows the syntax category @{syntax type_const}
+  defined below.
+
+  \<^descr> \<open>@{Type_fn source}\<close> is similar to \<open>@{Type source}\<close>, but produces a partial
+  ML function that matches against a type constructor pattern, following
+  syntax category @{syntax type_const_fn} below.
+
+
+  \<^rail>\<open>
+    @{syntax_def type_const}: @{syntax name} (@{syntax embedded_ml}*)
+    ;
+    @{syntax_def type_const_fn}: @{syntax type_const} @'=>' @{syntax embedded_ml}
+    ;
+    @{syntax_def embedded_ml}:
+      @'_' | @{syntax embedded} | @{syntax control_symbol} @{syntax embedded}
+  \<close>
+
+  The text provided as @{syntax embedded_ml} is treated as regular Isabelle/ML
+  source, possibly with nested antiquotations. ML text that only consists of a
+  single antiquotation in compact control-cartouche notation, can be written
+  without an extra level of nesting embedded text (see the last example
+  below).
+\<close>
+
+text %mlex \<open>
+  Here are some minimal examples for type constructor antiquotations.
+\<close>
+
+ML \<open>
+  \<comment> \<open>type constructor without arguments:\<close>
+  val natT = \<^Type>\<open>nat\<close>;
+
+  \<comment> \<open>type constructor with arguments:\<close>
+  fun mk_funT (A, B) = \<^Type>\<open>fun A B\<close>;
+
+  \<comment> \<open>type constructor decomposition as partial function:\<close>
+  val dest_funT = \<^Type_fn>\<open>fun A B => \<open>(A, B)\<close>\<close>;
+
+  \<comment> \<open>nested type constructors:\<close>
+  fun mk_relT A = \<^Type>\<open>fun A \<^Type>\<open>fun A \<^Type>\<open>bool\<close>\<close>\<close>;
+
+  \<^assert> (mk_relT natT = \<^typ>\<open>nat \<Rightarrow> nat \<Rightarrow> bool\<close>);
 \<close>
 
 
@@ -387,6 +436,9 @@
   @{ML_antiquotation_def "const"} & : & \<open>ML_antiquotation\<close> \\
   @{ML_antiquotation_def "term"} & : & \<open>ML_antiquotation\<close> \\
   @{ML_antiquotation_def "prop"} & : & \<open>ML_antiquotation\<close> \\
+  @{ML_antiquotation_def "Const"} & : & \<open>ML_antiquotation\<close> \\
+  @{ML_antiquotation_def "Const_"} & : & \<open>ML_antiquotation\<close> \\
+  @{ML_antiquotation_def "Const_fn"} & : & \<open>ML_antiquotation\<close> \\
   \end{matharray}
 
   \<^rail>\<open>
@@ -398,6 +450,9 @@
   @@{ML_antiquotation term} term
   ;
   @@{ML_antiquotation prop} prop
+  ;
+  (@@{ML_antiquotation Const} | @@{ML_antiquotation Const_} | @@{ML_antiquotation Const_fn})
+    embedded
   \<close>
 
   \<^descr> \<open>@{const_name c}\<close> inlines the internalized logical constant name \<open>c\<close> ---
@@ -414,6 +469,58 @@
 
   \<^descr> \<open>@{prop \<phi>}\<close> inlines the internalized proposition \<open>\<phi>\<close> --- as constructor
   term for datatype \<^ML_type>\<open>term\<close>.
+
+  \<^descr> \<open>@{Const source}\<close> refers to embedded source text to produce a term
+  constructor with name (formally checked), type arguments and term arguments
+  (inlined ML text). The embedded \<open>source\<close> follows the syntax category
+  @{syntax term_const} defined below, using @{syntax embedded_ml} from
+  antiquotation @{ML_antiquotation Type} (\secref{sec:types}).
+
+  \<^descr> \<open>@{Const_ source}\<close> is similar to \<open>@{Const source}\<close> for patterns instead of
+  closed values. This distinction is required due to redundant type
+  information within term constants: subtrees with duplicate ML pattern
+  variable need to be suppressed (replaced by dummy patterns). The embedded \<open>source\<close> follows
+  the syntax category @{syntax term_const} defined below.
+
+  \<^descr> \<open>@{Const_fn source}\<close> is similar to \<open>@{Const_ source}\<close>, but produces a
+  proper \<^verbatim>\<open>fn\<close> expression with function body. The embedded \<open>source\<close> follows
+  the syntax category @{syntax term_const_fn} defined below.
+
+
+  \<^rail>\<open>
+    @{syntax_def term_const}:
+      @{syntax name} (@{syntax embedded_ml}*) (@{syntax for_args})?
+    ;
+    @{syntax_def term_const_fn}:
+      @{ syntax term_const} @'=>' @{syntax embedded_ml}
+    ;
+    @{syntax_def for_args}: @'for' (@{syntax embedded_ml}+)
+  \<close>
+\<close>
+
+text %mlex \<open>
+  Here are some minimal examples for term constant antiquotations. Type
+  arguments for constants are analogous to type constructors
+  (\secref{sec:types}). Term arguments are different: a flexible number of
+  arguments is inserted via curried applications (\<^ML>\<open>op $\<close>).
+\<close>
+
+ML \<open>
+  \<comment> \<open>constant without type argument:\<close>
+  fun mk_conj (A, B) = \<^Const>\<open>conj for A B\<close>;
+  val dest_conj = \<^Const_fn>\<open>conj for A B => \<open>(A, B)\<close>\<close>;
+
+  \<comment> \<open>constant with type argument:\<close>
+  fun mk_eq T (t, u) = \<^Const>\<open>HOL.eq T for t u\<close>;
+  val dest_eq = \<^Const_fn>\<open>HOL.eq T for t u => \<open>(T, (t, u))\<close>\<close>;
+
+  \<comment> \<open>constant with variable number of term arguments:\<close>
+  val c = Const (\<^const_name>\<open>conj\<close>, \<^typ>\<open>bool \<Rightarrow> bool \<Rightarrow> bool\<close>);
+  val P = \<^term>\<open>P::bool\<close>;
+  val Q = \<^term>\<open>Q::bool\<close>;
+  \<^assert> (\<^Const>\<open>conj\<close> = c);
+  \<^assert> (\<^Const>\<open>conj for P\<close> = c $ P);
+  \<^assert> (\<^Const>\<open>conj for P Q\<close> = c $ P $ Q);
 \<close>
 
 
--- a/src/Doc/Isar_Ref/Outer_Syntax.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Doc/Isar_Ref/Outer_Syntax.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -234,12 +234,11 @@
 
 text \<open>
   A chunk of document @{syntax text} is usually given as @{syntax cartouche}
-  \<open>\<open>\<dots>\<close>\<close> or @{syntax verbatim}, i.e.\ enclosed in \<^verbatim>\<open>{*\<close>~\<open>\<dots>\<close>~\<^verbatim>\<open>*}\<close>. For
-  convenience, any of the smaller text unit that conforms to @{syntax name} is
-  admitted as well.
+  \<open>\<open>\<dots>\<close>\<close>. For convenience, any of the smaller text unit that conforms to
+  @{syntax name} is admitted as well.
 
   \<^rail>\<open>
-    @{syntax_def text}: @{syntax embedded} | @{syntax verbatim}
+    @{syntax_def text}: @{syntax embedded}
   \<close>
 
   Typical uses are document markup commands, like \<^theory_text>\<open>chapter\<close>, \<^theory_text>\<open>section\<close> etc.
--- a/src/Doc/System/Presentation.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Doc/System/Presentation.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -161,17 +161,33 @@
   \<^medskip> Isabelle is usually smart enough to create the PDF from the given
   \<^verbatim>\<open>root.tex\<close> and optional \<^verbatim>\<open>root.bib\<close> (bibliography) and \<^verbatim>\<open>root.idx\<close> (index)
   using standard {\LaTeX} tools. Actual command-lines are given by settings
-  @{setting_ref ISABELLE_PDFLATEX}, @{setting_ref ISABELLE_LUALATEX},
+  @{setting_ref ISABELLE_LUALATEX} (or @{setting_ref ISABELLE_PDFLATEX}),
   @{setting_ref ISABELLE_BIBTEX}, @{setting_ref ISABELLE_MAKEINDEX}: these
   variables are used without quoting in shell scripts, and thus may contain
   additional options.
 
-  Alternatively, the session \<^verbatim>\<open>ROOT\<close> may include an option
-  \<^verbatim>\<open>document_build=build\<close> together with an executable \<^verbatim>\<open>build\<close> script in
-  \isakeyword{document\_files}: it is invoked with command-line arguments for
-  the document format (\<^verbatim>\<open>pdf\<close>) and the document variant name. The script needs
-  to produce corresponding output files, e.g.\ \<^verbatim>\<open>root.pdf\<close> for default
-  document variants.
+  The system option @{system_option_def "document_build"} specifies an
+  alternative build engine, e.g. within the session \<^verbatim>\<open>ROOT\<close> file as
+  ``\<^verbatim>\<open>options [document_build = pdflatex]\<close>''. The following standard engines
+  are available:
+
+    \<^item> \<^verbatim>\<open>lualatex\<close> (default) uses the shell command \<^verbatim>\<open>$ISABELLE_LUALATEX\<close> on
+    the main \<^verbatim>\<open>root.tex\<close> file, with further runs of \<^verbatim>\<open>$ISABELLE_BIBTEX\<close> and
+    \<^verbatim>\<open>$ISABELLE_MAKEINDEX\<close> as required.
+
+    \<^item> \<^verbatim>\<open>pdflatex\<close> uses \<^verbatim>\<open>$ISABELLE_PDFLATEX\<close> instead of \<^verbatim>\<open>$ISABELLE_LUALATEX\<close>,
+    and the other tools as above.
+
+    \<^item> \<^verbatim>\<open>build\<close> invokes an executable script of the same name in a private
+    directory containing all \isakeyword{document\_files} and other generated
+    document sources. The script is invoked as ``\<^verbatim>\<open>./build pdf\<close>~\<open>name\<close>'' for
+    the document variant name; it needs to produce a corresponding
+    \<open>name\<close>\<^verbatim>\<open>.pdf\<close> file by arbitrary means on its own.
+
+  Further engines can be defined by add-on components in Isabelle/Scala
+  (\secref{sec:scala-build}), providing a service class derived from
+  \<^scala_type>\<open>isabelle.Document_Build.Engine\<close>. Available classes are listed
+  in \<^scala>\<open>isabelle.Document_Build.engines\<close>.
 \<close>
 
 
--- a/src/Doc/System/Sessions.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Doc/System/Sessions.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -212,6 +212,9 @@
     default configuration with output readily available to the author of the
     document.
 
+    \<^item> @{system_option_def "document_echo"} informs about document file names
+    during session presentation.
+
     \<^item> @{system_option_def "document_variants"} specifies document variants as
     a colon-separated list of \<open>name=tags\<close> entries. The default name
     \<^verbatim>\<open>document\<close>, without additional tags.
@@ -239,10 +242,21 @@
     is occasionally useful to control the global visibility of commands via
     session options (e.g.\ in \<^verbatim>\<open>ROOT\<close>).
 
+    \<^item> @{system_option_def "document_comment_latex"} enables regular {\LaTeX}
+    \<^verbatim>\<open>comment.sty\<close>, instead of the historic version for plain {\TeX}
+    (default). The latter is much faster, but in conflict with {\LaTeX}
+    classes like Dagstuhl
+    LIPIcs\<^footnote>\<open>\<^url>\<open>https://github.com/dagstuhl-publishing/styles\<close>\<close>.
+
     \<^item> @{system_option_def "document_bibliography"} explicitly enables the use
     of \<^verbatim>\<open>bibtex\<close>; the default is to check the presence of \<^verbatim>\<open>root.bib\<close>, but it
     could have a different name.
 
+    \<^item> @{system_option_def "document_heading_prefix"} specifies a prefix for
+    the {\LaTeX} macro names generated from Isar commands like \<^theory_text>\<open>chapter\<close>,
+    \<^theory_text>\<open>section\<close> etc. The default is \<^verbatim>\<open>isamarkup\<close>, e.g. \<^theory_text>\<open>section\<close> becomes
+    \<^verbatim>\<open>\isamarkupsection\<close>.
+
     \<^item> @{system_option_def "threads"} determines the number of worker threads
     for parallel checking of theories and proofs. The default \<open>0\<close> means that a
     sensible maximum value is determined by the underlying hardware. For
@@ -273,8 +287,8 @@
 
     \<^item> @{system_option_def system_log} specifies an optional log file for
     low-level messages produced by \<^ML>\<open>Output.system_message\<close> in
-    Isabelle/ML; the value ``\<^verbatim>\<open>true\<close>'' refers to console progress of the build
-    job.
+    Isabelle/ML; the standard value ``\<^verbatim>\<open>-\<close>'' refers to console progress of the
+    build job.
 
     \<^item> @{system_option_def "system_heaps"} determines the directories for
     session heap images: \<^path>\<open>$ISABELLE_HEAPS\<close> is the user directory and
--- a/src/Doc/Tutorial/ToyList/ToyList.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Doc/Tutorial/ToyList/ToyList.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -107,7 +107,7 @@
 When Isabelle prints a syntax error message, it refers to the HOL syntax as
 the \textbf{inner syntax} and the enclosing theory language as the \textbf{outer syntax}.
 
-Comments\index{comment} must be in enclosed in \texttt{(* }and\texttt{ *)}.
+Comments\index{comment} must be in enclosed in \texttt{(*}and\texttt{*)}.
 
 \section{Evaluation}
 \index{evaluation}
--- a/src/HOL/Analysis/Infinite_Set_Sum.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Analysis/Infinite_Set_Sum.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -10,6 +10,11 @@
   imports Set_Integral Infinite_Sum
 begin
 
+(*
+  WARNING! This file is considered obsolete and will, in the long run, be replaced with
+  the more general "Infinite_Sum".
+*)
+
 text \<open>Conflicting notation from \<^theory>\<open>HOL-Analysis.Infinite_Sum\<close>\<close>
 no_notation Infinite_Sum.abs_summable_on (infixr "abs'_summable'_on" 46)
 
@@ -1239,10 +1244,20 @@
                      y = enn2ereal x}"
       by (metis (mono_tags, lifting) Sup_upper empty_subsetI ennreal_0 finite.emptyI
           mem_Collect_eq sum.empty zero_ennreal.rep_eq)
-    moreover have "Sup {y. \<exists>x. (\<exists>y. finite y \<and> y \<subseteq> A \<and> x = ennreal (sum f y)) \<and>
-                y = enn2ereal x} = Sup {y. \<exists>x. finite x \<and> x \<subseteq> A \<and> y = ereal (sum f x)}"
-      using enn2ereal_ennreal fnn in_mono sum_nonneg Collect_cong
-      by (smt (verit, ccfv_SIG))
+    moreover have "(\<exists>x. (\<exists>y. finite y \<and> y \<subseteq> A \<and> x = ennreal (sum f y)) \<and> y = enn2ereal x) = 
+                   (\<exists>x. finite x \<and> x \<subseteq> A \<and> y = ereal (sum f x))" for y
+    proof -
+      have "(\<exists>x. (\<exists>y. finite y \<and> y \<subseteq> A \<and> x = ennreal (sum f y)) \<and> y = enn2ereal x) \<longleftrightarrow>
+            (\<exists>X x. finite X \<and> X \<subseteq> A \<and> x = ennreal (sum f X) \<and> y = enn2ereal x)"
+        by blast
+      also have "\<dots> \<longleftrightarrow> (\<exists>X. finite X \<and> X \<subseteq> A \<and> y = ereal (sum f X))"
+        by (rule arg_cong[of _ _ Ex])
+           (auto simp: fun_eq_iff intro!: enn2ereal_ennreal sum_nonneg enn2ereal_ennreal[symmetric] fnn)
+      finally show ?thesis .
+    qed
+    hence "Sup {y. \<exists>x. (\<exists>y. finite y \<and> y \<subseteq> A \<and> x = ennreal (sum f y)) \<and> y = enn2ereal x} =
+           Sup {y. \<exists>x. finite x \<and> x \<subseteq> A \<and> y = ereal (sum f x)}"
+      by simp
     ultimately show "max 0 (Sup {y. \<exists>x. (\<exists>xa. finite xa \<and> xa \<subseteq> A \<and> x
                                        = ennreal (sum f xa)) \<and> y = enn2ereal x})
          = Sup {y. \<exists>x. finite x \<and> x \<subseteq> A \<and> y = ereal (sum f x)}"
@@ -1275,7 +1290,7 @@
   then have \<open>sum n F \<le> infsetsum n A\<close> if \<open>finite F\<close> and \<open>F\<subseteq>A\<close> for F
     using that by (auto simp flip: infsetsum_finite simp: n_def[abs_def] intro!: infsetsum_mono_neutral)
   then show \<open>n summable_on A\<close>
-    apply (rule_tac pos_summable_on)
+    apply (rule_tac nonneg_bdd_above_summable_on)
     by (auto simp add: n_def bdd_above_def)
 qed
 
--- a/src/HOL/Analysis/Infinite_Sum.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Analysis/Infinite_Sum.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -1,8 +1,9 @@
 (*
   Title:    HOL/Analysis/Infinite_Sum.thy
   Author:   Dominique Unruh, University of Tartu
+            Manuel Eberl, University of Innsbruck
 
-  A theory of sums over possible infinite sets.
+  A theory of sums over possibly infinite sets.
 *)
 
 section \<open>Infinite sums\<close>
@@ -23,7 +24,7 @@
 
 theory Infinite_Sum
   imports
-    "HOL-Analysis.Elementary_Topology"
+    Elementary_Topology
     "HOL-Library.Extended_Nonnegative_Real"
     "HOL-Library.Complex_Order"
 begin
@@ -107,6 +108,14 @@
   shows \<open>infsum f A = 0\<close>
   by (simp add: assms infsum_def)
 
+lemma summable_iff_has_sum_infsum: "f summable_on A \<longleftrightarrow> has_sum f A (infsum f A)"
+  using infsumI summable_on_def by blast
+
+lemma has_sum_infsum[simp]:
+  assumes \<open>f summable_on S\<close>
+  shows \<open>has_sum f S (infsum f S)\<close>
+  using assms by (auto simp: summable_on_def infsum_def has_sum_def tendsto_Lim)
+
 lemma has_sum_cong_neutral:
   fixes f g :: \<open>'a \<Rightarrow> 'b::{comm_monoid_add, topological_space}\<close>
   assumes \<open>\<And>x. x\<in>T-S \<Longrightarrow> g x = 0\<close>
@@ -131,8 +140,7 @@
       also have \<open>sum f ((F\<inter>S) \<union> (F0\<inter>S)) = sum g F\<close>
         apply (rule sum.mono_neutral_cong)
         using that \<open>finite F0\<close> F0'_def assms by auto
-      finally show ?thesis
-        by -
+      finally show ?thesis .
     qed
     with \<open>F0' \<subseteq> T\<close> \<open>finite F0'\<close> show \<open>eventually P (filtermap (sum g) (finite_subsets_at_top T))\<close>
       by (metis (no_types, lifting) eventually_filtermap eventually_finite_subsets_at_top)
@@ -151,8 +159,7 @@
       also have \<open>sum g ((F\<inter>T) \<union> (F0\<inter>T)) = sum f F\<close>
         apply (rule sum.mono_neutral_cong)
         using that \<open>finite F0\<close> F0'_def assms by auto
-      finally show ?thesis
-        by -
+      finally show ?thesis .
     qed
     with \<open>F0' \<subseteq> S\<close> \<open>finite F0'\<close> show \<open>eventually P (filtermap (sum f) (finite_subsets_at_top S))\<close>
       by (metis (no_types, lifting) eventually_filtermap eventually_finite_subsets_at_top)
@@ -186,7 +193,7 @@
 lemma has_sum_cong: 
   assumes "\<And>x. x\<in>A \<Longrightarrow> f x = g x"
   shows "has_sum f A x \<longleftrightarrow> has_sum g A x"
-  by (smt (verit, best) DiffE IntD2 assms has_sum_cong_neutral)
+  using assms by (intro has_sum_cong_neutral) auto
 
 lemma summable_on_cong:
   assumes "\<And>x. x\<in>A \<Longrightarrow> f x = g x"
@@ -361,9 +368,7 @@
   assumes \<open>\<And>x. x \<in> A-B \<Longrightarrow> f x \<le> 0\<close>
   assumes \<open>\<And>x. x \<in> B-A \<Longrightarrow> g x \<ge> 0\<close>
   shows "infsum f A \<le> infsum g B"
-  apply (rule has_sum_mono_neutral[of f A _ g B _])
-  using assms apply auto
-  by (metis finite_subsets_at_top_neq_bot infsum_def summable_on_def has_sum_def tendsto_Lim)+
+  by (rule has_sum_mono_neutral[of f A _ g B _]) (use assms in \<open>auto intro: has_sum_infsum\<close>)
 
 lemma has_sum_mono:
   fixes f :: "'a\<Rightarrow>'b::{ordered_comm_monoid_add,linorder_topology}"
@@ -407,15 +412,20 @@
     by (meson assms(1) has_sum_def)
   hence *: "\<forall>\<^sub>F F in (finite_subsets_at_top A). dist (sum f F) x < \<epsilon>"
     using assms(2) by (rule tendstoD)
-  show ?thesis
-    by (smt (verit) * eventually_finite_subsets_at_top order_refl)
+  thus ?thesis
+    unfolding eventually_finite_subsets_at_top by fastforce
 qed
 
 lemma infsum_finite_approximation:
   fixes f :: "'a \<Rightarrow> 'b::{comm_monoid_add,metric_space}"
   assumes "f summable_on A" and "\<epsilon> > 0"
   shows "\<exists>F. finite F \<and> F \<subseteq> A \<and> dist (sum f F) (infsum f A) \<le> \<epsilon>"
-  by (metis assms(1) assms(2) finite_subsets_at_top_neq_bot infsum_def summable_on_def has_sum_finite_approximation has_sum_def tendsto_Lim)
+proof -
+  from assms have "has_sum f A (infsum f A)"
+    by (simp add: summable_iff_has_sum_infsum)
+  from this and \<open>\<epsilon> > 0\<close> show ?thesis
+    by (rule has_sum_finite_approximation)
+qed
 
 lemma abs_summable_summable:
   fixes f :: \<open>'a \<Rightarrow> 'b :: banach\<close>
@@ -435,7 +445,7 @@
     have ev_P: \<open>eventually P (finite_subsets_at_top A)\<close>
       using lim
       by (auto simp add: P_def[abs_def] \<open>0 < d\<close> eventually_conj_iff eventually_finite_subsets_at_top_weakI tendsto_iff)
-
+    
     moreover have \<open>dist (sum f F1) (sum f F2) < e\<close> if \<open>P F1\<close> and \<open>P F2\<close> for F1 F2
     proof -
       from ev_P
@@ -447,29 +457,26 @@
       have dist_F: \<open>dist (sum (\<lambda>x. norm (f x)) F) L < d\<close>
         by (metis F_def \<open>F \<subseteq> A\<close> P_def P_sup_F' \<open>finite F\<close> le_supE order_refl)
 
-      from dist_F have \<open>dist (sum (\<lambda>x. norm (f x)) F) (sum (\<lambda>x. norm (f x)) F2) < 2*d\<close>
-        by (smt (verit, ccfv_threshold) P_def dist_norm real_norm_def that(2))
-      then have \<open>norm (sum (\<lambda>x. norm (f x)) (F-F2)) < 2*d\<close>
-        unfolding dist_norm
-        by (metis F_def \<open>finite F\<close> sum_diff sup_commute sup_ge1)
-      then have \<open>norm (sum f (F-F2)) < 2*d\<close>
-        by (smt (verit, ccfv_threshold) real_norm_def sum_norm_le)
-      then have dist_F_F2: \<open>dist (sum f F) (sum f F2) < 2*d\<close>
-        by (metis F_def \<open>finite F\<close> dist_norm sum_diff sup_commute sup_ge1)
+      have dist_F_subset: \<open>dist (sum f F) (sum f F') < 2*d\<close> if F': \<open>F' \<subseteq> F\<close> \<open>P F'\<close> for F'
+      proof -
+        have \<open>dist (sum f F) (sum f F') = norm (sum f (F-F'))\<close>
+          unfolding dist_norm using \<open>finite F\<close> F' by (subst sum_diff) auto
+        also have \<open>\<dots> \<le> norm (\<Sum>x\<in>F-F'. norm (f x))\<close>
+          by (rule order.trans[OF sum_norm_le[OF order.refl]]) auto
+        also have \<open>\<dots> = dist (\<Sum>x\<in>F. norm (f x)) (\<Sum>x\<in>F'. norm (f x))\<close>
+          unfolding dist_norm using \<open>finite F\<close> F' by (subst sum_diff) auto
+        also have \<open>\<dots> < 2 * d\<close>
+          using dist_F F' unfolding P_def dist_norm real_norm_def by linarith
+        finally show \<open>dist (sum f F) (sum f F') < 2*d\<close> .
+      qed
 
-      from dist_F have \<open>dist (sum (\<lambda>x. norm (f x)) F) (sum (\<lambda>x. norm (f x)) F1) < 2*d\<close>
-        by (smt (verit, best) P_def dist_norm real_norm_def that(1))
-      then have \<open>norm (sum (\<lambda>x. norm (f x)) (F-F1)) < 2*d\<close>
-        unfolding dist_norm
-        by (metis F_def \<open>finite F\<close> inf_sup_ord(3) order_trans sum_diff sup_ge2)
-      then have \<open>norm (sum f (F-F1)) < 2*d\<close>
-        by (smt (verit, ccfv_threshold) real_norm_def sum_norm_le)
-      then have dist_F_F1: \<open>dist (sum f F) (sum f F1) < 2*d\<close>
-        by (metis F_def \<open>finite F\<close> dist_norm inf_sup_ord(3) le_supE sum_diff)
-
-      from dist_F_F2 dist_F_F1 show \<open>dist (sum f F1) (sum f F2) < e\<close>
-        unfolding d_def apply auto
-        by (meson dist_triangle_half_r less_divide_eq_numeral1(1))
+      have \<open>dist (sum f F1) (sum f F2) \<le> dist (sum f F) (sum f F1) + dist (sum f F) (sum f F2)\<close>
+        by (rule dist_triangle3)
+      also have \<open>\<dots> < 2 * d + 2 * d\<close>
+        by (intro add_strict_mono dist_F_subset that) (auto simp: F_def)
+      also have \<open>\<dots> \<le> e\<close>
+        by (auto simp: d_def)
+      finally show \<open>dist (sum f F1) (sum f F2) < e\<close> .
     qed
     then show ?thesis
       using ev_P by blast
@@ -583,11 +590,6 @@
     using False by auto
 qed
 
-lemma has_sum_infsum[simp]:
-  assumes \<open>f summable_on S\<close>
-  shows \<open>has_sum f S (infsum f S)\<close>
-  using assms by (auto simp: summable_on_def infsum_def has_sum_def tendsto_Lim)
-
 lemma infsum_tendsto:
   assumes \<open>f summable_on S\<close>
   shows \<open>((\<lambda>F. sum f F) \<longlongrightarrow> infsum f S) (finite_subsets_at_top S)\<close>
@@ -692,8 +694,13 @@
   assumes "f summable_on B"
   assumes disj: "A \<inter> B = {}"
   shows \<open>infsum f (A \<union> B) = infsum f A + infsum f B\<close>
-  by (smt (verit, ccfv_threshold) assms(1) assms(2) disj finite_subsets_at_top_neq_bot summable_on_def has_sum_Un_disjoint has_sum_def has_sum_infsum tendsto_Lim)
+  by (intro infsumI has_sum_Un_disjoint has_sum_infsum assms)  
 
+(* TODO move *)
+lemma (in uniform_space) cauchy_filter_complete_converges:
+  assumes "cauchy_filter F" "complete A" "F \<le> principal A" "F \<noteq> bot"
+  shows   "\<exists>c. F \<le> nhds c"
+  using assms unfolding complete_uniform by blast
 
 text \<open>The following lemma indeed needs a complete space (as formalized by the premise \<^term>\<open>complete UNIV\<close>).
   The following two counterexamples show this:
@@ -720,68 +727,71 @@
   assumes \<open>B \<subseteq> A\<close>
   shows \<open>f summable_on B\<close>
 proof -
+  let ?filter_fB = \<open>filtermap (sum f) (finite_subsets_at_top B)\<close>
   from \<open>f summable_on A\<close>
   obtain S where \<open>(sum f \<longlongrightarrow> S) (finite_subsets_at_top A)\<close> (is \<open>(sum f \<longlongrightarrow> S) ?filter_A\<close>)
     using summable_on_def has_sum_def by blast
   then have cauchy_fA: \<open>cauchy_filter (filtermap (sum f) (finite_subsets_at_top A))\<close> (is \<open>cauchy_filter ?filter_fA\<close>)
     by (auto intro!: nhds_imp_cauchy_filter simp: filterlim_def)
 
-  let ?filter_fB = \<open>filtermap (sum f) (finite_subsets_at_top B)\<close>
-
   have \<open>cauchy_filter (filtermap (sum f) (finite_subsets_at_top B))\<close>
   proof (unfold cauchy_filter_def, rule filter_leI)
     fix E :: \<open>('b\<times>'b) \<Rightarrow> bool\<close> assume \<open>eventually E uniformity\<close>
     then obtain E' where \<open>eventually E' uniformity\<close> and E'E'E: \<open>E' (x, y) \<longrightarrow> E' (y, z) \<longrightarrow> E (x, z)\<close> for x y z
       using uniformity_trans by blast
-    from plus_cont[simplified uniformly_continuous_on_uniformity filterlim_def le_filter_def, rule_format, 
-                   OF \<open>eventually E' uniformity\<close>]
     obtain D where \<open>eventually D uniformity\<close> and DE: \<open>D (x, y) \<Longrightarrow> E' (x+c, y+c)\<close> for x y c
-      apply atomize_elim
-      by (auto simp: case_prod_beta eventually_filtermap uniformity_prod_def 
-        eventually_prod_same uniformity_refl)
-    with cauchy_fA have \<open>eventually D (?filter_fA \<times>\<^sub>F ?filter_fA)\<close>
+      using plus_cont \<open>eventually E' uniformity\<close>
+      unfolding uniformly_continuous_on_uniformity filterlim_def le_filter_def uniformity_prod_def
+      by (auto simp: case_prod_beta eventually_filtermap eventually_prod_same uniformity_refl)
+    have DE': "E' (x, y)" if "D (x + c, y + c)" for x y c
+      using DE[of "x + c" "y + c" "-c"] that by simp
+
+    from \<open>eventually D uniformity\<close> and cauchy_fA have \<open>eventually D (?filter_fA \<times>\<^sub>F ?filter_fA)\<close>
       unfolding cauchy_filter_def le_filter_def by simp
-    then obtain P1 P2 where ev_P1: \<open>eventually (\<lambda>F. P1 (sum f F)) ?filter_A\<close> and ev_P2: \<open>eventually (\<lambda>F. P2 (sum f F)) ?filter_A\<close>
-      and P1P2E: \<open>P1 x \<Longrightarrow> P2 y \<Longrightarrow> D (x, y)\<close> for x y
+    then obtain P1 P2
+      where ev_P1: \<open>eventually (\<lambda>F. P1 (sum f F)) ?filter_A\<close> 
+        and ev_P2: \<open>eventually (\<lambda>F. P2 (sum f F)) ?filter_A\<close>
+        and P1P2E: \<open>P1 x \<Longrightarrow> P2 y \<Longrightarrow> D (x, y)\<close> for x y
       unfolding eventually_prod_filter eventually_filtermap
       by auto
-    from ev_P1 obtain F1 where \<open>finite F1\<close> and \<open>F1 \<subseteq> A\<close> and \<open>\<forall>F. F\<supseteq>F1 \<and> finite F \<and> F\<subseteq>A \<longrightarrow> P1 (sum f F)\<close>
+    from ev_P1 obtain F1 where F1: \<open>finite F1\<close> \<open>F1 \<subseteq> A\<close> \<open>\<And>F. F\<supseteq>F1 \<Longrightarrow> finite F \<Longrightarrow> F\<subseteq>A \<Longrightarrow> P1 (sum f F)\<close>
       by (metis eventually_finite_subsets_at_top)
-    from ev_P2 obtain F2 where \<open>finite F2\<close> and \<open>F2 \<subseteq> A\<close> and \<open>\<forall>F. F\<supseteq>F2 \<and> finite F \<and> F\<subseteq>A \<longrightarrow> P2 (sum f F)\<close>
+    from ev_P2 obtain F2 where F2: \<open>finite F2\<close> \<open>F2 \<subseteq> A\<close> \<open>\<And>F. F\<supseteq>F2 \<Longrightarrow> finite F \<Longrightarrow> F\<subseteq>A \<Longrightarrow> P2 (sum f F)\<close>
       by (metis eventually_finite_subsets_at_top)
     define F0 F0A F0B where \<open>F0 \<equiv> F1 \<union> F2\<close> and \<open>F0A \<equiv> F0 - B\<close> and \<open>F0B \<equiv> F0 \<inter> B\<close>
     have [simp]: \<open>finite F0\<close>  \<open>F0 \<subseteq> A\<close>
-       apply (simp add: F0_def \<open>finite F1\<close> \<open>finite F2\<close>)
-      by (simp add: F0_def \<open>F1 \<subseteq> A\<close> \<open>F2 \<subseteq> A\<close>)
-    have [simp]: \<open>finite F0A\<close>
-      by (simp add: F0A_def)
-    have \<open>\<forall>F1 F2. F1\<supseteq>F0 \<and> F2\<supseteq>F0 \<and> finite F1 \<and> finite F2 \<and> F1\<subseteq>A \<and> F2\<subseteq>A \<longrightarrow> D (sum f F1, sum f F2)\<close>
-      by (simp add: F0_def P1P2E \<open>\<forall>F. F1 \<subseteq> F \<and> finite F \<and> F \<subseteq> A \<longrightarrow> P1 (sum f F)\<close> \<open>\<forall>F. F2 \<subseteq> F \<and> finite F \<and> F \<subseteq> A \<longrightarrow> P2 (sum f F)\<close>)
-    then have \<open>\<forall>F1 F2. F1\<supseteq>F0B \<and> F2\<supseteq>F0B \<and> finite F1 \<and> finite F2 \<and> F1\<subseteq>B \<and> F2\<subseteq>B \<longrightarrow> 
-              D (sum f (F1 \<union> F0A), sum f (F2 \<union> F0A))\<close>
-      by (smt (verit) Diff_Diff_Int Diff_subset_conv F0A_def F0B_def \<open>F0 \<subseteq> A\<close> \<open>finite F0A\<close> assms(4) finite_UnI sup.absorb_iff1 sup.mono sup_commute)
-    then have \<open>\<forall>F1 F2. F1\<supseteq>F0B \<and> F2\<supseteq>F0B \<and> finite F1 \<and> finite F2 \<and> F1\<subseteq>B \<and> F2\<subseteq>B \<longrightarrow> 
-              D (sum f F1 + sum f F0A, sum f F2 + sum f F0A)\<close>
-      by (metis Diff_disjoint F0A_def \<open>finite F0A\<close> inf.absorb_iff1 inf_assoc inf_bot_right sum.union_disjoint)
-    then have *: \<open>\<forall>F1 F2. F1\<supseteq>F0B \<and> F2\<supseteq>F0B \<and> finite F1 \<and> finite F2 \<and> F1\<subseteq>B \<and> F2\<subseteq>B \<longrightarrow> 
-              E' (sum f F1, sum f F2)\<close>
-      using DE[where c=\<open>- sum f F0A\<close>]
-      apply auto by (metis add.commute add_diff_cancel_left')
+      using \<open>F1 \<subseteq> A\<close> \<open>F2 \<subseteq> A\<close> \<open>finite F1\<close> \<open>finite F2\<close> unfolding F0_def by blast+
+ 
+    have *: "E' (sum f F1', sum f F2')"
+      if "F1'\<supseteq>F0B" "F2'\<supseteq>F0B" "finite F1'" "finite F2'" "F1'\<subseteq>B" "F2'\<subseteq>B" for F1' F2'
+    proof (intro DE'[where c = "sum f F0A"] P1P2E)
+      have "P1 (sum f (F1' \<union> F0A))"
+        using that assms F1(1,2) F2(1,2) by (intro F1) (auto simp: F0A_def F0B_def F0_def)
+      thus "P1 (sum f F1' + sum f F0A)"
+        by (subst (asm) sum.union_disjoint) (use that in \<open>auto simp: F0A_def\<close>)
+    next
+      have "P2 (sum f (F2' \<union> F0A))"
+        using that assms F1(1,2) F2(1,2) by (intro F2) (auto simp: F0A_def F0B_def F0_def)
+      thus "P2 (sum f F2' + sum f F0A)"
+        by (subst (asm) sum.union_disjoint) (use that in \<open>auto simp: F0A_def\<close>)      
+    qed
+
     show \<open>eventually E (?filter_fB \<times>\<^sub>F ?filter_fB)\<close>
-      apply (subst eventually_prod_filter)
-      apply (rule exI[of _ \<open>\<lambda>x. E' (x, sum f F0B)\<close>])
-      apply (rule exI[of _ \<open>\<lambda>x. E' (sum f F0B, x)\<close>])
-      apply (auto simp: eventually_filtermap)
-      using * apply (metis (no_types, lifting) F0B_def Int_lower2 \<open>finite F0\<close> eventually_finite_subsets_at_top finite_Int order_refl)
-      using * apply (metis (no_types, lifting) F0B_def Int_lower2 \<open>finite F0\<close> eventually_finite_subsets_at_top finite_Int order_refl)
-      using E'E'E by auto
+      unfolding eventually_prod_filter
+    proof (safe intro!: exI)
+      show "eventually (\<lambda>x. E' (x, sum f F0B)) (filtermap (sum f) (finite_subsets_at_top B))"
+       and "eventually (\<lambda>x. E' (sum f F0B, x)) (filtermap (sum f) (finite_subsets_at_top B))"
+        unfolding eventually_filtermap eventually_finite_subsets_at_top
+        by (rule exI[of _ F0B]; use * in \<open>force simp: F0B_def\<close>)+
+    next
+      show "E (x, y)" if "E' (x, sum f F0B)" and "E' (sum f F0B, y)" for x y
+        using E'E'E that by blast
+    qed
   qed
 
-  then obtain x where \<open>filtermap (sum f) (finite_subsets_at_top B) \<le> nhds x\<close>
-    apply atomize_elim
-    apply (rule complete_uniform[where S=UNIV, THEN iffD1, rule_format, simplified])
-    using assms by (auto simp add: filtermap_bot_iff)
-
+  then obtain x where \<open>?filter_fB \<le> nhds x\<close>
+    using cauchy_filter_complete_converges[of ?filter_fB UNIV] \<open>complete (UNIV :: _)\<close>
+    by (auto simp: filtermap_bot_iff)
   then have \<open>(sum f \<longlongrightarrow> x) (finite_subsets_at_top B)\<close>
     by (auto simp: filterlim_def)
   then show ?thesis
@@ -795,9 +805,8 @@
   assumes \<open>f summable_on A\<close>
   assumes \<open>B \<subseteq> A\<close>
   shows \<open>f summable_on B\<close>
-  apply (rule summable_on_subset)
-  using assms apply auto
-  by (metis Cauchy_convergent UNIV_I complete_def convergent_def)
+  by (rule summable_on_subset[OF _ _ assms])
+     (auto simp: complete_def convergent_def dest!: Cauchy_convergent)
 
 lemma has_sum_empty[simp]: \<open>has_sum f {} 0\<close>
   by (meson ex_in_conv has_sum_0)
@@ -847,9 +856,8 @@
   assumes conv: \<open>\<And>a. a \<in> A \<Longrightarrow> f summable_on (B a)\<close>
   assumes disj: \<open>\<And>a a'. a\<in>A \<Longrightarrow> a'\<in>A \<Longrightarrow> a\<noteq>a' \<Longrightarrow> B a \<inter> B a' = {}\<close>
   shows \<open>sum (\<lambda>a. infsum f (B a)) A = infsum f (\<Union>a\<in>A. B a)\<close>
-  using sum_has_sum[of A f B \<open>\<lambda>a. infsum f (B a)\<close>]
-  using assms apply auto
-  by (metis finite_subsets_at_top_neq_bot infsum_def summable_on_def has_sum_def tendsto_Lim)
+  by (rule sym, rule infsumI)
+     (use sum_has_sum[of A f B \<open>\<lambda>a. infsum f (B a)\<close>] assms in auto)
 
 text \<open>The lemmas \<open>infsum_comm_additive_general\<close> and \<open>infsum_comm_additive\<close> (and variants) below both state that the infinite sum commutes with
   a continuous additive function. \<open>infsum_comm_additive_general\<close> is stated more for more general type classes
@@ -897,7 +905,8 @@
   assumes \<open>isCont f (infsum g S)\<close>
   assumes \<open>g summable_on S\<close>
   shows \<open>infsum (f o g) S = f (infsum g S)\<close>
-  by (smt (verit) assms(2) assms(3) continuous_within f_sum finite_subsets_at_top_neq_bot summable_on_comm_additive_general has_sum_comm_additive_general has_sum_def has_sum_infsum tendsto_Lim)
+  using assms
+  by (intro infsumI has_sum_comm_additive_general has_sum_infsum) (auto simp: isCont_def)
 
 lemma has_sum_comm_additive: 
   fixes f :: \<open>'b :: {ab_group_add,topological_space} \<Rightarrow> 'c :: {ab_group_add,topological_space}\<close>
@@ -906,7 +915,8 @@
     \<comment> \<open>For \<^class>\<open>t2_space\<close>, this is equivalent to \<open>isCont f x\<close> by @{thm [source] isCont_def}.\<close>
   assumes infsum: \<open>has_sum g S x\<close>
   shows \<open>has_sum (f o g) S (f x)\<close>
-  by (smt (verit, best) additive.sum assms(1) assms(2) comp_eq_dest_lhs continuous_within finite_subsets_at_top_neq_bot infsum summable_on_def has_sum_comm_additive_general has_sum_def has_sum_infsum sum.cong tendsto_Lim) 
+  using assms
+  by (intro has_sum_comm_additive_general has_sum_infsum) (auto simp: isCont_def additive.sum) 
 
 lemma summable_on_comm_additive:
   fixes f :: \<open>'b :: {ab_group_add,t2_space} \<Rightarrow> 'c :: {ab_group_add,topological_space}\<close>
@@ -924,8 +934,7 @@
   shows \<open>infsum (f o g) S = f (infsum g S)\<close>
   by (rule infsum_comm_additive_general; auto simp: assms additive.sum)
 
-
-lemma pos_has_sum:
+lemma nonneg_bdd_above_has_sum:
   fixes f :: \<open>'a \<Rightarrow> 'b :: {conditionally_complete_linorder, ordered_comm_monoid_add, linorder_topology}\<close>
   assumes \<open>\<And>x. x\<in>A \<Longrightarrow> f x \<ge> 0\<close>
   assumes \<open>bdd_above (sum f ` {F. F\<subseteq>A \<and> finite F})\<close>
@@ -938,14 +947,24 @@
       by (metis (mono_tags, lifting) Collect_cong Collect_empty_eq assms(2) empty_subsetI finite.emptyI less_cSUP_iff mem_Collect_eq)
     show \<open>\<forall>\<^sub>F x in finite_subsets_at_top A. a < sum f x\<close>
       unfolding eventually_finite_subsets_at_top
-      apply (rule exI[of _ F])
-      using \<open>a < sum f F\<close> and \<open>finite F\<close> and \<open>F \<subseteq> A\<close>
-      apply auto
-      by (smt (verit, best) Diff_iff assms(1) less_le_trans subset_iff sum_mono2)
+    proof (rule exI[of _ F], safe)
+      fix Y assume Y: "finite Y" "F \<subseteq> Y" "Y \<subseteq> A"
+      have "a < sum f F"
+        by fact
+      also have "\<dots> \<le> sum f Y"
+        using assms Y by (intro sum_mono2) auto
+      finally show "a < sum f Y" .
+    qed (use \<open>finite F\<close> \<open>F \<subseteq> A\<close> in auto)
   next
-    fix a assume \<open>(SUP F\<in>{F. finite F \<and> F\<subseteq>A}. sum f F) < a\<close>
-    then have \<open>sum f F < a\<close> if \<open>F\<subseteq>A\<close> and \<open>finite F\<close> for F
-      by (smt (verit, best) Collect_cong antisym_conv assms(2) cSUP_upper dual_order.trans le_less_linear less_le mem_Collect_eq that(1) that(2))
+    fix a assume *: \<open>(SUP F\<in>{F. finite F \<and> F\<subseteq>A}. sum f F) < a\<close>
+    have \<open>sum f F < a\<close> if \<open>F\<subseteq>A\<close> and \<open>finite F\<close> for F
+    proof -
+      have "sum f F \<le> (SUP F\<in>{F. finite F \<and> F\<subseteq>A}. sum f F)"
+        by (rule cSUP_upper) (use that assms(2) in \<open>auto simp: conj_commute\<close>)
+      also have "\<dots> < a"
+        by fact
+      finally show ?thesis .
+    qed
     then show \<open>\<forall>\<^sub>F x in finite_subsets_at_top A. sum f x < a\<close>
       by (rule eventually_finite_subsets_at_top_weakI)
   qed
@@ -953,38 +972,37 @@
     using has_sum_def by blast
 qed
 
-lemma pos_summable_on:
+lemma nonneg_bdd_above_summable_on:
   fixes f :: \<open>'a \<Rightarrow> 'b :: {conditionally_complete_linorder, ordered_comm_monoid_add, linorder_topology}\<close>
   assumes \<open>\<And>x. x\<in>A \<Longrightarrow> f x \<ge> 0\<close>
   assumes \<open>bdd_above (sum f ` {F. F\<subseteq>A \<and> finite F})\<close>
   shows \<open>f summable_on A\<close>
-  using assms(1) assms(2) summable_on_def pos_has_sum by blast
+  using assms(1) assms(2) summable_on_def nonneg_bdd_above_has_sum by blast
 
-
-lemma pos_infsum:
+lemma nonneg_bdd_above_infsum:
   fixes f :: \<open>'a \<Rightarrow> 'b :: {conditionally_complete_linorder, ordered_comm_monoid_add, linorder_topology}\<close>
   assumes \<open>\<And>x. x\<in>A \<Longrightarrow> f x \<ge> 0\<close>
   assumes \<open>bdd_above (sum f ` {F. F\<subseteq>A \<and> finite F})\<close>
   shows \<open>infsum f A = (SUP F\<in>{F. finite F \<and> F\<subseteq>A}. sum f F)\<close>
-  using assms by (auto intro!: infsumI pos_has_sum)
+  using assms by (auto intro!: infsumI nonneg_bdd_above_has_sum)
 
-lemma pos_has_sum_complete:
+lemma nonneg_has_sum_complete:
   fixes f :: \<open>'a \<Rightarrow> 'b :: {complete_linorder, ordered_comm_monoid_add, linorder_topology}\<close>
   assumes \<open>\<And>x. x\<in>A \<Longrightarrow> f x \<ge> 0\<close>
   shows \<open>has_sum f A (SUP F\<in>{F. finite F \<and> F\<subseteq>A}. sum f F)\<close>
-  using assms pos_has_sum by blast
+  using assms nonneg_bdd_above_has_sum by blast
 
-lemma pos_summable_on_complete:
+lemma nonneg_summable_on_complete:
   fixes f :: \<open>'a \<Rightarrow> 'b :: {complete_linorder, ordered_comm_monoid_add, linorder_topology}\<close>
   assumes \<open>\<And>x. x\<in>A \<Longrightarrow> f x \<ge> 0\<close>
   shows \<open>f summable_on A\<close>
-  using assms pos_summable_on by blast
+  using assms nonneg_bdd_above_summable_on by blast
 
-lemma pos_infsum_complete:
+lemma nonneg_infsum_complete:
   fixes f :: \<open>'a \<Rightarrow> 'b :: {complete_linorder, ordered_comm_monoid_add, linorder_topology}\<close>
   assumes \<open>\<And>x. x\<in>A \<Longrightarrow> f x \<ge> 0\<close>
   shows \<open>infsum f A = (SUP F\<in>{F. finite F \<and> F\<subseteq>A}. sum f F)\<close>
-  using assms pos_infsum by blast
+  using assms nonneg_bdd_above_infsum by blast
 
 lemma has_sum_nonneg:
   fixes f :: "'a \<Rightarrow> 'b::{ordered_comm_monoid_add,linorder_topology}"
@@ -995,10 +1013,51 @@
 
 lemma infsum_nonneg:
   fixes f :: "'a \<Rightarrow> 'b::{ordered_comm_monoid_add,linorder_topology}"
-  assumes "f summable_on M"
-    and "\<And>x. x \<in> M \<Longrightarrow> 0 \<le> f x"
+  assumes "\<And>x. x \<in> M \<Longrightarrow> 0 \<le> f x"
   shows "infsum f M \<ge> 0" (is "?lhs \<ge> _")
-  by (metis assms infsum_0_simp summable_on_0_simp infsum_mono)
+  apply (cases \<open>f summable_on M\<close>)
+   apply (metis assms infsum_0_simp summable_on_0_simp infsum_mono)
+  using assms by (auto simp add: infsum_not_exists)
+
+lemma has_sum_mono2:
+  fixes f :: "'a \<Rightarrow> 'b::{topological_ab_group_add, ordered_comm_monoid_add,linorder_topology}"
+  assumes "has_sum f A S" "has_sum f B S'" "A \<subseteq> B"
+  assumes "\<And>x. x \<in> B - A \<Longrightarrow> f x \<ge> 0"
+  shows   "S \<le> S'"
+proof -
+  have "has_sum f (B - A) (S' - S)"
+    by (rule has_sum_Diff) fact+
+  hence "S' - S \<ge> 0"
+    by (rule has_sum_nonneg) (use assms(4) in auto)
+  thus ?thesis
+    by (metis add_0 add_mono_thms_linordered_semiring(3) diff_add_cancel)
+qed
+
+lemma infsum_mono2:
+  fixes f :: "'a \<Rightarrow> 'b::{topological_ab_group_add, ordered_comm_monoid_add,linorder_topology}"
+  assumes "f summable_on A" "f summable_on B" "A \<subseteq> B"
+  assumes "\<And>x. x \<in> B - A \<Longrightarrow> f x \<ge> 0"
+  shows   "infsum f A \<le> infsum f B"
+  by (rule has_sum_mono2[OF has_sum_infsum has_sum_infsum]) (use assms in auto)
+
+lemma finite_sum_le_has_sum:
+  fixes f :: "'a \<Rightarrow> 'b::{topological_ab_group_add, ordered_comm_monoid_add,linorder_topology}"
+  assumes "has_sum f A S" "finite B" "B \<subseteq> A"
+  assumes "\<And>x. x \<in> A - B \<Longrightarrow> f x \<ge> 0"
+  shows   "sum f B \<le> S"
+proof (rule has_sum_mono2)
+  show "has_sum f A S"
+    by fact
+  show "has_sum f B (sum f B)"
+    by (rule has_sum_finite) fact+
+qed (use assms in auto)
+
+lemma finite_sum_le_infsum:
+  fixes f :: "'a \<Rightarrow> 'b::{topological_ab_group_add, ordered_comm_monoid_add,linorder_topology}"
+  assumes "f summable_on A" "finite B" "B \<subseteq> A"
+  assumes "\<And>x. x \<in> A - B \<Longrightarrow> f x \<ge> 0"
+  shows   "sum f B \<le> infsum f A"
+  by (rule finite_sum_le_has_sum[OF has_sum_infsum]) (use assms in auto)
 
 lemma has_sum_reindex:
   assumes \<open>inj_on h A\<close>
@@ -1016,11 +1075,9 @@
     using assms subset_inj_on by blast
   also have \<open>\<dots> \<longleftrightarrow> has_sum (g \<circ> h) A x\<close>
     by (simp add: has_sum_def)
-  finally show ?thesis
-    by -
+  finally show ?thesis .
 qed
 
-
 lemma summable_on_reindex:
   assumes \<open>inj_on h A\<close>
   shows \<open>g summable_on (h ` A) \<longleftrightarrow> (g \<circ> h) summable_on A\<close>
@@ -1029,8 +1086,32 @@
 lemma infsum_reindex:
   assumes \<open>inj_on h A\<close>
   shows \<open>infsum g (h ` A) = infsum (g \<circ> h) A\<close>
-  by (metis (no_types, opaque_lifting) assms finite_subsets_at_top_neq_bot infsum_def summable_on_reindex has_sum_def has_sum_infsum has_sum_reindex tendsto_Lim)
+  by (metis (no_types, opaque_lifting) assms finite_subsets_at_top_neq_bot infsum_def 
+        summable_on_reindex has_sum_def has_sum_infsum has_sum_reindex tendsto_Lim)
 
+lemma summable_on_reindex_bij_betw:
+  assumes "bij_betw g A B"
+  shows   "(\<lambda>x. f (g x)) summable_on A \<longleftrightarrow> f summable_on B"
+proof -
+  thm summable_on_reindex
+  have \<open>(\<lambda>x. f (g x)) summable_on A \<longleftrightarrow> f summable_on g ` A\<close>
+    apply (rule summable_on_reindex[symmetric, unfolded o_def])
+    using assms bij_betw_imp_inj_on by blast
+  also have \<open>\<dots> \<longleftrightarrow> f summable_on B\<close>
+    using assms bij_betw_imp_surj_on by blast
+  finally show ?thesis .
+qed
+
+lemma infsum_reindex_bij_betw:
+  assumes "bij_betw g A B"
+  shows   "infsum (\<lambda>x. f (g x)) A = infsum f B"
+proof -
+  have \<open>infsum (\<lambda>x. f (g x)) A = infsum f (g ` A)\<close>
+    by (metis (mono_tags, lifting) assms bij_betw_imp_inj_on infsum_cong infsum_reindex o_def)
+  also have \<open>\<dots> = infsum f B\<close>
+    using assms bij_betw_imp_surj_on by blast
+  finally show ?thesis .
+qed
 
 lemma sum_uniformity:
   assumes plus_cont: \<open>uniformly_continuous_on UNIV (\<lambda>(x::'b::{uniform_space,comm_monoid_add},y). x+y)\<close>
@@ -1152,7 +1233,7 @@
           from sum_b[unfolded tendsto_iff_uniformity, rule_format, OF _ D'_uni[THEN uniformity_sym]]
           obtain Ha0 where \<open>finite (Ha0 a)\<close> and \<open>Ha0 a \<subseteq> B a\<close>
             and \<open>Ha0 a \<subseteq> L \<Longrightarrow> L \<subseteq> B a \<Longrightarrow> finite L \<Longrightarrow> D' (b a, sum (\<lambda>b. f (a,b)) L)\<close> if \<open>a \<in> A\<close> for a L
-            unfolding FB_def eventually_finite_subsets_at_top apply auto by metis
+            unfolding FB_def eventually_finite_subsets_at_top unfolding prod.case by metis
           moreover define Ha where \<open>Ha a = Ha0 a \<union> Ga a\<close> for a
           ultimately show ?thesis
             using that[where Ha=Ha]
@@ -1180,19 +1261,23 @@
         moreover have \<open>Sigma M Ha \<subseteq> Sigma M B\<close>
           using Ha_B \<open>M \<subseteq> A\<close> by auto
         ultimately show ?thesis
-          apply (simp add: FMB_def eventually_finite_subsets_at_top)
-          by (metis Ha_fin finite_SigmaI subsetD that(2) that(3))
+          unfolding FMB_def eventually_finite_subsets_at_top
+          by (intro exI[of _ "Sigma M Ha"])
+             (use Ha_fin that(2,3) in \<open>fastforce intro!: finite_SigmaI\<close>)
       qed
       moreover have \<open>eventually (\<lambda>H. D (\<Sum>(a,b)\<in>H. f (a,b), a)) FMB\<close>
         unfolding FMB_def eventually_finite_subsets_at_top
-        apply (rule exI[of _ G])
-        using \<open>G \<subseteq> Sigma A B\<close> \<open>finite G\<close> that G_sum apply auto
-        by (meson Sigma_mono dual_order.refl order_trans)
+      proof (rule exI[of _ G], safe)
+        fix Y assume Y: "finite Y" "G \<subseteq> Y" "Y \<subseteq> Sigma M B"
+        have "Y \<subseteq> Sigma A B"
+          using Y \<open>M \<subseteq> A\<close> by blast
+        thus "D (\<Sum>(a,b)\<in>Y. f (a, b), a)"
+          using G_sum[of Y] Y by auto
+      qed (use \<open>finite G\<close> \<open>G \<subseteq> Sigma A B\<close> that in auto)
       ultimately have \<open>\<forall>\<^sub>F x in FMB. E (sum b M, a)\<close>
-        by (smt (verit, best) DDE' eventually_elim2)
+        by eventually_elim (use DDE' in auto)
       then show \<open>E (sum b M, a)\<close>
-        apply (rule eventually_const[THEN iffD1, rotated])
-        using FMB_def by force
+        by (rule eventually_const[THEN iffD1, rotated]) (force simp: FMB_def)
     qed
     then show \<open>\<forall>\<^sub>F x in FA. E (sum b x, a)\<close>
       using \<open>finite (fst ` G)\<close> and \<open>fst ` G \<subseteq> A\<close>
@@ -1277,7 +1362,8 @@
     and f :: \<open>'a \<times> 'b \<Rightarrow> 'c::banach\<close>
   assumes [simp]: "f summable_on (Sigma A B)"
   shows \<open>infsum (\<lambda>x. infsum (\<lambda>y. f (x,y)) (B x)) A = infsum f (Sigma A B)\<close>
-  by (smt (verit, best) SigmaE assms infsum_Sigma'_banach infsum_cong summable_on_cong old.prod.case)
+  using assms
+  by (subst infsum_Sigma'_banach) auto
 
 lemma infsum_swap:
   fixes A :: "'a set" and B :: "'b set"
@@ -1302,8 +1388,7 @@
   also have \<open>\<dots> = infsum (\<lambda>y. infsum (\<lambda>x. f x y) A) B\<close>
     apply (subst infsum_Sigma)
     using assms by auto
-  finally show ?thesis
-    by -
+  finally show ?thesis .
 qed
 
 lemma infsum_swap_banach:
@@ -1326,11 +1411,10 @@
   also have \<open>\<dots> = infsum (\<lambda>y. infsum (\<lambda>x. f x y) A) B\<close>
     apply (subst infsum_Sigma'_banach)
     using assms by auto
-  finally show ?thesis
-    by -
+  finally show ?thesis .
 qed
 
-lemma infsum_0D:
+lemma nonneg_infsum_le_0D:
   fixes f :: "'a \<Rightarrow> 'b::{topological_ab_group_add,ordered_ab_group_add,linorder_topology}"
   assumes "infsum f A \<le> 0"
     and abs_sum: "f summable_on A"
@@ -1340,34 +1424,31 @@
 proof (rule ccontr)
   assume \<open>f x \<noteq> 0\<close>
   have ex: \<open>f summable_on (A-{x})\<close>
-    apply (rule summable_on_cofin_subset)
-    using assms by auto
-  then have pos: \<open>infsum f (A - {x}) \<ge> 0\<close>
-    apply (rule infsum_nonneg)
-    using nneg by auto
+    by (rule summable_on_cofin_subset) (use assms in auto)
+  have pos: \<open>infsum f (A - {x}) \<ge> 0\<close>
+    by (rule infsum_nonneg) (use nneg in auto)
 
   have [trans]: \<open>x \<ge> y \<Longrightarrow> y > z \<Longrightarrow> x > z\<close> for x y z :: 'b by auto
 
   have \<open>infsum f A = infsum f (A-{x}) + infsum f {x}\<close>
-    apply (subst infsum_Un_disjoint[symmetric])
-    using assms ex apply auto by (metis insert_absorb) 
+    by (subst infsum_Un_disjoint[symmetric]) (use assms ex in \<open>auto simp: insert_absorb\<close>)
   also have \<open>\<dots> \<ge> infsum f {x}\<close> (is \<open>_ \<ge> \<dots>\<close>)
-    using pos apply (rule add_increasing) by simp
+    using pos by (rule add_increasing) simp
   also have \<open>\<dots> = f x\<close> (is \<open>_ = \<dots>\<close>)
-    apply (subst infsum_finite) by auto
+    by (subst infsum_finite) auto
   also have \<open>\<dots> > 0\<close>
     using \<open>f x \<noteq> 0\<close> assms(4) nneg by fastforce
   finally show False
     using assms by auto
 qed
 
-lemma has_sum_0D:
+lemma nonneg_has_sum_le_0D:
   fixes f :: "'a \<Rightarrow> 'b::{topological_ab_group_add,ordered_ab_group_add,linorder_topology}"
   assumes "has_sum f A a" \<open>a \<le> 0\<close>
     and nneg: "\<And>x. x \<in> A \<Longrightarrow> f x \<ge> 0"
     and "x \<in> A"
   shows "f x = 0"
-  by (metis assms(1) assms(2) assms(4) infsumI infsum_0D summable_on_def nneg)
+  by (metis assms(1) assms(2) assms(4) infsumI nonneg_infsum_le_0D summable_on_def nneg)
 
 lemma has_sum_cmult_left:
   fixes f :: "'a \<Rightarrow> 'b :: {topological_semigroup_mult, semiring_0}"
@@ -1563,8 +1644,7 @@
     also have \<open>\<dots> \<le> infsum (\<lambda>_. 1::'a) A\<close>
       apply (rule infsum_mono_neutral)
       using \<open>finite F\<close> \<open>F \<subseteq> A\<close> by auto
-    finally show ?thesis
-      by -
+    finally show ?thesis .
   qed
   then show False
     by (meson linordered_field_no_ub not_less)
@@ -1596,13 +1676,548 @@
   shows \<open>infsum (\<lambda>x. - f x) A = - infsum f A\<close>
   by (metis (full_types) add.inverse_inverse add.inverse_neutral infsumI infsum_def has_sum_infsum has_sum_uminus)
 
+lemma has_sum_le_finite_sums: \<comment> \<open>a bound valid for all finite partial sums bounds the attained sum\<close>
+  fixes a :: \<open>'a::{comm_monoid_add,topological_space,linorder_topology}\<close>
+  assumes \<open>has_sum f A a\<close>
+  assumes \<open>\<And>F. finite F \<Longrightarrow> F \<subseteq> A \<Longrightarrow> sum f F \<le> b\<close>
+  shows \<open>a \<le> b\<close>
+proof -
+  from assms(1)
+  have 1: \<open>(sum f \<longlongrightarrow> a) (finite_subsets_at_top A)\<close>
+    unfolding has_sum_def .
+  from assms(2)
+  have 2: \<open>\<forall>\<^sub>F F in finite_subsets_at_top A. sum f F \<le> b\<close>
+    by (rule_tac eventually_finite_subsets_at_top_weakI)
+  show \<open>a \<le> b\<close> \<comment> \<open>pass to the limit of the net of finite partial sums\<close>
+    using _ _ 1 2
+    apply (rule tendsto_le[where f=\<open>\<lambda>_. b\<close>])
+    by auto
+qed
+
+lemma infsum_le_finite_sums: \<comment> \<open>corollary of \<open>has_sum_le_finite_sums\<close> stated for \<open>infsum\<close>\<close>
+  fixes b :: \<open>'a::{comm_monoid_add,topological_space,linorder_topology}\<close>
+  assumes \<open>f summable_on A\<close>
+  assumes \<open>\<And>F. finite F \<Longrightarrow> F \<subseteq> A \<Longrightarrow> sum f F \<le> b\<close>
+  shows \<open>infsum f A \<le> b\<close>
+  by (meson assms(1) assms(2) has_sum_infsum has_sum_le_finite_sums)
+
+
+lemma summable_on_scaleR_left [intro]: \<comment> \<open>scaling a summable family by a fixed vector on the right\<close>
+  fixes c :: \<open>'a :: real_normed_vector\<close>
+  assumes "c \<noteq> 0 \<Longrightarrow> f summable_on A"
+  shows   "(\<lambda>x. f x *\<^sub>R c) summable_on A"
+  apply (cases \<open>c \<noteq> 0\<close>) \<comment> \<open>for \<open>c = 0\<close> the family is identically \<open>0\<close>\<close>
+   apply (subst asm_rl[of \<open>(\<lambda>x. f x *\<^sub>R c) = (\<lambda>y. y *\<^sub>R c) o f\<close>], simp add: o_def)
+   apply (rule summable_on_comm_additive)
+  using assms by (auto simp add: scaleR_left.additive_axioms)
+
+
+lemma summable_on_scaleR_right [intro]: \<comment> \<open>scaling a summable family by a fixed scalar \<open>c\<close>\<close>
+  fixes f :: \<open>'a \<Rightarrow> 'b :: real_normed_vector\<close>
+  assumes "c \<noteq> 0 \<Longrightarrow> f summable_on A"
+  shows   "(\<lambda>x. c *\<^sub>R f x) summable_on A"
+  apply (cases \<open>c \<noteq> 0\<close>) \<comment> \<open>for \<open>c = 0\<close> the family is identically \<open>0\<close>\<close>
+   apply (subst asm_rl[of \<open>(\<lambda>x. c *\<^sub>R f x) = (\<lambda>y. c *\<^sub>R y) o f\<close>], simp add: o_def)
+   apply (rule summable_on_comm_additive)
+  using assms by (auto simp add: scaleR_right.additive_axioms)
+
+lemma infsum_scaleR_left: \<comment> \<open>pull the fixed vector \<open>c\<close> out of the infinite sum\<close>
+  fixes c :: \<open>'a :: real_normed_vector\<close>
+  assumes "c \<noteq> 0 \<Longrightarrow> f summable_on A"
+  shows   "infsum (\<lambda>x. f x *\<^sub>R c) A = infsum f A *\<^sub>R c"
+  apply (cases \<open>c \<noteq> 0\<close>)
+   apply (subst asm_rl[of \<open>(\<lambda>x. f x *\<^sub>R c) = (\<lambda>y. y *\<^sub>R c) o f\<close>], simp add: o_def)
+   apply (rule infsum_comm_additive)
+  using assms by (auto simp add: scaleR_left.additive_axioms)
+
+lemma infsum_scaleR_right: \<comment> \<open>no summability assumption needed: in the non-summable case both sides collapse via \<open>infsum_not_exists\<close>\<close>
+  fixes f :: \<open>'a \<Rightarrow> 'b :: real_normed_vector\<close>
+  shows   "infsum (\<lambda>x. c *\<^sub>R f x) A = c *\<^sub>R infsum f A"
+proof -
+  consider (summable) \<open>f summable_on A\<close> | (c0) \<open>c = 0\<close> | (not_summable) \<open>\<not> f summable_on A\<close> \<open>c \<noteq> 0\<close>
+    by auto
+  then show ?thesis
+  proof cases
+    case summable
+    then show ?thesis
+      apply (subst asm_rl[of \<open>(\<lambda>x. c *\<^sub>R f x) = (\<lambda>y. c *\<^sub>R y) o f\<close>], simp add: o_def)
+      apply (rule infsum_comm_additive)
+      using summable by (auto simp add: scaleR_right.additive_axioms)
+  next
+    case c0
+    then show ?thesis by auto
+  next
+    case not_summable
+    have \<open>\<not> (\<lambda>x. c *\<^sub>R f x) summable_on A\<close> \<comment> \<open>scaling back by \<open>inverse c\<close> would recover summability of \<open>f\<close>\<close>
+    proof (rule notI)
+      assume \<open>(\<lambda>x. c *\<^sub>R f x) summable_on A\<close>
+      then have \<open>(\<lambda>x. inverse c *\<^sub>R c *\<^sub>R f x) summable_on A\<close>
+        using summable_on_scaleR_right by blast
+      then have \<open>f summable_on A\<close>
+        using not_summable by auto
+      with not_summable show False
+        by simp
+    qed
+    then show ?thesis
+      by (simp add: infsum_not_exists not_summable(1)) 
+  qed
+qed
+
+
+lemma infsum_Un_Int: \<comment> \<open>inclusion–exclusion for infinite sums over a union of two sets\<close>
+  fixes f :: "'a \<Rightarrow> 'b::{topological_ab_group_add, t2_space}"
+  assumes [simp]: "f summable_on A - B" "f summable_on B - A" \<open>f summable_on A \<inter> B\<close>
+  shows   "infsum f (A \<union> B) = infsum f A + infsum f B - infsum f (A \<inter> B)"
+proof -
+  have [simp]: \<open>f summable_on A\<close> \<comment> \<open>from the disjoint decomposition \<open>A = (A-B) \<union> (A\<inter>B)\<close>\<close>
+    apply (subst asm_rl[of \<open>A = (A-B) \<union> (A\<inter>B)\<close>]) apply auto[1]
+    apply (rule summable_on_Un_disjoint)
+    by auto
+  have \<open>infsum f (A \<union> B) = infsum f A + infsum f (B - A)\<close>
+    apply (subst infsum_Un_disjoint[symmetric])
+    by auto
+  moreover have \<open>infsum f (B - A \<union> A \<inter> B) = infsum f (B - A) + infsum f (A \<inter> B)\<close>
+    by (rule infsum_Un_disjoint) auto
+  moreover have "B - A \<union> A \<inter> B = B"
+    by blast
+  ultimately show ?thesis
+    by auto
+qed
+
+lemma inj_combinator': \<comment> \<open>variant of \<open>inj_combinator\<close> with the pair components in the order \<open>(function, value)\<close>\<close>
+  assumes "x \<notin> F"
+  shows \<open>inj_on (\<lambda>(g, y). g(x := y)) (Pi\<^sub>E F B \<times> B x)\<close>
+proof -
+  have "inj_on ((\<lambda>(y, g). g(x := y)) \<circ> prod.swap) (Pi\<^sub>E F B \<times> B x)" \<comment> \<open>reduce to \<open>inj_combinator\<close> by swapping\<close>
+    using inj_combinator[of x F B] assms by (intro comp_inj_on) (auto simp: product_swap)
+  thus ?thesis
+    by (simp add: o_def)
+qed
+
+lemma infsum_prod_PiE:
+  \<comment> \<open>See also \<open>infsum_prod_PiE_abs\<close> below with incomparable premises.\<close>
+  fixes f :: "'a \<Rightarrow> 'b \<Rightarrow> 'c :: {comm_monoid_mult, topological_semigroup_mult, division_ring, banach}"
+  assumes finite: "finite A"
+  assumes "\<And>x. x \<in> A \<Longrightarrow> f x summable_on B x"
+  assumes "(\<lambda>g. \<Prod>x\<in>A. f x (g x)) summable_on (PiE A B)"
+  shows   "infsum (\<lambda>g. \<Prod>x\<in>A. f x (g x)) (PiE A B) = (\<Prod>x\<in>A. infsum (f x) (B x))"
+proof (use finite assms(2-) in induction) \<comment> \<open>induction on the finite index set \<open>A\<close>\<close>
+  case empty
+  then show ?case 
+    by auto
+next
+  case (insert x F)
+  have pi: \<open>Pi\<^sub>E (insert x F) B = (\<lambda>(g,y). g(x:=y)) ` (Pi\<^sub>E F B \<times> B x)\<close> \<comment> \<open>split off the coordinate \<open>x\<close>\<close>
+    unfolding PiE_insert_eq 
+    by (subst swap_product [symmetric]) (simp add: image_image case_prod_unfold)
+  have prod: \<open>(\<Prod>x'\<in>F. f x' ((p(x:=y)) x')) = (\<Prod>x'\<in>F. f x' (p x'))\<close> for p y
+    by (rule prod.cong) (use insert.hyps in auto)
+  have inj: \<open>inj_on (\<lambda>(g, y). g(x := y)) (Pi\<^sub>E F B \<times> B x)\<close>
+    using \<open>x \<notin> F\<close> by (rule inj_combinator')
+
+  have summable1: \<open>(\<lambda>g. \<Prod>x\<in>insert x F. f x (g x)) summable_on Pi\<^sub>E (insert x F) B\<close>
+    using insert.prems(2) .
+  also have \<open>Pi\<^sub>E (insert x F) B = (\<lambda>(g,y). g(x:=y)) ` (Pi\<^sub>E F B \<times> B x)\<close>
+    by (simp only: pi)
+  also have "(\<lambda>g. \<Prod>x\<in>insert x F. f x (g x)) summable_on \<dots> \<longleftrightarrow>
+               ((\<lambda>g. \<Prod>x\<in>insert x F. f x (g x)) \<circ> (\<lambda>(g,y). g(x:=y))) summable_on (Pi\<^sub>E F B \<times> B x)"
+    using inj by (rule summable_on_reindex)
+  also have "(\<Prod>z\<in>F. f z ((g(x := y)) z)) = (\<Prod>z\<in>F. f z (g z))" for g y
+    using insert.hyps by (intro prod.cong) auto
+  hence "((\<lambda>g. \<Prod>x\<in>insert x F. f x (g x)) \<circ> (\<lambda>(g,y). g(x:=y))) =
+             (\<lambda>(p, y). f x y * (\<Prod>x'\<in>F. f x' (p x')))"
+    using insert.hyps by (auto simp: fun_eq_iff cong: prod.cong_simp)
+  finally have summable2: \<open>(\<lambda>(p, y). f x y * (\<Prod>x'\<in>F. f x' (p x'))) summable_on Pi\<^sub>E F B \<times> B x\<close> .
+
+  then have \<open>(\<lambda>p. \<Sum>\<^sub>\<infinity>y\<in>B x. f x y * (\<Prod>x'\<in>F. f x' (p x'))) summable_on Pi\<^sub>E F B\<close>
+    by (rule summable_on_Sigma_banach)
+  then have \<open>(\<lambda>p. (\<Sum>\<^sub>\<infinity>y\<in>B x. f x y) * (\<Prod>x'\<in>F. f x' (p x'))) summable_on Pi\<^sub>E F B\<close>
+    apply (subst infsum_cmult_left[symmetric])
+    using insert.prems(1) by blast
+  then have summable3: \<open>(\<lambda>p. (\<Prod>x'\<in>F. f x' (p x'))) summable_on Pi\<^sub>E F B\<close> if \<open>(\<Sum>\<^sub>\<infinity>y\<in>B x. f x y) \<noteq> 0\<close>
+    apply (subst (asm) summable_on_cmult_right')
+    using that by auto
+
+  have \<open>(\<Sum>\<^sub>\<infinity>g\<in>Pi\<^sub>E (insert x F) B. \<Prod>x\<in>insert x F. f x (g x))
+     = (\<Sum>\<^sub>\<infinity>(p,y)\<in>Pi\<^sub>E F B \<times> B x. \<Prod>x'\<in>insert x F. f x' ((p(x:=y)) x'))\<close>
+    apply (subst pi)
+    apply (subst infsum_reindex)
+    using inj by (auto simp: o_def case_prod_unfold)
+  also have \<open>\<dots> = (\<Sum>\<^sub>\<infinity>(p, y)\<in>Pi\<^sub>E F B \<times> B x. f x y * (\<Prod>x'\<in>F. f x' ((p(x:=y)) x')))\<close>
+    apply (subst prod.insert)
+    using insert by auto
+  also have \<open>\<dots> = (\<Sum>\<^sub>\<infinity>(p, y)\<in>Pi\<^sub>E F B \<times> B x. f x y * (\<Prod>x'\<in>F. f x' (p x')))\<close>
+    apply (subst prod) by rule
+  also have \<open>\<dots> = (\<Sum>\<^sub>\<infinity>p\<in>Pi\<^sub>E F B. \<Sum>\<^sub>\<infinity>y\<in>B x. f x y * (\<Prod>x'\<in>F. f x' (p x')))\<close>
+    apply (subst infsum_Sigma_banach[symmetric])
+    using summable2 apply blast
+    by fastforce
+  also have \<open>\<dots> = (\<Sum>\<^sub>\<infinity>y\<in>B x. f x y) * (\<Sum>\<^sub>\<infinity>p\<in>Pi\<^sub>E F B. \<Prod>x'\<in>F. f x' (p x'))\<close>
+    apply (subst infsum_cmult_left')
+    apply (subst infsum_cmult_right')
+    by (rule refl)
+  also have \<open>\<dots> = (\<Prod>x\<in>insert x F. infsum (f x) (B x))\<close>
+    apply (subst prod.insert)
+    using \<open>finite F\<close> \<open>x \<notin> F\<close> apply auto[2]
+    apply (cases \<open>infsum (f x) (B x) = 0\<close>)
+     apply simp
+    apply (subst insert.IH)
+      apply (simp add: insert.prems(1))
+     apply (rule summable3)
+    by auto
+  finally show ?case
+    by simp
+qed
+
+lemma infsum_prod_PiE_abs:
+  \<comment> \<open>See also @{thm [source] infsum_prod_PiE} above with incomparable premises.\<close>
+  fixes f :: "'a \<Rightarrow> 'b \<Rightarrow> 'c :: {banach, real_normed_div_algebra, comm_semiring_1}"
+  assumes finite: "finite A"
+  assumes "\<And>x. x \<in> A \<Longrightarrow> f x abs_summable_on B x"
+  shows   "infsum (\<lambda>g. \<Prod>x\<in>A. f x (g x)) (PiE A B) = (\<Prod>x\<in>A. infsum (f x) (B x))"
+proof (use finite assms(2) in induction) \<comment> \<open>induction on the finite index set \<open>A\<close>\<close>
+  case empty
+  then show ?case 
+    by auto
+next
+  case (insert x F)
+  
+  have pi: \<open>Pi\<^sub>E (insert x F) B = (\<lambda>(g,y). g(x:=y)) ` (Pi\<^sub>E F B \<times> B x)\<close> for x F and B :: "'a \<Rightarrow> 'b set"
+    unfolding PiE_insert_eq 
+    by (subst swap_product [symmetric]) (simp add: image_image case_prod_unfold)
+  have prod: \<open>(\<Prod>x'\<in>F. f x' ((p(x:=y)) x')) = (\<Prod>x'\<in>F. f x' (p x'))\<close> for p y
+    by (rule prod.cong) (use insert.hyps in auto)
+  have inj: \<open>inj_on (\<lambda>(g, y). g(x := y)) (Pi\<^sub>E F B \<times> B x)\<close>
+    using \<open>x \<notin> F\<close> by (rule inj_combinator')
+
+  define s where \<open>s x = infsum (\<lambda>y. norm (f x y)) (B x)\<close> for x \<comment> \<open>\<open>s x\<close> is the sum of norms over \<open>B x\<close>\<close>
+
+  have *: \<open>(\<Sum>p\<in>P. norm (\<Prod>x\<in>F. f x (p x))) \<le> prod s F\<close> 
+    if P: \<open>P \<subseteq> Pi\<^sub>E F B\<close> and [simp]: \<open>finite P\<close> \<open>finite F\<close> 
+      and sum: \<open>\<And>x. x \<in> F \<Longrightarrow> f x abs_summable_on B x\<close> for P F
+  proof -
+    define B' where \<open>B' x = {p x| p. p\<in>P}\<close> for x \<comment> \<open>finite sets of values actually used by \<open>P\<close>\<close>
+    have [simp]: \<open>finite (B' x)\<close> for x
+      using that by (auto simp: B'_def)
+    have [simp]: \<open>finite (Pi\<^sub>E F B')\<close>
+      by (simp add: finite_PiE)
+    have [simp]: \<open>P \<subseteq> Pi\<^sub>E F B'\<close>
+      using that by (auto simp: B'_def)
+    have B'B: \<open>B' x \<subseteq> B x\<close> if \<open>x \<in> F\<close> for x
+      unfolding B'_def using P that 
+      by auto
+    have s_bound: \<open>(\<Sum>y\<in>B' x. norm (f x y)) \<le> s x\<close> if \<open>x \<in> F\<close> for x
+      apply (simp_all add: s_def flip: infsum_finite)
+      apply (rule infsum_mono_neutral)
+      using that sum B'B by auto
+    have \<open>(\<Sum>p\<in>P. norm (\<Prod>x\<in>F. f x (p x))) \<le> (\<Sum>p\<in>Pi\<^sub>E F B'. norm (\<Prod>x\<in>F. f x (p x)))\<close>
+      apply (rule sum_mono2)
+      by auto
+    also have \<open>\<dots> = (\<Sum>p\<in>Pi\<^sub>E F B'. \<Prod>x\<in>F. norm (f x (p x)))\<close>
+      apply (subst prod_norm[symmetric])
+      by simp
+    also have \<open>\<dots> = (\<Prod>x\<in>F. \<Sum>y\<in>B' x. norm (f x y))\<close>
+    proof (use \<open>finite F\<close> in induction)
+      case empty
+      then show ?case by simp
+    next
+      case (insert x F)
+      have aux: \<open>a = b \<Longrightarrow> c * a = c * b\<close> for a b c :: real
+        by auto
+      have inj: \<open>inj_on (\<lambda>(g, y). g(x := y)) (Pi\<^sub>E F B' \<times> B' x)\<close>
+        by (rule inj_combinator') (use insert.hyps in auto)
+      have \<open>(\<Sum>p\<in>Pi\<^sub>E (insert x F) B'. \<Prod>x\<in>insert x F. norm (f x (p x)))
+         =  (\<Sum>(p,y)\<in>Pi\<^sub>E F B' \<times> B' x. \<Prod>x'\<in>insert x F. norm (f x' ((p(x := y)) x')))\<close>
+        apply (subst pi)
+        apply (subst sum.reindex)
+        using inj by (auto simp: case_prod_unfold)
+      also have \<open>\<dots> = (\<Sum>(p,y)\<in>Pi\<^sub>E F B' \<times> B' x. norm (f x y) * (\<Prod>x'\<in>F. norm (f x' ((p(x := y)) x'))))\<close>
+        apply (subst prod.insert)
+        using insert.hyps by (auto simp: case_prod_unfold)
+      also have \<open>\<dots> = (\<Sum>(p, y)\<in>Pi\<^sub>E F B' \<times> B' x. norm (f x y) * (\<Prod>x'\<in>F. norm (f x' (p x'))))\<close>
+        apply (rule sum.cong)
+         apply blast
+        unfolding case_prod_unfold
+        apply (rule aux)
+        apply (rule prod.cong)
+        using insert.hyps(2) by auto
+      also have \<open>\<dots> = (\<Sum>y\<in>B' x. norm (f x y)) * (\<Sum>p\<in>Pi\<^sub>E F B'. \<Prod>x'\<in>F. norm (f x' (p x')))\<close>
+        apply (subst sum_product)
+        apply (subst sum.swap)
+        apply (subst sum.cartesian_product)
+        by simp
+      also have \<open>\<dots> = (\<Sum>y\<in>B' x. norm (f x y)) * (\<Prod>x\<in>F. \<Sum>y\<in>B' x. norm (f x y))\<close>
+        by (simp add: insert.IH)
+      also have \<open>\<dots> = (\<Prod>x\<in>insert x F. \<Sum>y\<in>B' x. norm (f x y))\<close>
+        using insert.hyps(1) insert.hyps(2) by force
+      finally show ?case .
+    qed
+    also have \<open>\<dots> = (\<Prod>x\<in>F. \<Sum>\<^sub>\<infinity>y\<in>B' x. norm (f x y))\<close>
+      by auto
+    also have \<open>\<dots> \<le> (\<Prod>x\<in>F. s x)\<close>
+      apply (rule prod_mono)
+      apply auto
+      apply (simp add: sum_nonneg)
+      using s_bound by presburger
+    finally show ?thesis .
+  qed
+  have \<open>(\<lambda>g. \<Prod>x\<in>insert x F. f x (g x)) abs_summable_on Pi\<^sub>E (insert x F) B\<close>
+    apply (rule nonneg_bdd_above_summable_on)
+     apply (simp; fail)
+    apply (rule bdd_aboveI[where M=\<open>\<Prod>x'\<in>insert x F. s x'\<close>])
+    using * insert.hyps insert.prems by blast
+
+  also have \<open>Pi\<^sub>E (insert x F) B = (\<lambda>(g,y). g(x:=y)) ` (Pi\<^sub>E F B \<times> B x)\<close>
+    by (simp only: pi)
+  also have "(\<lambda>g. \<Prod>x\<in>insert x F. f x (g x)) abs_summable_on \<dots> \<longleftrightarrow>
+               ((\<lambda>g. \<Prod>x\<in>insert x F. f x (g x)) \<circ> (\<lambda>(g,y). g(x:=y))) abs_summable_on (Pi\<^sub>E F B \<times> B x)"
+    using inj by (subst summable_on_reindex) (auto simp: o_def)
+  also have "(\<Prod>z\<in>F. f z ((g(x := y)) z)) = (\<Prod>z\<in>F. f z (g z))" for g y
+    using insert.hyps by (intro prod.cong) auto
+  hence "((\<lambda>g. \<Prod>x\<in>insert x F. f x (g x)) \<circ> (\<lambda>(g,y). g(x:=y))) =
+             (\<lambda>(p, y). f x y * (\<Prod>x'\<in>F. f x' (p x')))"
+    using insert.hyps by (auto simp: fun_eq_iff cong: prod.cong_simp)
+  finally have summable2: \<open>(\<lambda>(p, y). f x y * (\<Prod>x'\<in>F. f x' (p x'))) abs_summable_on Pi\<^sub>E F B \<times> B x\<close> .
+
+  have \<open>(\<Sum>\<^sub>\<infinity>g\<in>Pi\<^sub>E (insert x F) B. \<Prod>x\<in>insert x F. f x (g x))
+     = (\<Sum>\<^sub>\<infinity>(p,y)\<in>Pi\<^sub>E F B \<times> B x. \<Prod>x'\<in>insert x F. f x' ((p(x:=y)) x'))\<close>
+    apply (subst pi)
+    apply (subst infsum_reindex)
+    using inj by (auto simp: o_def case_prod_unfold)
+  also have \<open>\<dots> = (\<Sum>\<^sub>\<infinity>(p, y)\<in>Pi\<^sub>E F B \<times> B x. f x y * (\<Prod>x'\<in>F. f x' ((p(x:=y)) x')))\<close>
+    apply (subst prod.insert)
+    using insert by auto
+  also have \<open>\<dots> = (\<Sum>\<^sub>\<infinity>(p, y)\<in>Pi\<^sub>E F B \<times> B x. f x y * (\<Prod>x'\<in>F. f x' (p x')))\<close>
+    apply (subst prod) by rule
+  also have \<open>\<dots> = (\<Sum>\<^sub>\<infinity>p\<in>Pi\<^sub>E F B. \<Sum>\<^sub>\<infinity>y\<in>B x. f x y * (\<Prod>x'\<in>F. f x' (p x')))\<close>
+    apply (subst infsum_Sigma_banach[symmetric])
+    using summable2 abs_summable_summable apply blast
+    by fastforce
+  also have \<open>\<dots> = (\<Sum>\<^sub>\<infinity>y\<in>B x. f x y) * (\<Sum>\<^sub>\<infinity>p\<in>Pi\<^sub>E F B. \<Prod>x'\<in>F. f x' (p x'))\<close>
+    apply (subst infsum_cmult_left')
+    apply (subst infsum_cmult_right')
+    by (rule refl)
+  also have \<open>\<dots> = (\<Prod>x\<in>insert x F. infsum (f x) (B x))\<close>
+    apply (subst prod.insert)
+    using \<open>finite F\<close> \<open>x \<notin> F\<close> apply auto[2]
+    apply (cases \<open>infsum (f x) (B x) = 0\<close>)
+     apply (simp; fail)
+    apply (subst insert.IH)
+      apply (auto simp add: insert.prems(1))
+    done
+  finally show ?case
+    by simp
+qed
+
+
+
+subsection \<open>Absolute convergence\<close>
+
+lemma abs_summable_countable: \<comment> \<open>an absolutely summable family has only countably many nonzero terms\<close>
+  assumes \<open>f abs_summable_on A\<close>
+  shows \<open>countable {x\<in>A. f x \<noteq> 0}\<close>
+proof -
+  have fin: \<open>finite {x\<in>A. norm (f x) \<ge> t}\<close> if \<open>t > 0\<close> for t \<comment> \<open>otherwise partial sums exceed every bound\<close>
+  proof (rule ccontr)
+    assume *: \<open>infinite {x \<in> A. t \<le> norm (f x)}\<close>
+    have \<open>infsum (\<lambda>x. norm (f x)) A \<ge> b\<close> for b
+    proof -
+      obtain b' where b': \<open>of_nat b' \<ge> b / t\<close>
+        by (meson real_arch_simple)
+      from *
+      obtain F where cardF: \<open>card F \<ge> b'\<close> and \<open>finite F\<close> and F: \<open>F \<subseteq> {x \<in> A. t \<le> norm (f x)}\<close>
+        by (meson finite_if_finite_subsets_card_bdd nle_le)
+      have \<open>b \<le> of_nat b' * t\<close>
+        using b' \<open>t > 0\<close> by (simp add: field_simps split: if_splits)
+      also have \<open>\<dots> \<le> of_nat (card F) * t\<close>
+        by (simp add: cardF that)
+      also have \<open>\<dots> = sum (\<lambda>x. t) F\<close>
+        by simp
+      also have \<open>\<dots> \<le> sum (\<lambda>x. norm (f x)) F\<close>
+        by (metis (mono_tags, lifting) F in_mono mem_Collect_eq sum_mono)
+      also have \<open>\<dots> = infsum (\<lambda>x. norm (f x)) F\<close>
+        using \<open>finite F\<close> by (rule infsum_finite[symmetric])
+      also have \<open>\<dots> \<le> infsum (\<lambda>x. norm (f x)) A\<close>
+        by (rule infsum_mono_neutral) (use \<open>finite F\<close> assms F in auto)
+      finally show ?thesis .
+    qed
+    then show False
+      by (meson gt_ex linorder_not_less)
+  qed
+  have \<open>countable (\<Union>i\<in>{1..}. {x\<in>A. norm (f x) \<ge> 1/of_nat i})\<close> \<comment> \<open>countable union of finite level sets\<close>
+    by (rule countable_UN) (use fin in \<open>auto intro!: countable_finite\<close>)
+  also have \<open>\<dots> = {x\<in>A. f x \<noteq> 0}\<close>
+  proof safe
+    fix x assume x: "x \<in> A" "f x \<noteq> 0"
+    define i where "i = max 1 (nat (ceiling (1 / norm (f x))))"
+    have "i \<ge> 1"
+      by (simp add: i_def)
+    moreover have "real i \<ge> 1 / norm (f x)"
+      unfolding i_def by linarith
+    hence "1 / real i \<le> norm (f x)" using \<open>f x \<noteq> 0\<close>
+      by (auto simp: divide_simps mult_ac)
+    ultimately show "x \<in> (\<Union>i\<in>{1..}. {x \<in> A. 1 / real i \<le> norm (f x)})"
+      using \<open>x \<in> A\<close> by auto
+  qed auto
+  finally show ?thesis .
+qed
+
+(* Logically belongs in the section about reals, but needed as a dependency here *)
+lemma summable_on_iff_abs_summable_on_real: \<comment> \<open>for real-valued families, summability and absolute summability coincide\<close>
+  fixes f :: \<open>'a \<Rightarrow> real\<close>
+  shows \<open>f summable_on A \<longleftrightarrow> f abs_summable_on A\<close>
+proof (rule iffI)
+  assume \<open>f summable_on A\<close>
+  define n A\<^sub>p A\<^sub>n
+    where \<open>n x = norm (f x)\<close> and \<open>A\<^sub>p = {x\<in>A. f x \<ge> 0}\<close> and \<open>A\<^sub>n = {x\<in>A. f x < 0}\<close> for x \<comment> \<open>split \<open>A\<close> by the sign of \<open>f\<close>\<close>
+  have [simp]: \<open>A\<^sub>p \<union> A\<^sub>n = A\<close> \<open>A\<^sub>p \<inter> A\<^sub>n = {}\<close>
+    by (auto simp: A\<^sub>p_def A\<^sub>n_def)
+  from \<open>f summable_on A\<close> have [simp]: \<open>f summable_on A\<^sub>p\<close> \<open>f summable_on A\<^sub>n\<close>
+    using A\<^sub>p_def A\<^sub>n_def summable_on_subset_banach by fastforce+
+  then have [simp]: \<open>n summable_on A\<^sub>p\<close>
+    apply (subst summable_on_cong[where g=f])
+    by (simp_all add: A\<^sub>p_def n_def)
+  moreover have [simp]: \<open>n summable_on A\<^sub>n\<close>
+    apply (subst summable_on_cong[where g=\<open>\<lambda>x. - f x\<close>])
+     apply (simp add: A\<^sub>n_def n_def[abs_def])
+    by (simp add: summable_on_uminus)
+  ultimately have [simp]: \<open>n summable_on (A\<^sub>p \<union> A\<^sub>n)\<close>
+    apply (rule summable_on_Un_disjoint) by simp
+  then show \<open>n summable_on A\<close>
+    by simp
+next
+  show \<open>f abs_summable_on A \<Longrightarrow> f summable_on A\<close>
+    using abs_summable_summable by blast
+qed
+
+lemma abs_summable_on_Sigma_iff: \<comment> \<open>Fubini-type criterion: absolute summability on \<open>Sigma A B\<close> \<longleftrightarrow> row-wise absolute summability plus absolute summability of the row sums\<close>
+  shows   "f abs_summable_on Sigma A B \<longleftrightarrow>
+             (\<forall>x\<in>A. (\<lambda>y. f (x, y)) abs_summable_on B x) \<and>
+             ((\<lambda>x. infsum (\<lambda>y. norm (f (x, y))) (B x)) abs_summable_on A)"
+proof (intro iffI conjI ballI)
+  assume asm: \<open>f abs_summable_on Sigma A B\<close>
+  then have \<open>(\<lambda>x. infsum (\<lambda>y. norm (f (x,y))) (B x)) summable_on A\<close>
+    apply (rule_tac summable_on_Sigma_banach)
+    by (auto simp: case_prod_unfold)
+  then show \<open>(\<lambda>x. \<Sum>\<^sub>\<infinity>y\<in>B x. norm (f (x, y))) abs_summable_on A\<close>
+    using summable_on_iff_abs_summable_on_real by force
+
+  show \<open>(\<lambda>y. f (x, y)) abs_summable_on B x\<close> if \<open>x \<in> A\<close> for x
+  proof -
+    from asm have \<open>f abs_summable_on Pair x ` B x\<close> \<comment> \<open>restrict to the single row \<open>x\<close>\<close>
+      apply (rule summable_on_subset_banach)
+      using that by auto
+    then show ?thesis
+      apply (subst (asm) summable_on_reindex)
+      by (auto simp: o_def inj_on_def)
+  qed
+next
+  assume asm: \<open>(\<forall>x\<in>A. (\<lambda>xa. f (x, xa)) abs_summable_on B x) \<and>
+    (\<lambda>x. \<Sum>\<^sub>\<infinity>y\<in>B x. norm (f (x, y))) abs_summable_on A\<close>
+  have \<open>(\<Sum>xy\<in>F. norm (f xy)) \<le> (\<Sum>\<^sub>\<infinity>x\<in>A. \<Sum>\<^sub>\<infinity>y\<in>B x. norm (f (x, y)))\<close>
+    if \<open>F \<subseteq> Sigma A B\<close> and [simp]: \<open>finite F\<close> for F
+  proof -
+    have [simp]: \<open>(SIGMA x:fst ` F. {y. (x, y) \<in> F}) = F\<close>
+      by (auto intro!: set_eqI simp add: Domain.DomainI fst_eq_Domain)
+    have [simp]: \<open>finite {y. (x, y) \<in> F}\<close> for x
+      by (metis \<open>finite F\<close> Range.intros finite_Range finite_subset mem_Collect_eq subsetI)
+    have \<open>(\<Sum>xy\<in>F. norm (f xy)) = (\<Sum>x\<in>fst ` F. \<Sum>y\<in>{y. (x,y)\<in>F}. norm (f (x,y)))\<close>
+      apply (subst sum.Sigma)
+      by auto
+    also have \<open>\<dots> = (\<Sum>\<^sub>\<infinity>x\<in>fst ` F. \<Sum>\<^sub>\<infinity>y\<in>{y. (x,y)\<in>F}. norm (f (x,y)))\<close>
+      apply (subst infsum_finite)
+      by auto
+    also have \<open>\<dots> \<le> (\<Sum>\<^sub>\<infinity>x\<in>fst ` F. \<Sum>\<^sub>\<infinity>y\<in>B x. norm (f (x,y)))\<close>
+      apply (rule infsum_mono)
+        apply (simp; fail)
+       apply (simp; fail)
+      apply (rule infsum_mono_neutral)
+      using asm that(1) by auto
+    also have \<open>\<dots> \<le> (\<Sum>\<^sub>\<infinity>x\<in>A. \<Sum>\<^sub>\<infinity>y\<in>B x. norm (f (x,y)))\<close>
+      by (rule infsum_mono_neutral) (use asm that(1) in \<open>auto simp add: infsum_nonneg\<close>)
+    finally show ?thesis .
+  qed
+  then show \<open>f abs_summable_on Sigma A B\<close>
+    by (intro nonneg_bdd_above_summable_on) (auto simp: bdd_above_def)
+qed
+
+lemma abs_summable_on_comparison_test: \<comment> \<open>domination by an absolutely summable family implies absolute summability\<close>
+  assumes "g abs_summable_on A"
+  assumes "\<And>x. x \<in> A \<Longrightarrow> norm (f x) \<le> norm (g x)"
+  shows   "f abs_summable_on A"
+proof (rule nonneg_bdd_above_summable_on)
+  show "bdd_above (sum (\<lambda>x. norm (f x)) ` {F. F \<subseteq> A \<and> finite F})"
+  proof (rule bdd_aboveI2)
+    fix F assume F: "F \<in> {F. F \<subseteq> A \<and> finite F}"
+    have \<open>sum (\<lambda>x. norm (f x)) F \<le> sum (\<lambda>x. norm (g x)) F\<close> \<comment> \<open>termwise comparison on the finite set \<open>F\<close>\<close>
+      using assms F by (intro sum_mono) auto
+    also have \<open>\<dots> = infsum (\<lambda>x. norm (g x)) F\<close>
+      using F by simp
+    also have \<open>\<dots> \<le> infsum (\<lambda>x. norm (g x)) A\<close>
+    proof (rule infsum_mono_neutral)
+      show "g abs_summable_on F"
+        by (rule summable_on_subset_banach[OF assms(1)]) (use F in auto)
+    qed (use F assms in auto)
+    finally show "(\<Sum>x\<in>F. norm (f x)) \<le> (\<Sum>\<^sub>\<infinity>x\<in>A. norm (g x))" .
+  qed
+qed auto
+
+lemma abs_summable_iff_bdd_above: \<comment> \<open>absolute summability \<longleftrightarrow> boundedness of the finite partial sums of norms\<close>
+  fixes f :: \<open>'a \<Rightarrow> 'b::real_normed_vector\<close>
+  shows \<open>f abs_summable_on A \<longleftrightarrow> bdd_above (sum (\<lambda>x. norm (f x)) ` {F. F\<subseteq>A \<and> finite F})\<close>
+proof (rule iffI)
+  assume \<open>f abs_summable_on A\<close>
+  show \<open>bdd_above (sum (\<lambda>x. norm (f x)) ` {F. F \<subseteq> A \<and> finite F})\<close>
+  proof (rule bdd_aboveI2)
+    fix F assume F: "F \<in> {F. F \<subseteq> A \<and> finite F}"
+    show "(\<Sum>x\<in>F. norm (f x)) \<le> (\<Sum>\<^sub>\<infinity>x\<in>A. norm (f x))" \<comment> \<open>the infinite sum itself is an upper bound\<close>
+      by (rule finite_sum_le_infsum) (use \<open>f abs_summable_on A\<close> F in auto)
+  qed
+next
+  assume \<open>bdd_above (sum (\<lambda>x. norm (f x)) ` {F. F\<subseteq>A \<and> finite F})\<close>
+  then show \<open>f abs_summable_on A\<close>
+    by (simp add: nonneg_bdd_above_summable_on)
+qed
+
+lemma abs_summable_product: \<comment> \<open>pointwise products of square-summable families are absolutely summable, via \<open>norm (x i * y i) \<le> norm (x i * x i) + norm (y i * y i)\<close>\<close>
+  fixes x :: "'a \<Rightarrow> 'b::{real_normed_div_algebra,banach,second_countable_topology}"
+  assumes x2_sum: "(\<lambda>i. (x i) * (x i)) abs_summable_on A"
+    and y2_sum: "(\<lambda>i. (y i) * (y i)) abs_summable_on A"
+  shows "(\<lambda>i. x i * y i) abs_summable_on A"
+proof (rule nonneg_bdd_above_summable_on)
+  show "bdd_above (sum (\<lambda>xa. norm (x xa * y xa)) ` {F. F \<subseteq> A \<and> finite F})"
+  proof (rule bdd_aboveI2)
+    fix F assume F: \<open>F \<in> {F. F \<subseteq> A \<and> finite F}\<close>
+    then have r1: "finite F" and b4: "F \<subseteq> A"
+      by auto
+  
+    have a1: "(\<Sum>\<^sub>\<infinity>i\<in>F. norm (x i * x i)) \<le> (\<Sum>\<^sub>\<infinity>i\<in>A. norm (x i * x i))"
+      apply (rule infsum_mono_neutral)
+      using b4 r1 x2_sum by auto
+
+    have "norm (x i * y i) \<le> norm (x i * x i) + norm (y i * y i)" for i \<comment> \<open>AM–GM-style pointwise bound\<close>
+      unfolding norm_mult by (smt mult_left_mono mult_nonneg_nonneg mult_right_mono norm_ge_zero)
+    hence "(\<Sum>i\<in>F. norm (x i * y i)) \<le> (\<Sum>i\<in>F. norm (x i * x i) + norm (y i * y i))"
+      by (simp add: sum_mono)
+    also have "\<dots> = (\<Sum>i\<in>F. norm (x i * x i)) + (\<Sum>i\<in>F. norm (y i * y i))"
+      by (simp add: sum.distrib)
+    also have "\<dots> = (\<Sum>\<^sub>\<infinity>i\<in>F. norm (x i * x i)) + (\<Sum>\<^sub>\<infinity>i\<in>F. norm (y i * y i))"
+      by (simp add: \<open>finite F\<close>)
+    also have "\<dots> \<le> (\<Sum>\<^sub>\<infinity>i\<in>A. norm (x i * x i)) + (\<Sum>\<^sub>\<infinity>i\<in>A. norm (y i * y i))"
+      using F assms
+      by (intro add_mono infsum_mono2) auto
+    finally show \<open>(\<Sum>xa\<in>F. norm (x xa * y xa)) \<le> (\<Sum>\<^sub>\<infinity>i\<in>A. norm (x i * x i)) + (\<Sum>\<^sub>\<infinity>i\<in>A. norm (y i * y i))\<close>
+      by simp
+  qed
+qed auto
+
 subsection \<open>Extended reals and nats\<close>
 
 lemma summable_on_ennreal[simp]: \<open>(f::_ \<Rightarrow> ennreal) summable_on S\<close>
-  apply (rule pos_summable_on_complete) by simp
+  by (rule nonneg_summable_on_complete) simp
 
 lemma summable_on_enat[simp]: \<open>(f::_ \<Rightarrow> enat) summable_on S\<close>
-  apply (rule pos_summable_on_complete) by simp
+  by (rule nonneg_summable_on_complete) simp
 
 lemma has_sum_superconst_infinite_ennreal:
   fixes f :: \<open>'a \<Rightarrow> ennreal\<close>
@@ -1629,8 +2244,7 @@
         by (simp add: mult.commute)
       also have \<open>\<dots> \<le> sum f Y\<close>
         using geqb by (meson subset_eq sum_mono that(3))
-      finally show ?thesis
-        by -
+      finally show ?thesis .
     qed
     ultimately show \<open>\<forall>\<^sub>F x in finite_subsets_at_top S. y < sum f x\<close>
       unfolding eventually_finite_subsets_at_top 
@@ -1657,19 +2271,20 @@
 proof -
   obtain b' where b': \<open>e2ennreal b' = b\<close> and \<open>b' > 0\<close>
     using b by blast
-  have *: \<open>infsum (e2ennreal o f) S = \<infinity>\<close>
-    apply (rule infsum_superconst_infinite_ennreal[where b=b'])
-    using assms \<open>b' > 0\<close> b' e2ennreal_mono apply auto
-    by (metis dual_order.strict_iff_order enn2ereal_e2ennreal le_less_linear zero_ennreal_def)
+  have "0 < e2ennreal b"
+    using b' b
+    by (metis dual_order.refl enn2ereal_e2ennreal gr_zeroI order_less_le zero_ennreal.abs_eq)
+  hence *: \<open>infsum (e2ennreal o f) S = \<infinity>\<close>
+    using assms b'
+    by (intro infsum_superconst_infinite_ennreal[where b=b']) (auto intro!: e2ennreal_mono)
   have \<open>infsum f S = infsum (enn2ereal o (e2ennreal o f)) S\<close>
-    by (smt (verit, best) b comp_apply dual_order.trans enn2ereal_e2ennreal geqb infsum_cong less_imp_le)
+    using geqb b by (intro infsum_cong) (fastforce simp: enn2ereal_e2ennreal)
   also have \<open>\<dots> = enn2ereal \<infinity>\<close>
     apply (subst infsum_comm_additive_general)
     using * by (auto simp: continuous_at_enn2ereal)
   also have \<open>\<dots> = \<infinity>\<close>
     by simp
-  finally show ?thesis
-    by -
+  finally show ?thesis .
 qed
 
 lemma has_sum_superconst_infinite_ereal:
@@ -1704,7 +2319,7 @@
   assumes \<open>b > 0\<close>
   assumes \<open>infinite S\<close>
   shows "has_sum f S \<infinity>"
-  by (metis assms i0_lb has_sum_infsum infsum_superconst_infinite_enat pos_summable_on_complete)
+  by (metis assms i0_lb has_sum_infsum infsum_superconst_infinite_enat nonneg_summable_on_complete)
 
 text \<open>This lemma helps to relate a real-valued infsum to a supremum over extended nonnegative reals.\<close>
 
@@ -1719,12 +2334,11 @@
     apply (subst sum_ennreal[symmetric])
     using assms by auto
   also have \<open>\<dots> = (SUP F\<in>{F. finite F \<and> F \<subseteq> A}. (ennreal (sum f F)))\<close>
-    apply (subst pos_infsum_complete, simp)
+    apply (subst nonneg_infsum_complete, simp)
     apply (rule SUP_cong, blast)
     apply (subst sum_ennreal[symmetric])
     using fnn by auto
-  finally show ?thesis
-    by -
+  finally show ?thesis .
 qed
 
 text \<open>This lemma helps to related a real-valued infsum to a supremum over extended reals.\<close>
@@ -1739,10 +2353,8 @@
     apply (rule infsum_comm_additive_general[symmetric])
     using assms by auto
   also have \<open>\<dots> = (SUP F\<in>{F. finite F \<and> F \<subseteq> A}. (ereal (sum f F)))\<close>
-    apply (subst pos_infsum_complete)
-    by (simp_all add: assms)[2]
-  finally show ?thesis
-    by -
+    by (subst nonneg_infsum_complete) (simp_all add: assms)
+  finally show ?thesis .
 qed
 
 
@@ -1776,33 +2388,11 @@
   shows "has_sum f A (SUP F\<in>{F. finite F \<and> F \<subseteq> A}. (sum f F))"
   by (metis (mono_tags, lifting) assms has_sum_infsum infsum_nonneg_is_SUPREMUM_real)
 
-
-lemma summable_on_iff_abs_summable_on_real:
+lemma summable_countable_real:
   fixes f :: \<open>'a \<Rightarrow> real\<close>
-  shows \<open>f summable_on A \<longleftrightarrow> f abs_summable_on A\<close>
-proof (rule iffI)
-  assume \<open>f summable_on A\<close>
-  define n A\<^sub>p A\<^sub>n
-    where \<open>n x = norm (f x)\<close> and \<open>A\<^sub>p = {x\<in>A. f x \<ge> 0}\<close> and \<open>A\<^sub>n = {x\<in>A. f x < 0}\<close> for x
-  have [simp]: \<open>A\<^sub>p \<union> A\<^sub>n = A\<close> \<open>A\<^sub>p \<inter> A\<^sub>n = {}\<close>
-    by (auto simp: A\<^sub>p_def A\<^sub>n_def)
-  from \<open>f summable_on A\<close> have [simp]: \<open>f summable_on A\<^sub>p\<close> \<open>f summable_on A\<^sub>n\<close>
-    using A\<^sub>p_def A\<^sub>n_def summable_on_subset_banach by fastforce+
-  then have [simp]: \<open>n summable_on A\<^sub>p\<close>
-    apply (subst summable_on_cong[where g=f])
-    by (simp_all add: A\<^sub>p_def n_def)
-  moreover have [simp]: \<open>n summable_on A\<^sub>n\<close>
-    apply (subst summable_on_cong[where g=\<open>\<lambda>x. - f x\<close>])
-     apply (simp add: A\<^sub>n_def n_def[abs_def])
-    by (simp add: summable_on_uminus)
-  ultimately have [simp]: \<open>n summable_on (A\<^sub>p \<union> A\<^sub>n)\<close>
-    apply (rule summable_on_Un_disjoint) by simp
-  then show \<open>n summable_on A\<close>
-    by simp
-next
-  show \<open>f abs_summable_on A \<Longrightarrow> f summable_on A\<close>
-    using abs_summable_summable by blast
-qed
+  assumes \<open>f summable_on A\<close>
+  shows \<open>countable {x\<in>A. f x \<noteq> 0}\<close>
+  using abs_summable_countable assms summable_on_iff_abs_summable_on_real by blast
 
 subsection \<open>Complex numbers\<close>
 
@@ -1854,7 +2444,7 @@
   apply (rule summable_on_comm_additive[where f=Im, unfolded o_def])
   using assms by (auto intro!: additive.intro)
 
-lemma infsum_0D_complex:
+lemma nonneg_infsum_le_0D_complex:
   fixes f :: "'a \<Rightarrow> complex"
   assumes "infsum f A \<le> 0"
     and abs_sum: "f summable_on A"
@@ -1863,22 +2453,22 @@
   shows "f x = 0"
 proof -
   have \<open>Im (f x) = 0\<close>
-    apply (rule infsum_0D[where A=A])
+    apply (rule nonneg_infsum_le_0D[where A=A])
     using assms
     by (auto simp add: infsum_Im summable_on_Im less_eq_complex_def)
   moreover have \<open>Re (f x) = 0\<close>
-    apply (rule infsum_0D[where A=A])
+    apply (rule nonneg_infsum_le_0D[where A=A])
     using assms by (auto simp add: summable_on_Re infsum_Re less_eq_complex_def)
   ultimately show ?thesis
     by (simp add: complex_eqI)
 qed
 
-lemma has_sum_0D_complex:
+lemma nonneg_has_sum_le_0D_complex:
   fixes f :: "'a \<Rightarrow> complex"
   assumes "has_sum f A a" and \<open>a \<le> 0\<close>
     and "\<And>x. x \<in> A \<Longrightarrow> f x \<ge> 0" and "x \<in> A"
   shows "f x = 0"
-  by (metis assms infsumI infsum_0D_complex summable_on_def)
+  by (metis assms infsumI nonneg_infsum_le_0D_complex summable_on_def)
 
 text \<open>The lemma @{thm [source] infsum_mono_neutral} above applies to various linear ordered monoids such as the reals but not to the complex numbers.
       Thus we have a separate corollary for those:\<close>
@@ -1929,14 +2519,16 @@
   shows "infsum (\<lambda>x. cmod (f x)) M = cmod (infsum f M)"
 proof -
   have \<open>complex_of_real (infsum (\<lambda>x. cmod (f x)) M) = infsum (\<lambda>x. complex_of_real (cmod (f x))) M\<close>
-    apply (rule infsum_comm_additive[symmetric, unfolded o_def])
-    apply auto
-    apply (simp add: additive.intro)
-    by (smt (verit, best) assms(1) cmod_eq_Re fnn summable_on_Re summable_on_cong less_eq_complex_def zero_complex.simps(1) zero_complex.simps(2))
+  proof (rule infsum_comm_additive[symmetric, unfolded o_def])
+    have "(\<lambda>z. Re (f z)) summable_on M"
+      using assms summable_on_Re by blast
+    also have "?this \<longleftrightarrow> f abs_summable_on M"
+      using fnn by (intro summable_on_cong) (auto simp: less_eq_complex_def cmod_def)
+    finally show \<dots> .
+  qed (auto simp: additive_def)
   also have \<open>\<dots> = infsum f M\<close>
     apply (rule infsum_cong)
-    using fnn
-    using cmod_eq_Re complex_is_Real_iff less_eq_complex_def by force
+    using fnn cmod_eq_Re complex_is_Real_iff less_eq_complex_def by force
   finally show ?thesis
     by (metis abs_of_nonneg infsum_def le_less_trans norm_ge_zero norm_infsum_bound norm_of_real not_le order_refl)
 qed
@@ -1965,8 +2557,8 @@
   have *: \<open>(\<lambda>x. nr x + ni x) summable_on A\<close>
     apply (rule summable_on_add) by auto
   show \<open>n summable_on A\<close>
-    apply (rule pos_summable_on)
-     apply (simp add: n_def)
+    apply (rule nonneg_bdd_above_summable_on)
+     apply (simp add: n_def; fail)
     apply (rule bdd_aboveI[where M=\<open>infsum (\<lambda>x. nr x + ni x) A\<close>])
     using * n_sum by (auto simp flip: infsum_finite simp: ni_def[abs_def] nr_def[abs_def] intro!: infsum_mono_neutral)
 next
@@ -1974,4 +2566,11 @@
     using abs_summable_summable by blast
 qed
 
+lemma summable_countable_complex:
+  fixes f :: \<open>'a \<Rightarrow> complex\<close>
+  assumes \<open>f summable_on A\<close>
+  shows \<open>countable {x\<in>A. f x \<noteq> 0}\<close>
+  using abs_summable_countable assms summable_on_iff_abs_summable_on_complex by blast
+
 end
+
--- a/src/HOL/BNF_Def.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/BNF_Def.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -139,7 +139,7 @@
 
 lemma pick_middlep:
   "(P OO Q) a c \<Longrightarrow> P a (pick_middlep P Q a c) \<and> Q (pick_middlep P Q a c) c"
-  unfolding pick_middlep_def apply(rule someI_ex) by auto
+  unfolding pick_middlep_def by (rule someI_ex) auto
 
 definition fstOp where
   "fstOp P Q ac = (fst ac, pick_middlep P Q (fst ac) (snd ac))"
--- a/src/HOL/Bali/Term.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Bali/Term.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -110,7 +110,7 @@
                | UNot     \<comment> \<open>{\tt !} logical complement\<close>
 
 \<comment> \<open>function codes for binary operations\<close>
-datatype binop = Mul     \<comment> \<open>{\tt * }   multiplication\<close>
+datatype binop = Mul     \<comment> \<open>{\tt *}   multiplication\<close>
                | Div     \<comment> \<open>{\tt /}   division\<close>
                | Mod     \<comment> \<open>{\tt \%}   remainder\<close>
                | Plus    \<comment> \<open>{\tt +}   addition\<close>
--- a/src/HOL/Computational_Algebra/Factorial_Ring.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Computational_Algebra/Factorial_Ring.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -920,7 +920,7 @@
  assumes "is_unit x"
  shows "infinite {n. x^n dvd y}"
 proof -
- from `is_unit x` have "is_unit (x^n)" for n
+ from \<open>is_unit x\<close> have "is_unit (x^n)" for n
    using is_unit_power_iff by auto
  hence "x^n dvd y" for n
    by auto
@@ -2181,7 +2181,7 @@
    using is_unit_power_iff by simp
  hence "p^k dvd x"
    by auto
- moreover from `is_unit p` have "p^k dvd p^multiplicity p x"
+ moreover from \<open>is_unit p\<close> have "p^k dvd p^multiplicity p x"
    using multiplicity_unit_left is_unit_power_iff by simp
  ultimately show ?thesis by simp
 next
@@ -2194,16 +2194,16 @@
    moreover have "p^k dvd x \<Longrightarrow> k = 0"
    proof (rule ccontr)
      assume "p^k dvd x" and "k \<noteq> 0"
-     with `p = 0` have "p^k = 0" by auto
-     with `p^k dvd x` have "0 dvd x" by auto
+     with \<open>p = 0\<close> have "p^k = 0" by auto
+     with \<open>p^k dvd x\<close> have "0 dvd x" by auto
      hence "x = 0" by auto
-     with `x \<noteq> 0` show False by auto
+     with \<open>x \<noteq> 0\<close> show False by auto
    qed
    ultimately show ?thesis
-     by (auto simp add: is_unit_power_iff `\<not> is_unit p`)
+     by (auto simp add: is_unit_power_iff \<open>\<not> is_unit p\<close>)
  next
    case False
-   with `x \<noteq> 0` `\<not> is_unit p` show ?thesis
+   with \<open>x \<noteq> 0\<close> \<open>\<not> is_unit p\<close> show ?thesis
      by (simp add: power_dvd_iff_le_multiplicity dvd_power_iff multiplicity_same_power)
  qed
 qed
--- a/src/HOL/Deriv.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Deriv.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -1611,7 +1611,7 @@
     and derf: "\<And>x. \<lbrakk>a < x; x < b\<rbrakk> \<Longrightarrow> (f has_derivative f' x) (at x)"
   obtains \<xi> where "a < \<xi>" "\<xi> < b" "f b - f a = (f' \<xi>) (b - a)"
 proof -
-  have "\<exists>x. a < x \<and> x < b \<and> (\<lambda>y. f' x y - (f b - f a) / (b - a) * y) = (\<lambda>v. 0)"
+  have "\<exists>\<xi>. a < \<xi> \<and> \<xi> < b \<and> (\<lambda>y. f' \<xi> y - (f b - f a) / (b - a) * y) = (\<lambda>v. 0)"
   proof (intro Rolle_deriv[OF \<open>a < b\<close>])
     fix x
     assume x: "a < x" "x < b"
@@ -1619,12 +1619,8 @@
           has_derivative (\<lambda>y. f' x y - (f b - f a) / (b - a) * y)) (at x)"
       by (intro derivative_intros derf[OF x])
   qed (use assms in \<open>auto intro!: continuous_intros simp: field_simps\<close>)
-  then obtain \<xi> where
-    "a < \<xi>" "\<xi> < b" "(\<lambda>y. f' \<xi> y - (f b - f a) / (b - a) * y) = (\<lambda>v. 0)" 
-    by metis
   then show ?thesis
-    by (metis (no_types, opaque_lifting) that add.right_neutral add_diff_cancel_left' add_diff_eq \<open>a < b\<close>
-                 less_irrefl nonzero_eq_divide_eq)
+    by (smt (verit, ccfv_SIG) pos_le_divide_eq pos_less_divide_eq that)
 qed
 
 theorem MVT:
--- a/src/HOL/Eisbach/match_method.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Eisbach/match_method.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -95,7 +95,7 @@
         else
           let val b = #1 (the opt_dyn)
           in error ("Cannot bind fact name in term match" ^ Position.here (pos_of b)) end)) --
-  Scan.lift (for_fixes -- (\<^keyword>\<open>\<Rightarrow>\<close> |-- Parse.token Parse.text))
+  Scan.lift (for_fixes -- (\<^keyword>\<open>\<Rightarrow>\<close> |-- Parse.token Parse.embedded))
   >> (fn ((ctxt, ts), (fixes, body)) =>
     (case Token.get_value body of
       SOME (Token.Source src) =>
--- a/src/HOL/Equiv_Relations.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Equiv_Relations.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -368,7 +368,7 @@
   have eqv: "equiv S ?r"
     unfolding relation_of_def by (auto intro: comp_equivI)
   have finite: "C \<in> S//?r \<Longrightarrow> finite C" for C
-    by (fact finite_equiv_class[OF `finite S` equiv_type[OF `equiv S ?r`]])
+    by (fact finite_equiv_class[OF \<open>finite S\<close> equiv_type[OF \<open>equiv S ?r\<close>]])
   have disjoint: "A \<in> S//?r \<Longrightarrow> B \<in> S//?r \<Longrightarrow> A \<noteq> B \<Longrightarrow> A \<inter> B = {}" for A B
     using eqv quotient_disj by blast
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Examples/Rewrite_Examples.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -0,0 +1,301 @@
+theory Rewrite_Examples
+imports Main "HOL-Library.Rewrite"
+begin
+
+section \<open>The rewrite Proof Method by Example\<close>
+
+text\<open>
+This theory gives an overview over the features of the pattern-based rewrite proof method.
+
+Documentation: @{url "https://arxiv.org/abs/2111.04082"}
+\<close>
+
+lemma
+  fixes a::int and b::int and c::int
+  assumes "P (b + a)"
+  shows "P (a + b)"
+by (rewrite at "a + b" add.commute)
+   (rule assms)
+
+(* Selecting a specific subterm in a large, ambiguous term. *)
+lemma
+  fixes a b c :: int
+  assumes "f (a - a + (a - a)) + f (   0    + c) = f 0 + f c"
+  shows   "f (a - a + (a - a)) + f ((a - a) + c) = f 0 + f c"
+  by (rewrite in "f _ + f \<hole> = _" diff_self) fact
+
+lemma
+  fixes a b c :: int
+  assumes "f (a - a +    0   ) + f ((a - a) + c) = f 0 + f c"
+  shows   "f (a - a + (a - a)) + f ((a - a) + c) = f 0 + f c"
+  by (rewrite at "f (_ + \<hole>) + f _ = _" diff_self) fact
+
+lemma
+  fixes a b c :: int
+  assumes "f (  0   + (a - a)) + f ((a - a) + c) = f 0 + f c"
+  shows   "f (a - a + (a - a)) + f ((a - a) + c) = f 0 + f c"
+  by (rewrite in "f (\<hole> + _) + _ = _" diff_self) fact
+
+lemma
+  fixes a b c :: int
+  assumes "f (a - a +    0   ) + f ((a - a) + c) = f 0 + f c"
+  shows   "f (a - a + (a - a)) + f ((a - a) + c) = f 0 + f c"
+  by (rewrite in "f (_ + \<hole>) + _ = _" diff_self) fact
+
+lemma
+  fixes x y :: nat
+  shows"x + y > c \<Longrightarrow> y + x > c"
+  by (rewrite at "\<hole> > c" add.commute) assumption
+
+(* We can also rewrite in the assumptions.  *)
+lemma
+  fixes x y :: nat
+  assumes "y + x > c \<Longrightarrow> y + x > c"
+  shows   "x + y > c \<Longrightarrow> y + x > c"
+  by (rewrite in asm add.commute) fact
+
+lemma
+  fixes x y :: nat
+  assumes "y + x > c \<Longrightarrow> y + x > c"
+  shows   "x + y > c \<Longrightarrow> y + x > c"
+  by (rewrite in "x + y > c" at asm add.commute) fact
+
+lemma
+  fixes x y :: nat
+  assumes "y + x > c \<Longrightarrow> y + x > c"
+  shows   "x + y > c \<Longrightarrow> y + x > c"
+  by (rewrite at "\<hole> > c" at asm  add.commute) fact
+
+lemma
+  assumes "P {x::int. y + 1 = 1 + x}"
+  shows   "P {x::int. y + 1 = x + 1}"
+  by (rewrite at "x+1" in "{x::int. \<hole> }" add.commute) fact
+
+lemma
+  assumes "P {x::int. y + 1 = 1 + x}"
+  shows   "P {x::int. y + 1 = x + 1}"
+  by (rewrite at "any_identifier_will_work+1" in "{any_identifier_will_work::int. \<hole> }" add.commute)
+   fact
+
+lemma
+  assumes "P {(x::nat, y::nat, z). x + z * 3 = Q (\<lambda>s t. s * t + y - 3)}"
+  shows   "P {(x::nat, y::nat, z). x + z * 3 = Q (\<lambda>s t. y + s * t - 3)}"
+  by (rewrite at "b + d * e" in "\<lambda>(a, b, c). _ = Q (\<lambda>d e. \<hole>)" add.commute) fact
+
+(* This is not limited to the first assumption *)
+lemma
+  assumes "PROP P \<equiv> PROP Q"
+  shows "PROP R \<Longrightarrow> PROP P \<Longrightarrow> PROP Q"
+    by (rewrite at asm assms)
+
+lemma
+  assumes "PROP P \<equiv> PROP Q"
+  shows "PROP R \<Longrightarrow> PROP R \<Longrightarrow> PROP P \<Longrightarrow> PROP Q"
+    by (rewrite at asm assms)
+
+(* Rewriting "at asm" selects each full assumption, not any parts *)
+lemma
+  assumes "(PROP P \<Longrightarrow> PROP Q) \<equiv> (PROP S \<Longrightarrow> PROP R)"
+  shows "PROP S \<Longrightarrow> (PROP P \<Longrightarrow> PROP Q) \<Longrightarrow> PROP R"
+  apply (rewrite at asm assms)
+  apply assumption
+  done
+
+
+
+(* Rewriting with conditional rewriting rules works just as well. *)
+lemma test_theorem:
+  fixes x :: nat
+  shows "x \<le> y \<Longrightarrow> x \<ge> y \<Longrightarrow> x = y"
+  by (rule Orderings.order_antisym)
+
+(* Premises of the conditional rule yield new subgoals. The
+   assumptions of the goal are propagated into these subgoals
+*)
+lemma
+  fixes f :: "nat \<Rightarrow> nat"
+  shows "f x \<le> 0 \<Longrightarrow> f x \<ge> 0 \<Longrightarrow> f x = 0"
+  apply (rewrite at "f x" to "0" test_theorem)
+  apply assumption
+  apply assumption
+  apply (rule refl)
+  done
+
+(* This holds also for rewriting in assumptions. The order of assumptions is preserved *)
+lemma
+  assumes rewr: "PROP P \<Longrightarrow> PROP Q \<Longrightarrow> PROP R \<equiv> PROP R'"
+  assumes A1: "PROP S \<Longrightarrow> PROP T \<Longrightarrow> PROP U \<Longrightarrow> PROP P"
+  assumes A2: "PROP S \<Longrightarrow> PROP T \<Longrightarrow> PROP U \<Longrightarrow> PROP Q"
+  assumes C: "PROP S \<Longrightarrow> PROP R' \<Longrightarrow> PROP T \<Longrightarrow> PROP U \<Longrightarrow> PROP V"
+  shows "PROP S \<Longrightarrow> PROP R \<Longrightarrow> PROP T \<Longrightarrow> PROP U \<Longrightarrow> PROP V"
+  apply (rewrite at asm rewr)
+  apply (fact A1)
+  apply (fact A2)
+  apply (fact C)
+  done
+
+
+(*
+   Instantiation.
+
+   Since all rewriting is now done via conversions,
+   instantiation becomes fairly easy to do.
+*)
+
+(* We first introduce a function f and an extended
+   version of f that is annotated with an invariant. *)
+fun f :: "nat \<Rightarrow> nat" where "f n = n"
+definition "f_inv (I :: nat \<Rightarrow> bool) n \<equiv> f n"
+
+lemma annotate_f: "f = f_inv I"
+  by (simp add: f_inv_def fun_eq_iff)
+
+(* We have a lemma with a bound variable n, and
+   want to add an invariant to f. *)
+lemma
+  assumes "P (\<lambda>n. f_inv (\<lambda>_. True) n + 1) = x"
+  shows "P (\<lambda>n. f n + 1) = x"
+  by (rewrite to "f_inv (\<lambda>_. True)" annotate_f) fact
+
+(* We can also add an invariant that contains the variable n bound in the outer context.
+   For this, we need to bind this variable to an identifier. *)
+lemma
+  assumes "P (\<lambda>n. f_inv (\<lambda>x. n < x + 1) n + 1) = x"
+  shows "P (\<lambda>n. f n + 1) = x"
+  by (rewrite in "\<lambda>n. \<hole>" to "f_inv (\<lambda>x. n < x + 1)" annotate_f) fact
+
+(* Any identifier will work *)
+lemma
+  assumes "P (\<lambda>n. f_inv (\<lambda>x. n < x + 1) n + 1) = x"
+  shows "P (\<lambda>n. f n + 1) = x"
+  by (rewrite in "\<lambda>abc. \<hole>" to "f_inv (\<lambda>x. abc < x + 1)" annotate_f) fact
+
+(* The "for" keyword. *)
+lemma
+  assumes "P (2 + 1)"
+  shows "\<And>x y. P (1 + 2 :: nat)"
+by (rewrite in "P (1 + 2)" at for (x) add.commute) fact
+
+lemma
+  assumes "\<And>x y. P (y + x)"
+  shows "\<And>x y. P (x + y :: nat)"
+by (rewrite in "P (x + _)" at for (x y) add.commute) fact
+
+lemma
+  assumes "\<And>x y z. y + x + z = z + y + (x::int)"
+  shows   "\<And>x y z. x + y + z = z + y + (x::int)"
+by (rewrite at "x + y" in "x + y + z" in for (x y z) add.commute) fact
+
+lemma
+  assumes "\<And>x y z. z + (x + y) = z + y + (x::int)"
+  shows   "\<And>x y z. x + y + z = z + y + (x::int)"
+by (rewrite at "(_ + y) + z" in for (y z) add.commute) fact
+
+lemma
+  assumes "\<And>x y z. x + y + z = y + z + (x::int)"
+  shows   "\<And>x y z. x + y + z = z + y + (x::int)"
+by (rewrite at "\<hole> + _" at "_ = \<hole>" in for () add.commute) fact
+
+lemma
+  assumes eq: "\<And>x. P x \<Longrightarrow> g x = x"
+  assumes f1: "\<And>x. Q x \<Longrightarrow> P x"
+  assumes f2: "\<And>x. Q x \<Longrightarrow> x"
+  shows "\<And>x. Q x \<Longrightarrow> g x"
+  apply (rewrite at "g x" in for (x) eq)
+  apply (fact f1)
+  apply (fact f2)
+  done
+
+(* The for keyword can be used anywhere in the pattern where there is an \<And>-quantifier. *)
+lemma
+  assumes "(\<And>(x::int). x < 1 + x)"
+  and     "(x::int) + 1 > x"
+  shows   "(\<And>(x::int). x + 1 > x) \<Longrightarrow> (x::int) + 1 > x"
+by (rewrite at "x + 1" in for (x) at asm add.commute)
+   (rule assms)
+
+(* The rewrite method also has an ML interface *)
+lemma
+  assumes "\<And>a b. P ((a + 1) * (1 + b)) "
+  shows "\<And>a b :: nat. P ((a + 1) * (b + 1))"
+  apply (tactic \<open>
+    let
+      val (x, ctxt) = yield_singleton Variable.add_fixes "x" \<^context>
+      (* Note that the pattern order is reversed *)
+      val pat = [
+        Rewrite.For [(x, SOME \<^Type>\<open>nat\<close>)],
+        Rewrite.In,
+        Rewrite.Term (\<^Const>\<open>plus \<^Type>\<open>nat\<close> for \<open>Free (x, \<^Type>\<open>nat\<close>)\<close> \<^term>\<open>1 :: nat\<close>\<close>, [])]
+      val to = NONE
+    in CCONVERSION (Rewrite.rewrite_conv ctxt (pat, to) @{thms add.commute}) 1 end
+  \<close>)
+  apply (fact assms)
+  done
+
+lemma
+  assumes "Q (\<lambda>b :: int. P (\<lambda>a. a + b) (\<lambda>a. a + b))"
+  shows "Q (\<lambda>b :: int. P (\<lambda>a. a + b) (\<lambda>a. b + a))"
+  apply (tactic \<open>
+    let
+      val (x, ctxt) = yield_singleton Variable.add_fixes "x" \<^context>
+      val pat = [
+        Rewrite.Concl,
+        Rewrite.In,
+        Rewrite.Term (Free ("Q", (\<^Type>\<open>int\<close> --> TVar (("'b",0), [])) --> \<^Type>\<open>bool\<close>)
+          $ Abs ("x", \<^Type>\<open>int\<close>, Rewrite.mk_hole 1 (\<^Type>\<open>int\<close> --> TVar (("'b",0), [])) $ Bound 0), [(x, \<^Type>\<open>int\<close>)]),
+        Rewrite.In,
+        Rewrite.Term (\<^Const>\<open>plus \<^Type>\<open>int\<close> for \<open>Free (x, \<^Type>\<open>int\<close>)\<close> \<open>Var (("c", 0), \<^Type>\<open>int\<close>)\<close>\<close>, [])
+        ]
+      val to = NONE
+    in CCONVERSION (Rewrite.rewrite_conv ctxt (pat, to) @{thms add.commute}) 1 end
+  \<close>)
+  apply (fact assms)
+  done
+
+(* There is also a conversion-like rewrite function: *)
+ML \<open>
+  val ct = \<^cprop>\<open>Q (\<lambda>b :: int. P (\<lambda>a. a + b) (\<lambda>a. b + a))\<close>
+  val (x, ctxt) = yield_singleton Variable.add_fixes "x" \<^context>
+  val pat = [
+    Rewrite.Concl,
+    Rewrite.In,
+    Rewrite.Term (Free ("Q", (\<^typ>\<open>int\<close> --> TVar (("'b",0), [])) --> \<^typ>\<open>bool\<close>)
+      $ Abs ("x", \<^typ>\<open>int\<close>, Rewrite.mk_hole 1 (\<^typ>\<open>int\<close> --> TVar (("'b",0), [])) $ Bound 0), [(x, \<^typ>\<open>int\<close>)]),
+    Rewrite.In,
+    Rewrite.Term (\<^Const>\<open>plus \<^Type>\<open>int\<close> for \<open>Free (x, \<^Type>\<open>int\<close>)\<close> \<open>Var (("c", 0), \<^Type>\<open>int\<close>)\<close>\<close>, [])
+    ]
+  val to = NONE
+  val th = Rewrite.rewrite_conv ctxt (pat, to) @{thms add.commute} ct
+\<close>
+
+text \<open>Some regression tests\<close>
+
+ML \<open>
+  val ct = \<^cterm>\<open>(\<lambda>b :: int. (\<lambda>a. b + a))\<close>
+  val (x, ctxt) = yield_singleton Variable.add_fixes "x" \<^context>
+  val pat = [
+    Rewrite.In,
+    Rewrite.Term (\<^Const>\<open>plus \<^Type>\<open>int\<close> for \<open>Var (("c", 0), \<^Type>\<open>int\<close>)\<close> \<open>Var (("c", 0), \<^Type>\<open>int\<close>)\<close>\<close>, [])
+    ]
+  val to = NONE
+  val _ =
+    case try (Rewrite.rewrite_conv ctxt (pat, to) @{thms add.commute}) ct of
+      NONE => ()
+    | _ => error "should not have matched anything"
+\<close>
+
+ML \<open>
+  Rewrite.params_pconv (Conv.all_conv |> K |> K) \<^context> (Vartab.empty, []) \<^cterm>\<open>\<And>x. PROP A\<close>
+\<close>
+
+lemma
+  assumes eq: "PROP A \<Longrightarrow> PROP B \<equiv> PROP C"
+  assumes f1: "PROP D \<Longrightarrow> PROP A"
+  assumes f2: "PROP D \<Longrightarrow> PROP C"
+  shows "\<And>x. PROP D \<Longrightarrow> PROP B"
+  apply (rewrite eq)
+  apply (fact f1)
+  apply (fact f2)
+  done
+
+end
--- a/src/HOL/Examples/document/root.tex	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Examples/document/root.tex	Fri Dec 10 08:58:09 2021 +0100
@@ -1,7 +1,7 @@
 \documentclass[11pt,a4paper]{article}
 \usepackage[T1]{fontenc}
 \usepackage[only,bigsqcap]{stmaryrd}
-\usepackage{ifthen,proof,amssymb,isabelle,isabellesym}
+\usepackage{ifthen,proof,amssymb,isabelle,isabellesym,wasysym}
 
 \isabellestyle{literal}
 \usepackage{pdfsetup}\urlstyle{rm}
--- a/src/HOL/Library/Multiset.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Library/Multiset.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -4,12 +4,13 @@
     Author:     Jasmin Blanchette, Inria, LORIA, MPII
     Author:     Dmitriy Traytel, TU Muenchen
     Author:     Mathias Fleury, MPII
+    Author:     Martin Desharnais, MPI-INF Saarbruecken
 *)
 
 section \<open>(Finite) Multisets\<close>
 
 theory Multiset
-imports Cancellation
+  imports Cancellation
 begin
 
 subsection \<open>The type of multisets\<close>
@@ -2788,8 +2789,6 @@
 
 subsection \<open>The multiset order\<close>
 
-subsubsection \<open>Well-foundedness\<close>
-
 definition mult1 :: "('a \<times> 'a) set \<Rightarrow> ('a multiset \<times> 'a multiset) set" where
   "mult1 r = {(N, M). \<exists>a M0 K. M = add_mset a M0 \<and> N = M0 + K \<and>
       (\<forall>b. b \<in># K \<longrightarrow> (b, a) \<in> r)}"
@@ -2797,6 +2796,9 @@
 definition mult :: "('a \<times> 'a) set \<Rightarrow> ('a multiset \<times> 'a multiset) set" where
   "mult r = (mult1 r)\<^sup>+"
 
+definition multp :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" where
+  "multp r M N \<longleftrightarrow> (M, N) \<in> mult {(x, y). r x y}"
+
 lemma mult1I:
   assumes "M = add_mset a M0" and "N = M0 + K" and "\<And>b. b \<in># K \<Longrightarrow> (b, a) \<in> r"
   shows "(N, M) \<in> mult1 r"
@@ -2809,14 +2811,29 @@
 
 lemma mono_mult1:
   assumes "r \<subseteq> r'" shows "mult1 r \<subseteq> mult1 r'"
-unfolding mult1_def using assms by blast
+  unfolding mult1_def using assms by blast
 
 lemma mono_mult:
   assumes "r \<subseteq> r'" shows "mult r \<subseteq> mult r'"
-unfolding mult_def using mono_mult1[OF assms] trancl_mono by blast
+  unfolding mult_def using mono_mult1[OF assms] trancl_mono by blast
+
+lemma mono_multp[mono]: "r \<le> r' \<Longrightarrow> multp r \<le> multp r'"
+  unfolding le_fun_def le_bool_def
+proof (intro allI impI)
+  fix M N :: "'a multiset"
+  assume "\<forall>x xa. r x xa \<longrightarrow> r' x xa"
+  hence "{(x, y). r x y} \<subseteq> {(x, y). r' x y}"
+    by blast
+  thus "multp r M N \<Longrightarrow> multp r' M N"
+    unfolding multp_def
+    by (fact mono_mult[THEN subsetD, rotated])
+qed
 
 lemma not_less_empty [iff]: "(M, {#}) \<notin> mult1 r"
-by (simp add: mult1_def)
+  by (simp add: mult1_def)
+
+
+subsubsection \<open>Well-foundedness\<close>
 
 lemma less_add:
   assumes mult1: "(N, add_mset a M0) \<in> mult1 r"
@@ -2918,11 +2935,15 @@
   qed
 qed
 
-theorem wf_mult1: "wf r \<Longrightarrow> wf (mult1 r)"
-by (rule acc_wfI) (rule all_accessible)
-
-theorem wf_mult: "wf r \<Longrightarrow> wf (mult r)"
-unfolding mult_def by (rule wf_trancl) (rule wf_mult1)
+lemma wf_mult1: "wf r \<Longrightarrow> wf (mult1 r)"
+  by (rule acc_wfI) (rule all_accessible)
+
+lemma wf_mult: "wf r \<Longrightarrow> wf (mult r)"
+  unfolding mult_def by (rule wf_trancl) (rule wf_mult1)
+
+lemma wfP_multp: "wfP r \<Longrightarrow> wfP (multp r)"
+  unfolding multp_def wfP_def
+  by (simp add: wf_mult)
 
 
 subsubsection \<open>Closure-free presentation\<close>
@@ -2965,6 +2986,9 @@
   qed
 qed
 
+lemmas multp_implies_one_step =
+  mult_implies_one_step[of "{(x, y). r x y}" for r, folded multp_def transp_trans, simplified]
+
 lemma one_step_implies_mult:
   assumes
     "J \<noteq> {#}" and
@@ -2997,6 +3021,9 @@
   qed
 qed
 
+lemmas one_step_implies_multp =
+  one_step_implies_mult[of _ _ "{(x, y). r x y}" for r, folded multp_def, simplified]
+
 lemma subset_implies_mult:
   assumes sub: "A \<subset># B"
   shows "(A, B) \<in> mult r"
@@ -3009,8 +3036,10 @@
     by (rule one_step_implies_mult[of "B - A" "{#}" _ A, unfolded ApBmA, simplified])
 qed
 
-
-subsection \<open>The multiset extension is cancellative for multiset union\<close>
+lemmas subset_implies_multp = subset_implies_mult[of _ _ "{(x, y). r x y}" for r, folded multp_def]
+
+
+subsubsection \<open>The multiset extension is cancellative for multiset union\<close>
 
 lemma mult_cancel:
   assumes "trans s" and "irrefl s"
@@ -3045,10 +3074,18 @@
   thus ?L using one_step_implies_mult[of J K s "I + Z"] by (auto simp: ac_simps)
 qed
 
+lemmas multp_cancel =
+  mult_cancel[of "{(x, y). r x y}" for r,
+    folded multp_def transp_trans irreflp_irrefl_eq, simplified]
+
 lemmas mult_cancel_add_mset =
   mult_cancel[of _ _ "{#_#}", unfolded union_mset_add_mset_right add.comm_neutral]
 
-lemma mult_cancel_max:
+lemmas multp_cancel_add_mset =
+  mult_cancel_add_mset[of "{(x, y). r x y}" for r,
+    folded multp_def transp_trans irreflp_irrefl_eq, simplified]
+
+lemma mult_cancel_max0:
   assumes "trans s" and "irrefl s"
   shows "(X, Y) \<in> mult s \<longleftrightarrow> (X - X \<inter># Y, Y - X \<inter># Y) \<in> mult s" (is "?L \<longleftrightarrow> ?R")
 proof -
@@ -3056,6 +3093,112 @@
   thus ?thesis using mult_cancel[OF assms, of "X - X \<inter># Y"  "X \<inter># Y" "Y - X \<inter># Y"] by auto
 qed
 
+lemmas mult_cancel_max = mult_cancel_max0[simplified]
+
+lemmas multp_cancel_max =
+  mult_cancel_max[of "{(x, y). r x y}" for r,
+    folded multp_def transp_trans irreflp_irrefl_eq, simplified]
+
+
+subsubsection \<open>Partial-order properties\<close>
+
+lemma mult1_lessE:
+  assumes "(N, M) \<in> mult1 {(a, b). r a b}" and "asymp r"
+  obtains a M0 K where "M = add_mset a M0" "N = M0 + K"
+    "a \<notin># K" "\<And>b. b \<in># K \<Longrightarrow> r b a"
+proof -
+  from assms obtain a M0 K where "M = add_mset a M0" "N = M0 + K" and
+    *: "b \<in># K \<Longrightarrow> r b a" for b by (blast elim: mult1E)
+  moreover from * [of a] have "a \<notin># K"
+    using \<open>asymp r\<close> by (meson asymp.cases)
+  ultimately show thesis by (auto intro: that)
+qed
+
+lemma trans_mult: "trans r \<Longrightarrow> trans (mult r)"
+  by (simp add: mult_def)
+
+lemma transp_multp: "transp r \<Longrightarrow> transp (multp r)"
+  unfolding multp_def transp_trans_eq
+  by (fact trans_mult[of "{(x, y). r x y}" for r, folded transp_trans])
+
+lemma irrefl_mult:
+  assumes "trans r" "irrefl r"
+  shows "irrefl (mult r)"
+proof (intro irreflI notI)
+  fix M
+  assume "(M, M) \<in> mult r"
+  then obtain I J K where "M = I + J" and "M = I + K"
+    and "J \<noteq> {#}" and "(\<forall>k\<in>set_mset K. \<exists>j\<in>set_mset J. (k, j) \<in> r)"
+    using mult_implies_one_step[OF \<open>trans r\<close>] by blast
+  then have *: "K \<noteq> {#}" and **: "\<forall>k\<in>set_mset K. \<exists>j\<in>set_mset K. (k, j) \<in> r" by auto
+  have "finite (set_mset K)" by simp
+  hence "set_mset K = {}"
+    using **
+  proof (induction rule: finite_induct)
+    case empty
+    thus ?case by simp
+  next
+    case (insert x F)
+    have False
+      using \<open>irrefl r\<close>[unfolded irrefl_def, rule_format]
+      using \<open>trans r\<close>[THEN transD]
+      by (metis equals0D insert.IH insert.prems insertE insertI1 insertI2)
+    thus ?case ..
+  qed
+  with * show False by simp
+qed
+
+lemmas irreflp_multp =
+  irrefl_mult[of "{(x, y). r x y}" for r,
+    folded transp_trans_eq irreflp_irrefl_eq, simplified, folded multp_def]
+
+instantiation multiset :: (preorder) order begin
+
+definition less_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool"
+  where "M < N \<longleftrightarrow> multp (<) M N"
+
+definition less_eq_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool"
+  where "less_eq_multiset M N \<longleftrightarrow> M < N \<or> M = N"
+
+instance
+proof intro_classes
+  fix M N :: "'a multiset"
+  show "(M < N) = (M \<le> N \<and> \<not> N \<le> M)"
+    unfolding less_eq_multiset_def less_multiset_def
+    by (metis irreflp_def irreflp_less irreflp_multp transpE transp_less transp_multp)
+next
+  fix M :: "'a multiset"
+  show "M \<le> M"
+    unfolding less_eq_multiset_def
+    by simp
+next
+  fix M1 M2 M3 :: "'a multiset"
+  show "M1 \<le> M2 \<Longrightarrow> M2 \<le> M3 \<Longrightarrow> M1 \<le> M3"
+    unfolding less_eq_multiset_def less_multiset_def
+    using transp_multp[OF transp_less, THEN transpD]
+    by blast
+next
+  fix M N :: "'a multiset"
+  show "M \<le> N \<Longrightarrow> N \<le> M \<Longrightarrow> M = N"
+    unfolding less_eq_multiset_def less_multiset_def
+    using transp_multp[OF transp_less, THEN transpD]
+    using irreflp_multp[OF transp_less irreflp_less, unfolded irreflp_def, rule_format]
+    by blast
+qed
+
+end
+
+lemma mset_le_irrefl [elim!]:
+  fixes M :: "'a::preorder multiset"
+  shows "M < M \<Longrightarrow> R"
+  by simp
+
+lemma wfP_less_multiset[simp]:
+  assumes wfP_less: "wfP ((<) :: ('a :: preorder) \<Rightarrow> 'a \<Rightarrow> bool)"
+  shows "wfP ((<) :: 'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool)"
+  using wfP_multp[OF wfP_less] less_multiset_def
+  by (metis wfPUNIVI wfP_induct)
+
 
 subsection \<open>Quasi-executable version of the multiset extension\<close>
 
@@ -3063,22 +3206,22 @@
   Predicate variants of \<open>mult\<close> and the reflexive closure of \<open>mult\<close>, which are
   executable whenever the given predicate \<open>P\<close> is. Together with the standard
   code equations for \<open>(\<inter>#\<close>) and \<open>(-\<close>) this should yield quadratic
-  (with respect to calls to \<open>P\<close>) implementations of \<open>multp\<close> and \<open>multeqp\<close>.
+  (with respect to calls to \<open>P\<close>) implementations of \<open>multp_code\<close> and \<open>multeqp_code\<close>.
 \<close>
 
-definition multp :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" where
-  "multp P N M =
+definition multp_code :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" where
+  "multp_code P N M =
     (let Z = M \<inter># N; X = M - Z in
     X \<noteq> {#} \<and> (let Y = N - Z in (\<forall>y \<in> set_mset Y. \<exists>x \<in> set_mset X. P y x)))"
 
-definition multeqp :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" where
-  "multeqp P N M =
+definition multeqp_code :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" where
+  "multeqp_code P N M =
     (let Z = M \<inter># N; X = M - Z; Y = N - Z in
     (\<forall>y \<in> set_mset Y. \<exists>x \<in> set_mset X. P y x))"
 
-lemma multp_iff:
+lemma multp_code_iff_mult:
   assumes "irrefl R" and "trans R" and [simp]: "\<And>x y. P x y \<longleftrightarrow> (x, y) \<in> R"
-  shows "multp P N M \<longleftrightarrow> (N, M) \<in> mult R" (is "?L \<longleftrightarrow> ?R")
+  shows "multp_code P N M \<longleftrightarrow> (N, M) \<in> mult R" (is "?L \<longleftrightarrow> ?R")
 proof -
   have *: "M \<inter># N + (N - M \<inter># N) = N" "M \<inter># N + (M - M \<inter># N) = M"
     "(M - M \<inter># N) \<inter># (N - M \<inter># N) = {#}" by (auto simp flip: count_inject)
@@ -3086,87 +3229,40 @@
   proof
     assume ?L thus ?R
       using one_step_implies_mult[of "M - M \<inter># N" "N - M \<inter># N" R "M \<inter># N"] *
-      by (auto simp: multp_def Let_def)
+      by (auto simp: multp_code_def Let_def)
   next
     { fix I J K :: "'a multiset" assume "(I + J) \<inter># (I + K) = {#}"
       then have "I = {#}" by (metis inter_union_distrib_right union_eq_empty)
     } note [dest!] = this
     assume ?R thus ?L
       using mult_implies_one_step[OF assms(2), of "N - M \<inter># N" "M - M \<inter># N"]
-        mult_cancel_max[OF assms(2,1), of "N" "M"] * by (auto simp: multp_def)
+        mult_cancel_max[OF assms(2,1), of "N" "M"] * by (auto simp: multp_code_def)
   qed
 qed
 
-lemma multeqp_iff:
+lemma multp_code_eq_multp: "irreflp r \<Longrightarrow> transp r \<Longrightarrow> multp_code r = multp r"
+  using multp_code_iff_mult[of "{(x, y). r x y}" r for r,
+    folded irreflp_irrefl_eq transp_trans multp_def, simplified]
+  by blast
+
+lemma multeqp_code_iff_reflcl_mult:
   assumes "irrefl R" and "trans R" and "\<And>x y. P x y \<longleftrightarrow> (x, y) \<in> R"
-  shows "multeqp P N M \<longleftrightarrow> (N, M) \<in> (mult R)\<^sup>="
+  shows "multeqp_code P N M \<longleftrightarrow> (N, M) \<in> (mult R)\<^sup>="
 proof -
   { assume "N \<noteq> M" "M - M \<inter># N = {#}"
     then obtain y where "count N y \<noteq> count M y" by (auto simp flip: count_inject)
     then have "\<exists>y. count M y < count N y" using \<open>M - M \<inter># N = {#}\<close>
       by (auto simp flip: count_inject dest!: le_neq_implies_less fun_cong[of _ _ y])
   }
-  then have "multeqp P N M \<longleftrightarrow> multp P N M \<or> N = M"
-    by (auto simp: multeqp_def multp_def Let_def in_diff_count)
-  thus ?thesis using multp_iff[OF assms] by simp
-qed
-
-
-subsubsection \<open>Partial-order properties\<close>
-
-lemma (in preorder) mult1_lessE:
-  assumes "(N, M) \<in> mult1 {(a, b). a < b}"
-  obtains a M0 K where "M = add_mset a M0" "N = M0 + K"
-    "a \<notin># K" "\<And>b. b \<in># K \<Longrightarrow> b < a"
-proof -
-  from assms obtain a M0 K where "M = add_mset a M0" "N = M0 + K" and
-    *: "b \<in># K \<Longrightarrow> b < a" for b by (blast elim: mult1E)
-  moreover from * [of a] have "a \<notin># K" by auto
-  ultimately show thesis by (auto intro: that)
+  then have "multeqp_code P N M \<longleftrightarrow> multp_code P N M \<or> N = M"
+    by (auto simp: multeqp_code_def multp_code_def Let_def in_diff_count)
+  thus ?thesis using multp_code_iff_mult[OF assms] by simp
 qed
 
-instantiation multiset :: (preorder) order
-begin
-
-definition less_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool"
-  where "M' < M \<longleftrightarrow> (M', M) \<in> mult {(x', x). x' < x}"
-
-definition less_eq_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool"
-  where "less_eq_multiset M' M \<longleftrightarrow> M' < M \<or> M' = M"
-
-instance
-proof -
-  have irrefl: "\<not> M < M" for M :: "'a multiset"
-  proof
-    assume "M < M"
-    then have MM: "(M, M) \<in> mult {(x, y). x < y}" by (simp add: less_multiset_def)
-    have "trans {(x'::'a, x). x' < x}"
-      by (metis (mono_tags, lifting) case_prodD case_prodI less_trans mem_Collect_eq transI)
-    moreover note MM
-    ultimately have "\<exists>I J K. M = I + J \<and> M = I + K
-      \<and> J \<noteq> {#} \<and> (\<forall>k\<in>set_mset K. \<exists>j\<in>set_mset J. (k, j) \<in> {(x, y). x < y})"
-      by (rule mult_implies_one_step)
-    then obtain I J K where "M = I + J" and "M = I + K"
-      and "J \<noteq> {#}" and "(\<forall>k\<in>set_mset K. \<exists>j\<in>set_mset J. (k, j) \<in> {(x, y). x < y})" by blast
-    then have *: "K \<noteq> {#}" and **: "\<forall>k\<in>set_mset K. \<exists>j\<in>set_mset K. k < j" by auto
-    have "finite (set_mset K)" by simp
-    moreover note **
-    ultimately have "set_mset K = {}"
-      by (induct rule: finite_induct) (auto intro: order_less_trans)
-    with * show False by simp
-  qed
-  have trans: "K < M \<Longrightarrow> M < N \<Longrightarrow> K < N" for K M N :: "'a multiset"
-    unfolding less_multiset_def mult_def by (blast intro: trancl_trans)
-  show "OFCLASS('a multiset, order_class)"
-    by standard (auto simp add: less_eq_multiset_def irrefl dest: trans)
-qed
-
-end
-
-lemma mset_le_irrefl [elim!]:
-  fixes M :: "'a::preorder multiset"
-  shows "M < M \<Longrightarrow> R"
-  by simp
+lemma multeqp_code_eq_reflclp_multp: "irreflp r \<Longrightarrow> transp r \<Longrightarrow> multeqp_code r = (multp r)\<^sup>=\<^sup>="
+  using multeqp_code_iff_reflcl_mult[of "{(x, y). r x y}" r for r,
+    folded irreflp_irrefl_eq transp_trans, simplified, folded multp_def]
+  by blast
 
 
 subsubsection \<open>Monotonicity of multiset union\<close>
@@ -3175,7 +3271,7 @@
   by (force simp: mult1_def)
 
 lemma union_le_mono2: "B < D \<Longrightarrow> C + B < C + (D::'a::preorder multiset)"
-apply (unfold less_multiset_def mult_def)
+apply (unfold less_multiset_def multp_def mult_def)
 apply (erule trancl_induct)
  apply (blast intro: mult1_union)
 apply (blast intro: mult1_union trancl_trans)
--- a/src/HOL/Library/Multiset_Order.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Library/Multiset_Order.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -11,6 +11,135 @@
 
 subsection \<open>Alternative Characterizations\<close>
 
+subsubsection \<open>The Dershowitz--Manna Ordering\<close>
+
+definition multp\<^sub>D\<^sub>M where
+  "multp\<^sub>D\<^sub>M r M N \<longleftrightarrow>
+   (\<exists>X Y. X \<noteq> {#} \<and> X \<subseteq># N \<and> M = (N - X) + Y \<and> (\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> r k a)))"
+
+lemma multp\<^sub>D\<^sub>M_imp_multp:
+  "multp\<^sub>D\<^sub>M r M N \<Longrightarrow> multp r M N"
+proof -
+  assume "multp\<^sub>D\<^sub>M r M N"
+  then obtain X Y where
+    "X \<noteq> {#}" and "X \<subseteq># N" and "M = N - X + Y" and "\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> r k a)"
+    unfolding multp\<^sub>D\<^sub>M_def by blast
+  then have "multp r (N - X + Y) (N - X + X)"
+    by (intro one_step_implies_multp) (auto simp: Bex_def trans_def)
+  with \<open>M = N - X + Y\<close> \<open>X \<subseteq># N\<close> show "multp r M N"
+    by (metis subset_mset.diff_add)
+qed
+
+subsubsection \<open>The Huet--Oppen Ordering\<close>
+
+definition multp\<^sub>H\<^sub>O where
+  "multp\<^sub>H\<^sub>O r M N \<longleftrightarrow> M \<noteq> N \<and> (\<forall>y. count N y < count M y \<longrightarrow> (\<exists>x. r y x \<and> count M x < count N x))"
+
+lemma multp_imp_multp\<^sub>H\<^sub>O:
+  assumes "asymp r" and "transp r"
+  shows "multp r M N \<Longrightarrow> multp\<^sub>H\<^sub>O r M N"
+  unfolding multp_def mult_def
+proof (induction rule: trancl_induct)
+  case (base P)
+  then show ?case
+    using \<open>asymp r\<close>
+    by (auto elim!: mult1_lessE simp: count_eq_zero_iff multp\<^sub>H\<^sub>O_def split: if_splits
+        dest!: Suc_lessD)
+next
+  case (step N P)
+  from step(3) have "M \<noteq> N" and
+    **: "\<And>y. count N y < count M y \<Longrightarrow> (\<exists>x. r y x \<and> count M x < count N x)"
+    by (simp_all add: multp\<^sub>H\<^sub>O_def)
+  from step(2) obtain M0 a K where
+    *: "P = add_mset a M0" "N = M0 + K" "a \<notin># K" "\<And>b. b \<in># K \<Longrightarrow> r b a"
+    using \<open>asymp r\<close> by (auto elim: mult1_lessE)
+  from \<open>M \<noteq> N\<close> ** *(1,2,3) have "M \<noteq> P"
+    using *(4) \<open>asymp r\<close>
+    by (metis asymp.cases add_cancel_right_right add_diff_cancel_left' add_mset_add_single count_inI
+        count_union diff_diff_add_mset diff_single_trivial in_diff_count multi_member_last)
+  moreover
+  { assume "count P a \<le> count M a"
+    with \<open>a \<notin># K\<close> have "count N a < count M a" unfolding *(1,2)
+      by (auto simp add: not_in_iff)
+      with ** obtain z where z: "r a z" "count M z < count N z"
+        by blast
+      with * have "count N z \<le> count P z"
+        using \<open>asymp r\<close>
+        by (metis add_diff_cancel_left' add_mset_add_single asymp.cases diff_diff_add_mset
+            diff_single_trivial in_diff_count not_le_imp_less)
+      with z have "\<exists>z. r a z \<and> count M z < count P z" by auto
+  } note count_a = this
+  { fix y
+    assume count_y: "count P y < count M y"
+    have "\<exists>x. r y x \<and> count M x < count P x"
+    proof (cases "y = a")
+      case True
+      with count_y count_a show ?thesis by auto
+    next
+      case False
+      show ?thesis
+      proof (cases "y \<in># K")
+        case True
+        with *(4) have "r y a" by simp
+        then show ?thesis
+          by (cases "count P a \<le> count M a") (auto dest: count_a intro: \<open>transp r\<close>[THEN transpD])
+      next
+        case False
+        with \<open>y \<noteq> a\<close> have "count P y = count N y" unfolding *(1,2)
+          by (simp add: not_in_iff)
+        with count_y ** obtain z where z: "r y z" "count M z < count N z" by auto
+        show ?thesis
+        proof (cases "z \<in># K")
+          case True
+          with *(4) have "r z a" by simp
+          with z(1) show ?thesis
+            by (cases "count P a \<le> count M a") (auto dest!: count_a intro: \<open>transp r\<close>[THEN transpD])
+        next
+          case False
+          with \<open>a \<notin># K\<close> have "count N z \<le> count P z" unfolding *
+            by (auto simp add: not_in_iff)
+          with z show ?thesis by auto
+        qed
+      qed
+    qed
+  }
+  ultimately show ?case unfolding multp\<^sub>H\<^sub>O_def by blast
+qed
+
+lemma multp\<^sub>H\<^sub>O_imp_multp\<^sub>D\<^sub>M: "multp\<^sub>H\<^sub>O r M N \<Longrightarrow> multp\<^sub>D\<^sub>M r M N"
+unfolding multp\<^sub>D\<^sub>M_def
+proof (intro iffI exI conjI)
+  assume "multp\<^sub>H\<^sub>O r M N"
+  then obtain z where z: "count M z < count N z"
+    unfolding multp\<^sub>H\<^sub>O_def by (auto simp: multiset_eq_iff nat_neq_iff)
+  define X where "X = N - M"
+  define Y where "Y = M - N"
+  from z show "X \<noteq> {#}" unfolding X_def by (auto simp: multiset_eq_iff not_less_eq_eq Suc_le_eq)
+  from z show "X \<subseteq># N" unfolding X_def by auto
+  show "M = (N - X) + Y" unfolding X_def Y_def multiset_eq_iff count_union count_diff by force
+  show "\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> r k a)"
+  proof (intro allI impI)
+    fix k
+    assume "k \<in># Y"
+    then have "count N k < count M k" unfolding Y_def
+      by (auto simp add: in_diff_count)
+    with \<open>multp\<^sub>H\<^sub>O r M N\<close> obtain a where "r k a" and "count M a < count N a"
+      unfolding multp\<^sub>H\<^sub>O_def by blast
+    then show "\<exists>a. a \<in># X \<and> r k a" unfolding X_def
+      by (auto simp add: in_diff_count)
+  qed
+qed
+
+lemma multp_eq_multp\<^sub>D\<^sub>M: "asymp r \<Longrightarrow> transp r \<Longrightarrow> multp r = multp\<^sub>D\<^sub>M r"
+  using multp\<^sub>D\<^sub>M_imp_multp multp_imp_multp\<^sub>H\<^sub>O[THEN multp\<^sub>H\<^sub>O_imp_multp\<^sub>D\<^sub>M]
+  by blast
+
+lemma multp_eq_multp\<^sub>H\<^sub>O: "asymp r \<Longrightarrow> transp r \<Longrightarrow> multp r = multp\<^sub>H\<^sub>O r"
+  using multp\<^sub>H\<^sub>O_imp_multp\<^sub>D\<^sub>M[THEN multp\<^sub>D\<^sub>M_imp_multp] multp_imp_multp\<^sub>H\<^sub>O
+  by blast
+
+subsubsection \<open>Properties of Preorders\<close>
+
 context preorder
 begin
 
@@ -59,107 +188,29 @@
 
 lemma mult_imp_less_multiset\<^sub>H\<^sub>O:
   "(M, N) \<in> mult {(x, y). x < y} \<Longrightarrow> less_multiset\<^sub>H\<^sub>O M N"
-proof (unfold mult_def, induct rule: trancl_induct)
-  case (base P)
-  then show ?case
-    by (auto elim!: mult1_lessE simp add: count_eq_zero_iff less_multiset\<^sub>H\<^sub>O_def split: if_splits dest!: Suc_lessD)
-next
-  case (step N P)
-  from step(3) have "M \<noteq> N" and
-    **: "\<And>y. count N y < count M y \<Longrightarrow> (\<exists>x>y. count M x < count N x)"
-    by (simp_all add: less_multiset\<^sub>H\<^sub>O_def)
-  from step(2) obtain M0 a K where
-    *: "P = add_mset a M0" "N = M0 + K" "a \<notin># K" "\<And>b. b \<in># K \<Longrightarrow> b < a"
-    by (blast elim: mult1_lessE)
-  from \<open>M \<noteq> N\<close> ** *(1,2,3) have "M \<noteq> P" by (force dest: *(4) elim!: less_asym split: if_splits )
-  moreover
-  { assume "count P a \<le> count M a"
-    with \<open>a \<notin># K\<close> have "count N a < count M a" unfolding *(1,2)
-      by (auto simp add: not_in_iff)
-      with ** obtain z where z: "z > a" "count M z < count N z"
-        by blast
-      with * have "count N z \<le> count P z" 
-        by (auto elim: less_asym intro: count_inI)
-      with z have "\<exists>z > a. count M z < count P z" by auto
-  } note count_a = this
-  { fix y
-    assume count_y: "count P y < count M y"
-    have "\<exists>x>y. count M x < count P x"
-    proof (cases "y = a")
-      case True
-      with count_y count_a show ?thesis by auto
-    next
-      case False
-      show ?thesis
-      proof (cases "y \<in># K")
-        case True
-        with *(4) have "y < a" by simp
-        then show ?thesis by (cases "count P a \<le> count M a") (auto dest: count_a intro: less_trans)
-      next
-        case False
-        with \<open>y \<noteq> a\<close> have "count P y = count N y" unfolding *(1,2)
-          by (simp add: not_in_iff)
-        with count_y ** obtain z where z: "z > y" "count M z < count N z" by auto
-        show ?thesis
-        proof (cases "z \<in># K")
-          case True
-          with *(4) have "z < a" by simp
-          with z(1) show ?thesis
-            by (cases "count P a \<le> count M a") (auto dest!: count_a intro: less_trans)
-        next
-          case False
-          with \<open>a \<notin># K\<close> have "count N z \<le> count P z" unfolding *
-            by (auto simp add: not_in_iff)
-          with z show ?thesis by auto
-        qed
-      qed
-    qed
-  }
-  ultimately show ?case unfolding less_multiset\<^sub>H\<^sub>O_def by blast
-qed
+  unfolding multp_def[of "(<)", symmetric]
+  using multp_imp_multp\<^sub>H\<^sub>O[of "(<)"]
+  by (simp add: less_multiset\<^sub>H\<^sub>O_def multp\<^sub>H\<^sub>O_def)
 
 lemma less_multiset\<^sub>D\<^sub>M_imp_mult:
   "less_multiset\<^sub>D\<^sub>M M N \<Longrightarrow> (M, N) \<in> mult {(x, y). x < y}"
-proof -
-  assume "less_multiset\<^sub>D\<^sub>M M N"
-  then obtain X Y where
-    "X \<noteq> {#}" and "X \<subseteq># N" and "M = N - X + Y" and "\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> k < a)"
-    unfolding less_multiset\<^sub>D\<^sub>M_def by blast
-  then have "(N - X + Y, N - X + X) \<in> mult {(x, y). x < y}"
-    by (intro one_step_implies_mult) (auto simp: Bex_def trans_def)
-  with \<open>M = N - X + Y\<close> \<open>X \<subseteq># N\<close> show "(M, N) \<in> mult {(x, y). x < y}"
-    by (metis subset_mset.diff_add)
-qed
+  unfolding multp_def[of "(<)", symmetric]
+  by (rule multp\<^sub>D\<^sub>M_imp_multp[of "(<)" M N]) (simp add: less_multiset\<^sub>D\<^sub>M_def multp\<^sub>D\<^sub>M_def)
 
 lemma less_multiset\<^sub>H\<^sub>O_imp_less_multiset\<^sub>D\<^sub>M: "less_multiset\<^sub>H\<^sub>O M N \<Longrightarrow> less_multiset\<^sub>D\<^sub>M M N"
-unfolding less_multiset\<^sub>D\<^sub>M_def
-proof (intro iffI exI conjI)
-  assume "less_multiset\<^sub>H\<^sub>O M N"
-  then obtain z where z: "count M z < count N z"
-    unfolding less_multiset\<^sub>H\<^sub>O_def by (auto simp: multiset_eq_iff nat_neq_iff)
-  define X where "X = N - M"
-  define Y where "Y = M - N"
-  from z show "X \<noteq> {#}" unfolding X_def by (auto simp: multiset_eq_iff not_less_eq_eq Suc_le_eq)
-  from z show "X \<subseteq># N" unfolding X_def by auto
-  show "M = (N - X) + Y" unfolding X_def Y_def multiset_eq_iff count_union count_diff by force
-  show "\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> k < a)"
-  proof (intro allI impI)
-    fix k
-    assume "k \<in># Y"
-    then have "count N k < count M k" unfolding Y_def
-      by (auto simp add: in_diff_count)
-    with \<open>less_multiset\<^sub>H\<^sub>O M N\<close> obtain a where "k < a" and "count M a < count N a"
-      unfolding less_multiset\<^sub>H\<^sub>O_def by blast
-    then show "\<exists>a. a \<in># X \<and> k < a" unfolding X_def
-      by (auto simp add: in_diff_count)
-  qed
-qed
+  unfolding less_multiset\<^sub>D\<^sub>M_def less_multiset\<^sub>H\<^sub>O_def
+  unfolding multp\<^sub>D\<^sub>M_def[symmetric] multp\<^sub>H\<^sub>O_def[symmetric]
+  by (rule multp\<^sub>H\<^sub>O_imp_multp\<^sub>D\<^sub>M)
 
 lemma mult_less_multiset\<^sub>D\<^sub>M: "(M, N) \<in> mult {(x, y). x < y} \<longleftrightarrow> less_multiset\<^sub>D\<^sub>M M N"
-  by (metis less_multiset\<^sub>D\<^sub>M_imp_mult less_multiset\<^sub>H\<^sub>O_imp_less_multiset\<^sub>D\<^sub>M mult_imp_less_multiset\<^sub>H\<^sub>O)
+  unfolding multp_def[of "(<)", symmetric]
+  using multp_eq_multp\<^sub>D\<^sub>M[of "(<)", simplified]
+  by (simp add: multp\<^sub>D\<^sub>M_def less_multiset\<^sub>D\<^sub>M_def)
 
 lemma mult_less_multiset\<^sub>H\<^sub>O: "(M, N) \<in> mult {(x, y). x < y} \<longleftrightarrow> less_multiset\<^sub>H\<^sub>O M N"
-  by (metis less_multiset\<^sub>D\<^sub>M_imp_mult less_multiset\<^sub>H\<^sub>O_imp_less_multiset\<^sub>D\<^sub>M mult_imp_less_multiset\<^sub>H\<^sub>O)
+  unfolding multp_def[of "(<)", symmetric]
+  using multp_eq_multp\<^sub>H\<^sub>O[of "(<)", simplified]
+  by (simp add: multp\<^sub>H\<^sub>O_def less_multiset\<^sub>H\<^sub>O_def)
 
 lemmas mult\<^sub>D\<^sub>M = mult_less_multiset\<^sub>D\<^sub>M[unfolded less_multiset\<^sub>D\<^sub>M_def]
 lemmas mult\<^sub>H\<^sub>O = mult_less_multiset\<^sub>H\<^sub>O[unfolded less_multiset\<^sub>H\<^sub>O_def]
@@ -167,10 +218,15 @@
 end
 
 lemma less_multiset_less_multiset\<^sub>H\<^sub>O: "M < N \<longleftrightarrow> less_multiset\<^sub>H\<^sub>O M N"
-  unfolding less_multiset_def mult\<^sub>H\<^sub>O less_multiset\<^sub>H\<^sub>O_def ..
+  unfolding less_multiset_def multp_def mult\<^sub>H\<^sub>O less_multiset\<^sub>H\<^sub>O_def ..
 
-lemmas less_multiset\<^sub>D\<^sub>M = mult\<^sub>D\<^sub>M[folded less_multiset_def]
-lemmas less_multiset\<^sub>H\<^sub>O = mult\<^sub>H\<^sub>O[folded less_multiset_def]
+lemma less_multiset\<^sub>D\<^sub>M:
+  "M < N \<longleftrightarrow> (\<exists>X Y. X \<noteq> {#} \<and> X \<subseteq># N \<and> M = N - X + Y \<and> (\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> k < a)))"
+  by (rule mult\<^sub>D\<^sub>M[folded multp_def less_multiset_def])
+
+lemma less_multiset\<^sub>H\<^sub>O:
+  "M < N \<longleftrightarrow> M \<noteq> N \<and> (\<forall>y. count N y < count M y \<longrightarrow> (\<exists>x>y. count M x < count N x))"
+  by (rule mult\<^sub>H\<^sub>O[folded multp_def less_multiset_def])
 
 lemma subset_eq_imp_le_multiset:
   shows "M \<subseteq># N \<Longrightarrow> M \<le> N"
@@ -198,7 +254,7 @@
 
 (* FIXME: "le" should be "less" in this and other names *)
 lemma le_multiset_empty_right[simp]: "\<not> M < {#}"
-  using subset_mset.le_zero_eq less_multiset\<^sub>D\<^sub>M by blast
+  using subset_mset.le_zero_eq less_multiset_def multp_def less_multiset\<^sub>D\<^sub>M by blast
 
 (* FIXME: "le" should be "less" in this and other names *)
 lemma union_le_diff_plus: "P \<subseteq># M \<Longrightarrow> N < P \<Longrightarrow> M - P + N < M"
@@ -381,9 +437,9 @@
 begin
 
 lemma wf_less_multiset: "wf {(M :: 'a multiset, N). M < N}"
-  unfolding less_multiset_def by (auto intro: wf_mult wf)
+  unfolding less_multiset_def multp_def by (auto intro: wf_mult wf)
 
-instance by standard (metis less_multiset_def wf wf_def wf_mult)
+instance by standard (metis less_multiset_def multp_def wf wf_def wf_mult)
 
 end
 
--- a/src/HOL/Library/Rewrite.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Library/Rewrite.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -2,6 +2,8 @@
     Author:     Christoph Traut, Lars Noschinski, TU Muenchen
 
 Proof method "rewrite" with support for subterm-selection based on patterns.
+
+Documentation: https://arxiv.org/abs/2111.04082
 *)
 
 theory Rewrite
--- a/src/HOL/Library/Tools/smt_word.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Library/Tools/smt_word.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -19,14 +19,17 @@
 
 (* "QF_AUFBV" is too restrictive for Isabelle's problems, which contain aritmetic and quantifiers.
    Better set the logic to "" and make at least Z3 happy. *)
-fun smtlib_logic ts =
-  if exists (Term.exists_type (Term.exists_subtype is_wordT)) ts then SOME "" else NONE
+fun smtlib_logic "z3" ts =
+    if exists (Term.exists_type (Term.exists_subtype is_wordT)) ts then SOME "" else NONE
+  | smtlib_logic "verit" _ = NONE
+  | smtlib_logic _ ts =
+    if exists (Term.exists_type (Term.exists_subtype is_wordT)) ts then SOME "AUFBVLIRA" else NONE
 
 
 (* SMT-LIB builtins *)
 
 local
-  val smtlibC = SMTLIB_Interface.smtlibC
+  val smtlibC = SMTLIB_Interface.smtlibC @ SMTLIB_Interface.bvsmlibC
 
   val wordT = \<^typ>\<open>'a::len word\<close>
 
--- a/src/HOL/List.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/List.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -4238,6 +4238,8 @@
   case (Cons x xs) thus ?case by (fastforce split: if_splits)
 qed
 
+lemmas find_None_iff2 = find_None_iff[THEN eq_iff_swap]
+
 lemma find_Some_iff:
   "List.find P xs = Some x \<longleftrightarrow>
   (\<exists>i<length xs. P (xs!i) \<and> x = xs!i \<and> (\<forall>j<i. \<not> P (xs!j)))"
@@ -4249,6 +4251,8 @@
     using diff_Suc_1[unfolded One_nat_def] less_Suc_eq_0_disj by fastforce
 qed
 
+lemmas find_Some_iff2 = find_Some_iff[THEN eq_iff_swap]
+
 lemma find_cong[fundef_cong]:
   assumes "xs = ys" and "\<And>x. x \<in> set ys \<Longrightarrow> P x = Q x"
   shows "List.find P xs = List.find Q ys"
--- a/src/HOL/Map.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Map.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -667,6 +667,10 @@
 lemma fun_upd_None_if_notin_dom[simp]: "k \<notin> dom m \<Longrightarrow> m(k := None) = m"
   by auto
 
+lemma ran_map_upd_Some:
+  "\<lbrakk> m x = Some y; inj_on m (dom m); z \<notin> ran m \<rbrakk> \<Longrightarrow> ran(m(x := Some z)) = ran m - {y} \<union> {z}"
+by(force simp add: ran_def domI inj_onD)
+
 lemma ran_map_add:
   assumes "dom m1 \<inter> dom m2 = {}"
   shows "ran (m1 ++ m2) = ran m1 \<union> ran m2"
--- a/src/HOL/Nitpick_Examples/Tests_Nits.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Nitpick_Examples/Tests_Nits.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -11,6 +11,6 @@
 imports Main
 begin
 
-ML \<open>() |> getenv "KODKODI" <> "" ? Nitpick_Tests.run_all_tests\<close>
+ML \<open>if getenv "KODKODI" = "" then () else Nitpick_Tests.run_all_tests \<^context>\<close>
 
 end
--- a/src/HOL/Nitpick_Examples/minipick.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Nitpick_Examples/minipick.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -395,11 +395,12 @@
 fun solve_any_kodkod_problem thy problems =
   let
     val {debug, overlord, timeout, ...} = Nitpick_Commands.default_params thy []
+    val kodkod_scala = Config.get_global thy Kodkod.kodkod_scala
     val deadline = Timeout.end_time timeout
     val max_threads = 1
     val max_solutions = 1
   in
-    case solve_any_problem debug overlord deadline max_threads max_solutions
+    case solve_any_problem kodkod_scala debug overlord deadline max_threads max_solutions
                            problems of
       Normal ([], _, _) => "none"
     | Normal _ => "genuine"
--- a/src/HOL/ROOT	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/ROOT	Fri Dec 10 08:58:09 2021 +0100
@@ -36,6 +36,7 @@
     "ML"
     Peirce
     Records
+    Rewrite_Examples
     Seq
     Sqrt
   document_files
@@ -695,7 +696,6 @@
     Reflection_Examples
     Refute_Examples
     Residue_Ring
-    Rewrite_Examples
     SOS
     SOS_Cert
     Serbian
--- a/src/HOL/Relation.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Relation.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -241,6 +241,11 @@
 lemma irrefl_distinct [code]: "irrefl r \<longleftrightarrow> (\<forall>(a, b) \<in> r. a \<noteq> b)"
   by (auto simp add: irrefl_def)
 
+lemma (in preorder) irreflp_less[simp]: "irreflp (<)"
+  by (simp add: irreflpI)
+
+lemma (in preorder) irreflp_greater[simp]: "irreflp (>)"
+  by (simp add: irreflpI)
 
 subsubsection \<open>Asymmetry\<close>
 
@@ -259,6 +264,17 @@
 lemma asym_iff: "asym R \<longleftrightarrow> (\<forall>x y. (x,y) \<in> R \<longrightarrow> (y,x) \<notin> R)"
   by (blast intro: asymI dest: asymD)
 
+context preorder begin
+
+lemma asymp_less[simp]: "asymp (<)"
+  by (auto intro: asympI dual_order.asym)
+
+lemma asymp_greater[simp]: "asymp (>)"
+  by (auto intro: asympI dual_order.asym)
+
+end
+
+
 subsubsection \<open>Symmetry\<close>
 
 definition sym :: "'a rel \<Rightarrow> bool"
--- a/src/HOL/Set_Interval.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Set_Interval.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -1581,7 +1581,7 @@
   assume "finite S" and "\<not> Suc (Max S) \<ge> card S"
   then have "Suc (Max S) < card S"
     by simp
-  with `finite S` have "S \<subseteq> {0..Max S}"
+  with \<open>finite S\<close> have "S \<subseteq> {0..Max S}"
     by auto
   hence "card S \<le> card {0..Max S}"
     by (intro card_mono; auto)
@@ -2110,14 +2110,14 @@
     let ?S' = "S - {Max S}"
     from Suc have "Max S \<in> S" by (auto intro: Max_in)
     hence cards: "card S = Suc (card ?S')"
-      using `finite S` by (intro card.remove; auto)
+      using \<open>finite S\<close> by (intro card.remove; auto)
     hence "\<Sum> {0..<card ?S'} \<le> \<Sum> ?S'"
       using Suc by (intro Suc; auto)
 
     hence "\<Sum> {0..<card ?S'} + x \<le> \<Sum> ?S' + Max S"
-      using `Max S \<ge> x` by simp
+      using \<open>Max S \<ge> x\<close> by simp
     also have "... = \<Sum> S"
-      using sum.remove[OF `finite S` `Max S \<in> S`, where g="\<lambda>x. x"]
+      using sum.remove[OF \<open>finite S\<close> \<open>Max S \<in> S\<close>, where g="\<lambda>x. x"]
       by simp
     finally show ?case
       using cards Suc by auto
--- a/src/HOL/Tools/Nitpick/kodkod.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/Nitpick/kodkod.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -166,7 +166,8 @@
   val is_problem_trivially_false : problem -> bool
   val problems_equivalent : problem * problem -> bool
   val solve_any_problem :
-    bool -> bool -> Time.time -> int -> int -> problem list -> outcome
+    bool -> bool -> bool -> Time.time -> int -> int -> problem list -> outcome
+  val kodkod_scala : bool Config.T
 end;
 
 structure Kodkod : KODKOD =
@@ -958,7 +959,7 @@
    is partly due to the JVM and partly due to the ML "bash" function. *)
 val fudge_ms = 250
 
-fun uncached_solve_any_problem overlord deadline max_threads0 max_solutions problems =
+fun uncached_solve_any_problem kodkod_scala overlord deadline max_threads0 max_solutions problems =
   let
     val j = find_index (curry (op =) True o #formula) problems
     val indexed_problems = if j >= 0 then
@@ -975,8 +976,7 @@
       then Options.default_int \<^system_option>\<open>kodkod_max_threads\<close>
       else max_threads0
 
-    val external_process =
-      not (Options.default_bool \<^system_option>\<open>kodkod_scala\<close>) orelse overlord
+    val external_process = not kodkod_scala orelse overlord
     val timeout0 = Time.toMilliseconds (deadline - Time.now ())
     val timeout = if external_process then timeout0 - fudge_ms else timeout0
 
@@ -1058,11 +1058,11 @@
   Synchronized.var "Kodkod.cached_outcome"
                    (NONE : ((int * problem list) * outcome) option)
 
-fun solve_any_problem debug overlord deadline max_threads max_solutions
+fun solve_any_problem kodkod_scala debug overlord deadline max_threads max_solutions
                       problems =
   let
     fun do_solve () =
-      uncached_solve_any_problem overlord deadline max_threads max_solutions
+      uncached_solve_any_problem kodkod_scala overlord deadline max_threads max_solutions
                                  problems
   in
     if debug orelse overlord then
@@ -1085,4 +1085,6 @@
         end
   end
 
+val kodkod_scala = Attrib.setup_option_bool (\<^system_option>\<open>kodkod_scala\<close>, \<^here>)
+
 end;
--- a/src/HOL/Tools/Nitpick/nitpick.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -692,6 +692,7 @@
     fun solve_any_problem (found_really_genuine, max_potential, max_genuine,
                            donno) first_time problems =
       let
+        val kodkod_scala = Config.get ctxt KK.kodkod_scala
         val max_potential = Int.max (0, max_potential)
         val max_genuine = Int.max (0, max_genuine)
         fun print_and_check genuine (j, bounds) =
@@ -702,7 +703,7 @@
         if max_solutions <= 0 then
           (found_really_genuine, 0, 0, donno)
         else
-          case KK.solve_any_problem debug overlord deadline max_threads
+          case KK.solve_any_problem kodkod_scala debug overlord deadline max_threads
                                     max_solutions (map fst problems) of
             KK.Normal ([], unsat_js, s) =>
             (update_checked_problems problems unsat_js; show_kodkod_warning s;
--- a/src/HOL/Tools/Nitpick/nitpick_tests.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick_tests.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -7,7 +7,7 @@
 
 signature NITPICK_TESTS =
 sig
-  val run_all_tests : unit -> unit
+  val run_all_tests : Proof.context -> unit
 end;
 
 structure Nitpick_Tests : NITPICK_TESTS =
@@ -208,15 +208,17 @@
      formula = formula}
   end
 
-fun run_all_tests () =
+fun run_all_tests ctxt =
   let
-    val {debug, overlord, timeout, ...} = Nitpick_Commands.default_params \<^theory> []
+    val thy = Proof_Context.theory_of ctxt
+    val {debug, overlord, timeout, ...} = Nitpick_Commands.default_params thy []
+    val kodkod_scala = Config.get ctxt Kodkod.kodkod_scala
     val deadline = Timeout.end_time timeout
     val max_threads = 1
     val max_solutions = 1
   in
-    case Kodkod.solve_any_problem debug overlord deadline max_threads max_solutions
-                                  (map (problem_for_nut \<^context>) tests) of
+    case Kodkod.solve_any_problem kodkod_scala debug overlord deadline max_threads max_solutions
+                                  (map (problem_for_nut ctxt) tests) of
       Kodkod.Normal ([], _, _) => ()
     | _ => error "Tests failed"
   end
--- a/src/HOL/Tools/SMT/cvc4_interface.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/SMT/cvc4_interface.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -23,7 +23,7 @@
 local
   fun translate_config order ctxt =
     {order = order,
-     logic = K "(set-logic ALL_SUPPORTED)\n",
+     logic = K (K "(set-logic ALL_SUPPORTED)\n"),
      fp_kinds = [BNF_Util.Least_FP, BNF_Util.Greatest_FP],
      serialize = #serialize (SMTLIB_Interface.translate_config order ctxt)}
 in
--- a/src/HOL/Tools/SMT/smt_config.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/SMT/smt_config.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -34,6 +34,7 @@
   val monomorph_instances: int Config.T
   val explicit_application: int Config.T
   val higher_order: bool Config.T
+  val native_bv: bool Config.T
   val nat_as_int: bool Config.T
   val infer_triggers: bool Config.T
   val debug_files: string Config.T
@@ -194,6 +195,7 @@
 val monomorph_instances = Attrib.setup_config_int \<^binding>\<open>smt_monomorph_instances\<close> (K 500)
 val explicit_application = Attrib.setup_config_int \<^binding>\<open>smt_explicit_application\<close> (K 1)
 val higher_order = Attrib.setup_config_bool \<^binding>\<open>smt_higher_order\<close> (K false)
+val native_bv = Attrib.setup_config_bool \<^binding>\<open>native_bv\<close> (K true)
 val nat_as_int = Attrib.setup_config_bool \<^binding>\<open>smt_nat_as_int\<close> (K false)
 val infer_triggers = Attrib.setup_config_bool \<^binding>\<open>smt_infer_triggers\<close> (K false)
 val debug_files = Attrib.setup_config_string \<^binding>\<open>smt_debug_files\<close> (K "")
--- a/src/HOL/Tools/SMT/smt_real.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/SMT/smt_real.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -10,7 +10,7 @@
 
 (* SMT-LIB logic *)
 
-fun smtlib_logic ts =
+fun smtlib_logic _ ts =
   if exists (Term.exists_type (Term.exists_subtype (equal \<^typ>\<open>real\<close>))) ts
   then SOME "AUFLIRA"
   else NONE
--- a/src/HOL/Tools/SMT/smt_solver.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/SMT/smt_solver.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -132,7 +132,7 @@
     val (str, replay_data as {context = ctxt', ...}) =
       ithms
       |> tap (trace_assms ctxt)
-      |> SMT_Translate.translate ctxt smt_options comments
+      |> SMT_Translate.translate ctxt name smt_options comments
       ||> tap trace_replay_data
   in (run_solver ctxt' name (make_command command options) str, replay_data) end
 
--- a/src/HOL/Tools/SMT/smt_systems.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/SMT/smt_systems.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -77,6 +77,8 @@
     else
       if Config.get ctxt SMT_Config.higher_order then
         SMTLIB_Interface.hosmtlibC
+      else if Config.get ctxt SMT_Config.native_bv then
+        SMTLIB_Interface.bvsmlibC
       else
         SMTLIB_Interface.smtlibC
 in
@@ -147,7 +149,9 @@
     ["-smt2"]
 
   fun select_class ctxt =
-    if Config.get ctxt z3_extensions then Z3_Interface.smtlib_z3C else SMTLIB_Interface.smtlibC
+    if Config.get ctxt z3_extensions then Z3_Interface.smtlib_z3C
+    else if Config.get ctxt SMT_Config.native_bv then SMTLIB_Interface.bvsmlibC
+    else SMTLIB_Interface.smtlibC
 in
 
 val z3: SMT_Solver.solver_config = {
--- a/src/HOL/Tools/SMT/smt_translate.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/SMT/smt_translate.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -22,7 +22,7 @@
     funcs: (string * (string list * string)) list }
   type config = {
     order: SMT_Util.order,
-    logic: term list -> string,
+    logic: string -> term list -> string,
     fp_kinds: BNF_Util.fp_kind list,
     serialize: (string * string) list -> string list -> sign -> sterm list -> string }
   type replay_data = {
@@ -35,7 +35,7 @@
 
   (*translation*)
   val add_config: SMT_Util.class * (Proof.context -> config) -> Context.generic -> Context.generic
-  val translate: Proof.context -> (string * string) list -> string list -> (int * thm) list ->
+  val translate: Proof.context -> string -> (string * string) list -> string list -> (int * thm) list ->
     string * replay_data
 end;
 
@@ -66,7 +66,7 @@
 
 type config = {
   order: SMT_Util.order,
-  logic: term list -> string,
+  logic: string -> term list -> string,
   fp_kinds: BNF_Util.fp_kind list,
   serialize: (string * string) list -> string list -> sign -> sterm list -> string }
 
@@ -487,7 +487,7 @@
         "for solver class " ^ quote (SMT_Util.string_of_class cs)))
   end
 
-fun translate ctxt smt_options comments ithms =
+fun translate ctxt prover smt_options comments ithms =
   let
     val {order, logic, fp_kinds, serialize} = get_config ctxt
 
@@ -529,7 +529,7 @@
       |>> order = SMT_Util.First_Order ? apfst (cons fun_app_eq)
   in
     (ts4, tr_context)
-    |-> intermediate logic dtyps (builtin SMT_Builtin.dest_builtin) ctxt2
+    |-> intermediate (logic prover) dtyps (builtin SMT_Builtin.dest_builtin) ctxt2
     |>> uncurry (serialize smt_options comments)
     ||> replay_data_of ctxt2 ll_defs rewrite_rules ithms
   end
--- a/src/HOL/Tools/SMT/smtlib_interface.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/SMT/smtlib_interface.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -9,7 +9,9 @@
 sig
   val smtlibC: SMT_Util.class
   val hosmtlibC: SMT_Util.class
-  val add_logic: int * (term list -> string option) -> Context.generic -> Context.generic
+  val bvsmlibC: SMT_Util.class
+  val add_logic: int * (string -> term list -> string option) -> Context.generic -> Context.generic
+  val del_logic: int * (string -> term list -> string option) -> Context.generic -> Context.generic
   val translate_config: SMT_Util.order -> Proof.context -> SMT_Translate.config
   val assert_name_of_index: int -> string
   val assert_index_of_name: string -> int
@@ -23,7 +25,7 @@
 
 val smtlibC = ["smtlib"]   (* SMT-LIB 2 *)
 val hosmtlibC = smtlibC @ hoC   (* possibly SMT-LIB 3 *)
-
+val bvsmlibC = smtlibC @ ["BV"] (* if BV are supported *)
 
 (* builtins *)
 
@@ -73,17 +75,18 @@
 
 structure Logics = Generic_Data
 (
-  type T = (int * (term list -> string option)) list
+  type T = (int * (string -> term list -> string option)) list
   val empty = []
   fun merge data = Ord_List.merge fst_int_ord data
 )
 
 fun add_logic pf = Logics.map (Ord_List.insert fst_int_ord pf)
+fun del_logic pf = Logics.map (Ord_List.remove fst_int_ord pf)
 
-fun choose_logic ctxt ts =
+fun choose_logic ctxt prover ts =
   let
     fun choose [] = "AUFLIA"
-      | choose ((_, f) :: fs) = (case f ts of SOME s => s | NONE => choose fs)
+      | choose ((_, f) :: fs) = (case f prover ts of SOME s => s | NONE => choose fs)
   in
     (case choose (Logics.get (Context.Proof ctxt)) of
       "" => "" (* for default Z3 logic, a subset of everything *)
--- a/src/HOL/Tools/SMT/verit_proof.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/SMT/verit_proof.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -174,7 +174,8 @@
     map fst strategies
   end
 
-fun verit_tac ctxt = SMT_Solver.smt_tac (Context.proof_map (SMT_Config.select_solver "verit") ctxt)
+val select_verit = SMT_Config.select_solver "verit"
+fun verit_tac ctxt = SMT_Solver.smt_tac (Config.put SMT_Config.native_bv false ((Context.proof_map select_verit ctxt)))
 fun verit_tac_stgy stgy ctxt = verit_tac (Context.proof_of (select_veriT_stgy stgy (Context.Proof ctxt)))
 
 datatype raw_veriT_node = Raw_VeriT_Node of {
--- a/src/HOL/Tools/SMT/z3_interface.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/SMT/z3_interface.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -26,7 +26,7 @@
 
 val z3C = ["z3"]
 
-val smtlib_z3C = SMTLIB_Interface.smtlibC @ z3C
+val smtlib_z3C = SMTLIB_Interface.smtlibC @ SMTLIB_Interface.bvsmlibC @ z3C
 
 
 (* interface *)
@@ -34,7 +34,7 @@
 local
   fun translate_config ctxt =
     {order = SMT_Util.First_Order,
-     logic = K "",
+     logic = K (K ""),
      fp_kinds = [BNF_Util.Least_FP],
      serialize = #serialize (SMTLIB_Interface.translate_config SMT_Util.First_Order ctxt)}
 
--- a/src/HOL/Tools/etc/options	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/etc/options	Fri Dec 10 08:58:09 2021 +0100
@@ -38,7 +38,7 @@
 public option MaSh : string = "sml"
   -- "machine learning algorithm to use by Sledgehammer (nb_knn, nb, knn, none)"
 
-public option kodkod_scala : bool = true
+public option kodkod_scala : bool = false
   -- "invoke Nitpick/Kodkod via Isabelle/Scala (instead of external process)"
 
 public option kodkod_max_threads : int = 0
--- a/src/HOL/Tools/try0.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Tools/try0.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -24,7 +24,7 @@
 fun can_apply timeout_opt pre post tac st =
   let val {goal, ...} = Proof.goal st in
     (case (case timeout_opt of
-            SOME timeout => Timeout.apply timeout
+            SOME timeout => Timeout.apply_physical timeout
           | NONE => fn f => fn x => f x) (Seq.pull o tac) (pre st) of
       SOME (x, _) => Thm.nprems_of (post x) < Thm.nprems_of goal
     | NONE => false)
--- a/src/HOL/Wellfounded.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/Wellfounded.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -79,6 +79,9 @@
 lemma (in wellorder) wf: "wf {(x, y). x < y}"
   unfolding wf_def by (blast intro: less_induct)
 
+lemma (in wellorder) wfP_less[simp]: "wfP (<)"
+  by (simp add: wf wfP_def)
+
 
 subsection \<open>Basic Results\<close>
 
--- a/src/HOL/ex/Cartouche_Examples.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/HOL/ex/Cartouche_Examples.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -5,10 +5,8 @@
 section \<open>Some examples with text cartouches\<close>
 
 theory Cartouche_Examples
-imports Main
-keywords
-  "cartouche" :: diag and
-  "text_cartouche" :: thy_decl
+  imports Main
+  keywords "cartouche" :: diag
 begin
 
 subsection \<open>Regular outer syntax\<close>
@@ -135,14 +133,7 @@
 
 subsubsection \<open>Uniform nesting of sub-languages: document source, ML, term, string literals\<close>
 
-ML \<open>
-  Outer_Syntax.command
-    \<^command_keyword>\<open>text_cartouche\<close> ""
-    (Parse.opt_target -- Parse.input Parse.cartouche
-      >> Pure_Syn.document_command {markdown = true})
-\<close>
-
-text_cartouche
+text
 \<open>
   \<^ML>\<open>
       (
--- a/src/HOL/ex/Rewrite_Examples.thy	Fri Dec 10 08:39:34 2021 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,300 +0,0 @@
-theory Rewrite_Examples
-imports Main "HOL-Library.Rewrite"
-begin
-
-section \<open>The rewrite Proof Method by Example\<close>
-
-(* This file is intended to give an overview over
-   the features of the pattern-based rewrite proof method.
-
-   See also https://www21.in.tum.de/~noschinl/Pattern-2014/
-*)
-lemma
-  fixes a::int and b::int and c::int
-  assumes "P (b + a)"
-  shows "P (a + b)"
-by (rewrite at "a + b" add.commute)
-   (rule assms)
-
-(* Selecting a specific subterm in a large, ambiguous term. *)
-lemma
-  fixes a b c :: int
-  assumes "f (a - a + (a - a)) + f (   0    + c) = f 0 + f c"
-  shows   "f (a - a + (a - a)) + f ((a - a) + c) = f 0 + f c"
-  by (rewrite in "f _ + f \<hole> = _" diff_self) fact
-
-lemma
-  fixes a b c :: int
-  assumes "f (a - a +    0   ) + f ((a - a) + c) = f 0 + f c"
-  shows   "f (a - a + (a - a)) + f ((a - a) + c) = f 0 + f c"
-  by (rewrite at "f (_ + \<hole>) + f _ = _" diff_self) fact
-
-lemma
-  fixes a b c :: int
-  assumes "f (  0   + (a - a)) + f ((a - a) + c) = f 0 + f c"
-  shows   "f (a - a + (a - a)) + f ((a - a) + c) = f 0 + f c"
-  by (rewrite in "f (\<hole> + _) + _ = _" diff_self) fact
-
-lemma
-  fixes a b c :: int
-  assumes "f (a - a +    0   ) + f ((a - a) + c) = f 0 + f c"
-  shows   "f (a - a + (a - a)) + f ((a - a) + c) = f 0 + f c"
-  by (rewrite in "f (_ + \<hole>) + _ = _" diff_self) fact
-
-lemma
-  fixes x y :: nat
-  shows"x + y > c \<Longrightarrow> y + x > c"
-  by (rewrite at "\<hole> > c" add.commute) assumption
-
-(* We can also rewrite in the assumptions.  *)
-lemma
-  fixes x y :: nat
-  assumes "y + x > c \<Longrightarrow> y + x > c"
-  shows   "x + y > c \<Longrightarrow> y + x > c"
-  by (rewrite in asm add.commute) fact
-
-lemma
-  fixes x y :: nat
-  assumes "y + x > c \<Longrightarrow> y + x > c"
-  shows   "x + y > c \<Longrightarrow> y + x > c"
-  by (rewrite in "x + y > c" at asm add.commute) fact
-
-lemma
-  fixes x y :: nat
-  assumes "y + x > c \<Longrightarrow> y + x > c"
-  shows   "x + y > c \<Longrightarrow> y + x > c"
-  by (rewrite at "\<hole> > c" at asm  add.commute) fact
-
-lemma
-  assumes "P {x::int. y + 1 = 1 + x}"
-  shows   "P {x::int. y + 1 = x + 1}"
-  by (rewrite at "x+1" in "{x::int. \<hole> }" add.commute) fact
-
-lemma
-  assumes "P {x::int. y + 1 = 1 + x}"
-  shows   "P {x::int. y + 1 = x + 1}"
-  by (rewrite at "any_identifier_will_work+1" in "{any_identifier_will_work::int. \<hole> }" add.commute)
-   fact
-
-lemma
-  assumes "P {(x::nat, y::nat, z). x + z * 3 = Q (\<lambda>s t. s * t + y - 3)}"
-  shows   "P {(x::nat, y::nat, z). x + z * 3 = Q (\<lambda>s t. y + s * t - 3)}"
-  by (rewrite at "b + d * e" in "\<lambda>(a, b, c). _ = Q (\<lambda>d e. \<hole>)" add.commute) fact
-
-(* This is not limited to the first assumption *)
-lemma
-  assumes "PROP P \<equiv> PROP Q"
-  shows "PROP R \<Longrightarrow> PROP P \<Longrightarrow> PROP Q"
-    by (rewrite at asm assms)
-
-lemma
-  assumes "PROP P \<equiv> PROP Q"
-  shows "PROP R \<Longrightarrow> PROP R \<Longrightarrow> PROP P \<Longrightarrow> PROP Q"
-    by (rewrite at asm assms)
-
-(* Rewriting "at asm" selects each full assumption, not any parts *)
-lemma
-  assumes "(PROP P \<Longrightarrow> PROP Q) \<equiv> (PROP S \<Longrightarrow> PROP R)"
-  shows "PROP S \<Longrightarrow> (PROP P \<Longrightarrow> PROP Q) \<Longrightarrow> PROP R"
-  apply (rewrite at asm assms)
-  apply assumption
-  done
-
-
-
-(* Rewriting with conditional rewriting rules works just as well. *)
-lemma test_theorem:
-  fixes x :: nat
-  shows "x \<le> y \<Longrightarrow> x \<ge> y \<Longrightarrow> x = y"
-  by (rule Orderings.order_antisym)
-
-(* Premises of the conditional rule yield new subgoals. The
-   assumptions of the goal are propagated into these subgoals
-*)
-lemma
-  fixes f :: "nat \<Rightarrow> nat"
-  shows "f x \<le> 0 \<Longrightarrow> f x \<ge> 0 \<Longrightarrow> f x = 0"
-  apply (rewrite at "f x" to "0" test_theorem)
-  apply assumption
-  apply assumption
-  apply (rule refl)
-  done
-
-(* This holds also for rewriting in assumptions. The order of assumptions is preserved *)
-lemma
-  assumes rewr: "PROP P \<Longrightarrow> PROP Q \<Longrightarrow> PROP R \<equiv> PROP R'"
-  assumes A1: "PROP S \<Longrightarrow> PROP T \<Longrightarrow> PROP U \<Longrightarrow> PROP P"
-  assumes A2: "PROP S \<Longrightarrow> PROP T \<Longrightarrow> PROP U \<Longrightarrow> PROP Q"
-  assumes C: "PROP S \<Longrightarrow> PROP R' \<Longrightarrow> PROP T \<Longrightarrow> PROP U \<Longrightarrow> PROP V"
-  shows "PROP S \<Longrightarrow> PROP R \<Longrightarrow> PROP T \<Longrightarrow> PROP U \<Longrightarrow> PROP V"
-  apply (rewrite at asm rewr)
-  apply (fact A1)
-  apply (fact A2)
-  apply (fact C)
-  done
-
-
-(*
-   Instantiation.
-
-   Since all rewriting is now done via conversions,
-   instantiation becomes fairly easy to do.
-*)
-
-(* We first introduce a function f and an extended
-   version of f that is annotated with an invariant. *)
-fun f :: "nat \<Rightarrow> nat" where "f n = n"
-definition "f_inv (I :: nat \<Rightarrow> bool) n \<equiv> f n"
-
-lemma annotate_f: "f = f_inv I"
-  by (simp add: f_inv_def fun_eq_iff)
-
-(* We have a lemma with a bound variable n, and
-   want to add an invariant to f. *)
-lemma
-  assumes "P (\<lambda>n. f_inv (\<lambda>_. True) n + 1) = x"
-  shows "P (\<lambda>n. f n + 1) = x"
-  by (rewrite to "f_inv (\<lambda>_. True)" annotate_f) fact
-
-(* We can also add an invariant that contains the variable n bound in the outer context.
-   For this, we need to bind this variable to an identifier. *)
-lemma
-  assumes "P (\<lambda>n. f_inv (\<lambda>x. n < x + 1) n + 1) = x"
-  shows "P (\<lambda>n. f n + 1) = x"
-  by (rewrite in "\<lambda>n. \<hole>" to "f_inv (\<lambda>x. n < x + 1)" annotate_f) fact
-
-(* Any identifier will work *)
-lemma
-  assumes "P (\<lambda>n. f_inv (\<lambda>x. n < x + 1) n + 1) = x"
-  shows "P (\<lambda>n. f n + 1) = x"
-  by (rewrite in "\<lambda>abc. \<hole>" to "f_inv (\<lambda>x. abc < x + 1)" annotate_f) fact
-
-(* The "for" keyword. *)
-lemma
-  assumes "P (2 + 1)"
-  shows "\<And>x y. P (1 + 2 :: nat)"
-by (rewrite in "P (1 + 2)" at for (x) add.commute) fact
-
-lemma
-  assumes "\<And>x y. P (y + x)"
-  shows "\<And>x y. P (x + y :: nat)"
-by (rewrite in "P (x + _)" at for (x y) add.commute) fact
-
-lemma
-  assumes "\<And>x y z. y + x + z = z + y + (x::int)"
-  shows   "\<And>x y z. x + y + z = z + y + (x::int)"
-by (rewrite at "x + y" in "x + y + z" in for (x y z) add.commute) fact
-
-lemma
-  assumes "\<And>x y z. z + (x + y) = z + y + (x::int)"
-  shows   "\<And>x y z. x + y + z = z + y + (x::int)"
-by (rewrite at "(_ + y) + z" in for (y z) add.commute) fact
-
-lemma
-  assumes "\<And>x y z. x + y + z = y + z + (x::int)"
-  shows   "\<And>x y z. x + y + z = z + y + (x::int)"
-by (rewrite at "\<hole> + _" at "_ = \<hole>" in for () add.commute) fact
-
-lemma
-  assumes eq: "\<And>x. P x \<Longrightarrow> g x = x"
-  assumes f1: "\<And>x. Q x \<Longrightarrow> P x"
-  assumes f2: "\<And>x. Q x \<Longrightarrow> x"
-  shows "\<And>x. Q x \<Longrightarrow> g x"
-  apply (rewrite at "g x" in for (x) eq)
-  apply (fact f1)
-  apply (fact f2)
-  done
-
-(* The for keyword can be used anywhere in the pattern where there is an \<And>-Quantifier. *)
-lemma
-  assumes "(\<And>(x::int). x < 1 + x)"
-  and     "(x::int) + 1 > x"
-  shows   "(\<And>(x::int). x + 1 > x) \<Longrightarrow> (x::int) + 1 > x"
-by (rewrite at "x + 1" in for (x) at asm add.commute)
-   (rule assms)
-
-(* The rewrite method also has an ML interface *)
-lemma
-  assumes "\<And>a b. P ((a + 1) * (1 + b)) "
-  shows "\<And>a b :: nat. P ((a + 1) * (b + 1))"
-  apply (tactic \<open>
-    let
-      val (x, ctxt) = yield_singleton Variable.add_fixes "x" \<^context>
-      (* Note that the pattern order is reversed *)
-      val pat = [
-        Rewrite.For [(x, SOME \<^Type>\<open>nat\<close>)],
-        Rewrite.In,
-        Rewrite.Term (\<^Const>\<open>plus \<^Type>\<open>nat\<close> for \<open>Free (x, \<^Type>\<open>nat\<close>)\<close> \<^term>\<open>1 :: nat\<close>\<close>, [])]
-      val to = NONE
-    in CCONVERSION (Rewrite.rewrite_conv ctxt (pat, to) @{thms add.commute}) 1 end
-  \<close>)
-  apply (fact assms)
-  done
-
-lemma
-  assumes "Q (\<lambda>b :: int. P (\<lambda>a. a + b) (\<lambda>a. a + b))"
-  shows "Q (\<lambda>b :: int. P (\<lambda>a. a + b) (\<lambda>a. b + a))"
-  apply (tactic \<open>
-    let
-      val (x, ctxt) = yield_singleton Variable.add_fixes "x" \<^context>
-      val pat = [
-        Rewrite.Concl,
-        Rewrite.In,
-        Rewrite.Term (Free ("Q", (\<^Type>\<open>int\<close> --> TVar (("'b",0), [])) --> \<^Type>\<open>bool\<close>)
-          $ Abs ("x", \<^Type>\<open>int\<close>, Rewrite.mk_hole 1 (\<^Type>\<open>int\<close> --> TVar (("'b",0), [])) $ Bound 0), [(x, \<^Type>\<open>int\<close>)]),
-        Rewrite.In,
-        Rewrite.Term (\<^Const>\<open>plus \<^Type>\<open>int\<close> for \<open>Free (x, \<^Type>\<open>int\<close>)\<close> \<open>Var (("c", 0), \<^Type>\<open>int\<close>)\<close>\<close>, [])
-        ]
-      val to = NONE
-    in CCONVERSION (Rewrite.rewrite_conv ctxt (pat, to) @{thms add.commute}) 1 end
-  \<close>)
-  apply (fact assms)
-  done
-
-(* There is also conversion-like rewrite function: *)
-ML \<open>
-  val ct = \<^cprop>\<open>Q (\<lambda>b :: int. P (\<lambda>a. a + b) (\<lambda>a. b + a))\<close>
-  val (x, ctxt) = yield_singleton Variable.add_fixes "x" \<^context>
-  val pat = [
-    Rewrite.Concl,
-    Rewrite.In,
-    Rewrite.Term (Free ("Q", (\<^typ>\<open>int\<close> --> TVar (("'b",0), [])) --> \<^typ>\<open>bool\<close>)
-      $ Abs ("x", \<^typ>\<open>int\<close>, Rewrite.mk_hole 1 (\<^typ>\<open>int\<close> --> TVar (("'b",0), [])) $ Bound 0), [(x, \<^typ>\<open>int\<close>)]),
-    Rewrite.In,
-    Rewrite.Term (\<^Const>\<open>plus \<^Type>\<open>int\<close> for \<open>Free (x, \<^Type>\<open>int\<close>)\<close> \<open>Var (("c", 0), \<^Type>\<open>int\<close>)\<close>\<close>, [])
-    ]
-  val to = NONE
-  val th = Rewrite.rewrite_conv ctxt (pat, to) @{thms add.commute} ct
-\<close>
-
-section \<open>Regression tests\<close>
-
-ML \<open>
-  val ct = \<^cterm>\<open>(\<lambda>b :: int. (\<lambda>a. b + a))\<close>
-  val (x, ctxt) = yield_singleton Variable.add_fixes "x" \<^context>
-  val pat = [
-    Rewrite.In,
-    Rewrite.Term (\<^Const>\<open>plus \<^Type>\<open>int\<close> for \<open>Var (("c", 0), \<^Type>\<open>int\<close>)\<close> \<open>Var (("c", 0), \<^Type>\<open>int\<close>)\<close>\<close>, [])
-    ]
-  val to = NONE
-  val _ =
-    case try (Rewrite.rewrite_conv ctxt (pat, to) @{thms add.commute}) ct of
-      NONE => ()
-    | _ => error "should not have matched anything"
-\<close>
-
-ML \<open>
-  Rewrite.params_pconv (Conv.all_conv |> K |> K) \<^context> (Vartab.empty, []) \<^cterm>\<open>\<And>x. PROP A\<close>
-\<close>
-
-lemma
-  assumes eq: "PROP A \<Longrightarrow> PROP B \<equiv> PROP C"
-  assumes f1: "PROP D \<Longrightarrow> PROP A"
-  assumes f2: "PROP D \<Longrightarrow> PROP C"
-  shows "\<And>x. PROP D \<Longrightarrow> PROP B"
-  apply (rewrite eq)
-  apply (fact f1)
-  apply (fact f2)
-  done
-
-end
--- a/src/Pure/Admin/build_doc.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Admin/build_doc.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -52,6 +52,7 @@
       {
         case (doc, session) =>
           try {
+            progress.expose_interrupt()
             progress.echo("Documentation " + quote(doc) + " ...")
 
             using(store.open_database_context())(db_context =>
--- a/src/Pure/Admin/build_log.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Admin/build_log.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -491,7 +491,7 @@
         match {
           case Some((SESSION_NAME, session) :: props) =>
             for (theory <- Markup.Name.unapply(props))
-            yield (session, theory -> Markup.Timing_Properties.parse(props))
+            yield (session, theory -> Markup.Timing_Properties.get(props))
           case _ => None
         }
     }
--- a/src/Pure/Admin/build_release.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Admin/build_release.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -17,6 +17,9 @@
   private def execute_tar(dir: Path, args: String, strip: Int = 0): Unit =
     Isabelle_System.gnutar(args, dir = dir, strip = strip).check
 
+  private def bash_java_opens(args: String*): String =
+    Bash.strings(args.toList.flatMap(arg => List("--add-opens", arg + "=ALL-UNNAMED")))
+
   object Release_Context
   {
     def apply(
@@ -490,7 +493,6 @@
     context: Release_Context,
     afp_rev: String = "",
     platform_families: List[Platform.Family.Value] = default_platform_families,
-    java_home: Path = default_java_home,
     more_components: List[Path] = Nil,
     website: Option[Path] = None,
     build_sessions: List[String] = Nil,
@@ -703,9 +705,21 @@
                     "    <cp>%EXEDIR%\\" + File.platform_path(cp).replace('/', '\\') + "</cp>")))
                 .replace("\\jdk\\", "\\" + jdk_component + "\\"))
 
+            val java_opts =
+              bash_java_opens(
+                "java.base/java.io",
+                "java.base/java.lang",
+                "java.base/java.lang.reflect",
+                "java.base/java.text",
+                "java.base/java.util",
+                "java.desktop/java.awt.font")
+            val launch4j_jar =
+              Path.explode("windows_app/launch4j-" + Platform.family + "/launch4j.jar")
+
             execute(tmp_dir,
-              "env JAVA_HOME=" + File.bash_platform_path(java_home) +
-              " \"windows_app/launch4j-${ISABELLE_PLATFORM_FAMILY}/launch4j\" isabelle.xml")
+              cat_lines(List(
+                "export LAUNCH4J=" + File.bash_platform_path(launch4j_jar),
+                "isabelle java " + java_opts + " -jar \"$LAUNCH4J\" isabelle.xml")))
 
             Isabelle_System.copy_file(app_template + Path.explode("manifest.xml"),
               isabelle_target + isabelle_exe.ext("manifest"))
@@ -824,7 +838,7 @@
             val other_isabelle = context.other_isabelle(tmp_dir)
 
             Isabelle_System.make_directory(other_isabelle.etc)
-            File.write(other_isabelle.etc_preferences, "ML_system_64 = true\n")
+            File.write(other_isabelle.etc_settings, "ML_OPTIONS=\"--minheap 1000 --maxheap 4000\"\n")
 
             other_isabelle.bash("bin/isabelle build -f -j " + parallel_jobs +
               " -o browser_info -o document=pdf -o document_variants=document:outline=/proof,/ML" +
@@ -844,15 +858,12 @@
 
   /** command line entry point **/
 
-  def default_java_home: Path = Path.explode("$JAVA_HOME").expand
-
   def main(args: Array[String]): Unit =
   {
     Command_Line.tool {
       var afp_rev = ""
       var components_base: Path = Components.default_components_base
       var target_dir = Path.current
-      var java_home = default_java_home
       var release_name = ""
       var source_archive = ""
       var website: Option[Path] = None
@@ -872,7 +883,6 @@
     -C DIR       base directory for Isabelle components (default: """ +
         Components.default_components_base + """)
     -D DIR       target directory (default ".")
-    -J JAVA_HOME Java version for running launch4j (e.g. version 11)
     -R RELEASE   explicit release name
     -S ARCHIVE   use existing source archive (file or URL)
     -W WEBSITE   produce minimal website in given directory
@@ -889,7 +899,6 @@
         "A:" -> (arg => afp_rev = arg),
         "C:" -> (arg => components_base = Path.explode(arg)),
         "D:" -> (arg => target_dir = Path.explode(arg)),
-        "J:" -> (arg => java_home = Path.explode(arg)),
         "R:" -> (arg => release_name = arg),
         "S:" -> (arg => source_archive = arg),
         "W:" -> (arg => website = Some(Path.explode(arg))),
@@ -936,7 +945,7 @@
         }
 
       build_release(options, context, afp_rev = afp_rev, platform_families = platform_families,
-        java_home = java_home, more_components = more_components, build_sessions = build_sessions,
+        more_components = more_components, build_sessions = build_sessions,
         build_library = build_library, parallel_jobs = parallel_jobs, website = website)
     }
   }
--- a/src/Pure/Admin/build_status.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Admin/build_status.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -204,7 +204,7 @@
     profiles: List[Profile] = default_profiles,
     only_sessions: Set[String] = Set.empty,
     ml_statistics: Boolean = false,
-    ml_statistics_domain: String => Boolean = (key: String) => true,
+    ml_statistics_domain: String => Boolean = _ => true,
     verbose: Boolean = false): Data =
   {
     val date = Date.now()
@@ -270,9 +270,11 @@
               threads1 max threads2
             }
             val ml_platform = res.string(Build_Log.Settings.ML_PLATFORM)
+            val ml_platform_64 =
+              ml_platform.startsWith("x86_64-") || ml_platform.startsWith("arm64-")
             val data_name =
               profile.description +
-                (if (ml_platform.startsWith("x86_64-")) ", 64bit" else "") +
+                (if (ml_platform_64) ", 64bit" else "") +
                 (if (threads == 1) "" else ", " + threads + " threads")
 
             res.get_string(Build_Log.Prop.build_host).foreach(host =>
@@ -375,7 +377,7 @@
           List(HTML.description(
             List(HTML.text("status date:") -> HTML.text(data.date.toString))))),
         HTML.par(
-          List(HTML.itemize(data.entries.map({ case data_entry =>
+          List(HTML.itemize(data.entries.map(data_entry =>
             List(
               HTML.link(clean_name(data_entry.name) + "/index.html",
                 HTML.text(data_entry.name))) :::
@@ -386,7 +388,7 @@
                 List(HTML.span(HTML.error_message, HTML.text("Failed sessions:"))) :::
                 List(HTML.itemize(sessions.map(s => s.head.present_errors(s.name))))
             })
-          }))))))
+          ))))))
 
     for (data_entry <- data.entries) {
       val data_name = data_entry.name
@@ -421,10 +423,10 @@
                       entry.ml_timing.elapsed.minutes.toString,
                       entry.ml_timing.resources.minutes.toString,
                       entry.maximum_code.toString,
-                      entry.maximum_code.toString,
+                      entry.average_code.toString,
+                      entry.maximum_stack.toString,
                       entry.average_stack.toString,
-                      entry.maximum_stack.toString,
-                      entry.average_heap.toString,
+                      entry.maximum_heap.toString,
                       entry.average_heap.toString,
                       entry.stored_heap.toString).mkString(" "))))
 
@@ -606,7 +608,7 @@
         "l:" -> (arg => options = options + ("build_log_history=" + arg)),
         "o:" -> (arg => options = options + arg),
         "s:" -> (arg =>
-          space_explode('x', arg).map(Value.Int.parse(_)) match {
+          space_explode('x', arg).map(Value.Int.parse) match {
             case List(w, h) if w > 0 && h > 0 => image_size = (w, h)
             case _ => error("Error bad PNG image size: " + quote(arg))
           }),
--- a/src/Pure/Admin/build_verit.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Admin/build_verit.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -9,7 +9,7 @@
 
 object Build_VeriT
 {
-  val default_download_url = "https://verit.loria.fr/rmx/2021.06/verit-2021.06-rmx.tar.gz"
+  val default_download_url = "https://verit.loria.fr/rmx/2021.06.2/verit-2021.06.2-rmx.tar.gz"
 
 
   /* build veriT */
--- a/src/Pure/Admin/isabelle_cronjob.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Admin/isabelle_cronjob.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -314,7 +314,7 @@
   {
     List(
       List(Remote_Build("Linux A", "augsburg1",
-          options = "-m32 -B -M1x2,2,4" +
+          options = "-m32 -B -M4" +
             " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" +
             " -e ISABELLE_GHC_SETUP=true" +
             " -e ISABELLE_MLTON=mlton" +
--- a/src/Pure/Admin/isabelle_devel.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Admin/isabelle_devel.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -44,7 +44,6 @@
           val context = Build_Release.Release_Context(target_dir)
           Build_Release.build_release_archive(context, rev)
           Build_Release.build_release(options, context, afp_rev = afp_rev,
-            java_home = Path.explode("$BUILD_JAVA_HOME"),
             build_sessions = List(Isabelle_System.getenv("ISABELLE_LOGIC")),
             website = Some(website_dir))
         })
--- a/src/Pure/Concurrent/timeout.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Concurrent/timeout.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -14,6 +14,7 @@
   val scale_time: Time.time -> Time.time
   val end_time: Time.time -> Time.time
   val apply: Time.time -> ('a -> 'b) -> 'a -> 'b
+  val apply_physical: Time.time -> ('a -> 'b) -> 'a -> 'b
   val print: Time.time -> string
 end;
 
@@ -29,7 +30,7 @@
 
 fun end_time timeout = Time.now () + scale_time timeout;
 
-fun apply timeout f x =
+fun apply' {physical, timeout} f x =
   if ignored timeout then f x
   else
     Thread_Attributes.with_attributes Thread_Attributes.no_interrupts (fn orig_atts =>
@@ -38,7 +39,7 @@
         val start = Time.now ();
 
         val request =
-          Event_Timer.request {physical = false} (start + scale_time timeout)
+          Event_Timer.request {physical = physical} (start + scale_time timeout)
             (fn () => Isabelle_Thread.interrupt_unsynchronized self);
         val result =
           Exn.capture (fn () => Thread_Attributes.with_attributes orig_atts (fn _ => f x)) ();
@@ -52,6 +53,9 @@
         else (Exn.release test; Exn.release result)
       end);
 
+fun apply timeout f x = apply' {physical = false, timeout = timeout} f x;
+fun apply_physical timeout f x = apply' {physical = true, timeout = timeout} f x;
+
 fun print t = "Timeout after " ^ Value.print_time t ^ "s";
 
 end;
--- a/src/Pure/General/file.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/General/file.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -292,4 +292,36 @@
     else if (Platform.is_windows) Isabelle_System.chmod("a-x", path)
     else path.file.setExecutable(flag, false)
   }
+
+
+  /* content */
+
+  object Content
+  {
+    def apply(path: Path, content: Bytes): Content = new Content_Bytes(path, content)
+    def apply(path: Path, content: String): Content = new Content_String(path, content)
+    def apply(path: Path, content: XML.Body): Content_XML = new Content_XML(path, content)
+  }
+
+  trait Content
+  {
+    def path: Path
+    def write(dir: Path): Unit
+  }
+
+  final class Content_Bytes private[File](val path: Path, content: Bytes) extends Content
+  {
+    def write(dir: Path): Unit = Bytes.write(dir + path, content)
+  }
+
+  final class Content_String private[File](val path: Path, content: String) extends Content
+  {
+    def write(dir: Path): Unit = File.write(dir + path, content)
+  }
+
+  final class Content_XML private[File](val path: Path, content: XML.Body)
+  {
+    def output(out: XML.Body => String): Content_String =
+      new Content_String(path, out(content))
+  }
 }
--- a/src/Pure/General/scan.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/General/scan.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -24,7 +24,6 @@
   abstract class Line_Context
   case object Finished extends Line_Context
   case class Quoted(quote: String) extends Line_Context
-  case object Verbatim extends Line_Context
   case class Cartouche(depth: Int) extends Line_Context
   case class Comment_Prefix(symbol: Symbol.Symbol) extends Line_Context
   case class Cartouche_Comment(depth: Int) extends Line_Context
@@ -136,41 +135,6 @@
       quote ~ quoted_body(quote) ^^ { case x ~ y => x + y }
 
 
-    /* verbatim text */
-
-    private def verbatim_body: Parser[String] =
-      rep(many1(sym => sym != "*") | """\*(?!\})""".r) ^^ (_.mkString)
-
-    def verbatim: Parser[String] =
-    {
-      "{*" ~ verbatim_body ~ "*}" ^^ { case x ~ y ~ z => x + y + z }
-    }.named("verbatim")
-
-    def verbatim_content(source: String): String =
-    {
-      require(parseAll(verbatim, source).successful, "no verbatim text")
-      source.substring(2, source.length - 2)
-    }
-
-    def verbatim_line(ctxt: Line_Context): Parser[(String, Line_Context)] =
-    {
-      ctxt match {
-        case Finished =>
-          "{*" ~ verbatim_body ~ opt_term("*}") ^^
-            { case x ~ y ~ Some(z) => (x + y + z, Finished)
-              case x ~ y ~ None => (x + y, Verbatim) }
-        case Verbatim =>
-          verbatim_body ~ opt_term("*}") ^^
-            { case x ~ Some(y) => (x + y, Finished)
-              case x ~ None => (x, Verbatim) }
-        case _ => failure("")
-      }
-    }.named("verbatim_line")
-
-    val recover_verbatim: Parser[String] =
-      "{*" ~ verbatim_body ^^ { case x ~ y => x + y }
-
-
     /* nested text cartouches */
 
     def cartouche_depth(depth: Int): Parser[(String, Int)] = new Parser[(String, Int)]
--- a/src/Pure/Isar/args.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Isar/args.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -27,9 +27,6 @@
   val name_position: (string * Position.T) parser
   val cartouche_inner_syntax: string parser
   val cartouche_input: Input.source parser
-  val text_token: Token.T parser
-  val text_input: Input.source parser
-  val text: string parser
   val binding: binding parser
   val alt_name: string parser
   val liberal_name: string parser
@@ -47,8 +44,7 @@
   val named_fact: (string -> string option * thm list) -> thm list parser
   val named_attribute: (string * Position.T -> morphism -> attribute) ->
     (morphism -> attribute) parser
-  val text_declaration: (Input.source -> declaration) -> declaration parser
-  val cartouche_declaration: (Input.source -> declaration) -> declaration parser
+  val embedded_declaration: (Input.source -> declaration) -> declaration parser
   val typ_abbrev: typ context_parser
   val typ: typ context_parser
   val term: term context_parser
@@ -110,10 +106,6 @@
 val cartouche_inner_syntax = cartouche >> Token.inner_syntax_of;
 val cartouche_input = cartouche >> Token.input_of;
 
-val text_token = Parse.token (Parse.embedded || Parse.verbatim);
-val text_input = text_token >> Token.input_of;
-val text = text_token >> Token.content_of;
-
 val binding = Parse.input name >> (Binding.make o Input.source_content);
 val alt_name = alt_string >> Token.content_of;
 val liberal_name = (symbolic >> Token.content_of) || name;
@@ -157,11 +149,9 @@
   name_token >>
     Token.evaluate Token.Attribute (fn tok => att (Token.content_of tok, Token.pos_of tok));
 
-fun text_declaration read =
-  internal_declaration || text_token >> Token.evaluate Token.Declaration (read o Token.input_of);
-
-fun cartouche_declaration read =
-  internal_declaration || cartouche >> Token.evaluate Token.Declaration (read o Token.input_of);
+fun embedded_declaration read =
+  internal_declaration ||
+  Parse.token Parse.embedded >> Token.evaluate Token.Declaration (read o Token.input_of);
 
 
 (* terms and types *)
--- a/src/Pure/Isar/method.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Isar/method.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -327,7 +327,7 @@
 
 val parse_tactic =
   Scan.state :|-- (fn context =>
-    Scan.lift (Args.text_declaration (fn source =>
+    Scan.lift (Args.embedded_declaration (fn source =>
       let
         val tac =
           context
@@ -749,7 +749,7 @@
   in Parse.read_embedded ctxt keywords (Scan.many Token.not_eof) #> read_closure ctxt end;
 
 val text_closure =
-  Args.context -- Scan.lift (Parse.token Parse.text) >> (fn (ctxt, tok) =>
+  Args.context -- Scan.lift (Parse.token Parse.embedded) >> (fn (ctxt, tok) =>
     (case Token.get_value tok of
       SOME (Token.Source src) => read ctxt src
     | _ =>
--- a/src/Pure/Isar/parse.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Isar/parse.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -30,7 +30,6 @@
   val string: string parser
   val string_position: (string * Position.T) parser
   val alt_string: string parser
-  val verbatim: string parser
   val cartouche: string parser
   val control: Antiquote.control parser
   val eof: string parser
@@ -70,7 +69,6 @@
   val embedded_inner_syntax: string parser
   val embedded_input: Input.source parser
   val embedded_position: (string * Position.T) parser
-  val text: string parser
   val path_input: Input.source parser
   val path: string parser
   val path_binding: (string * Position.T) parser
@@ -120,7 +118,6 @@
   val thms1: (Facts.ref * Token.src list) list parser
   val options: ((string * Position.T) * (string * Position.T)) list parser
   val embedded_ml: ML_Lex.token Antiquote.antiquote list parser
-  val embedded_ml_underscore: ML_Lex.token Antiquote.antiquote list parser
   val read_antiq: Keyword.keywords -> 'a parser -> Symbol_Pos.T list * Position.T -> 'a
   val read_embedded: Proof.context -> Keyword.keywords -> 'a parser -> Input.source -> 'a
   val read_embedded_src: Proof.context -> Keyword.keywords -> 'a parser -> Token.src -> 'a
@@ -200,7 +197,6 @@
 val float_number = kind Token.Float;
 val string = kind Token.String;
 val alt_string = kind Token.Alt_String;
-val verbatim = kind Token.Verbatim;
 val cartouche = kind Token.Cartouche;
 val control = token (kind Token.control_kind) >> (the o Token.get_control);
 val eof = kind Token.EOF;
@@ -289,8 +285,6 @@
 val embedded_input = input embedded;
 val embedded_position = embedded_input >> Input.source_content;
 
-val text = group (fn () => "text") (embedded || verbatim);
-
 val path_input = group (fn () => "file name/path specification") embedded_input;
 val path = path_input >> Input.string_of;
 val path_binding = group (fn () => "path binding (strict file name)") (position embedded);
@@ -400,8 +394,8 @@
 
 (* embedded source text *)
 
-val ML_source = input (group (fn () => "ML source") text);
-val document_source = input (group (fn () => "document source") text);
+val ML_source = input (group (fn () => "ML source") embedded);
+val document_source = input (group (fn () => "document source") embedded);
 
 val document_marker =
   group (fn () => "document marker")
@@ -441,7 +435,7 @@
 
 val argument_kinds =
  [Token.Ident, Token.Long_Ident, Token.Sym_Ident, Token.Var, Token.Type_Ident, Token.Type_Var,
-  Token.Nat, Token.Float, Token.String, Token.Alt_String, Token.Cartouche, Token.Verbatim];
+  Token.Nat, Token.Float, Token.String, Token.Alt_String, Token.Cartouche];
 
 fun arguments is_symid =
   let
@@ -505,12 +499,10 @@
 (* embedded ML *)
 
 val embedded_ml =
+  input underscore >> ML_Lex.read_source ||
   embedded_input >> ML_Lex.read_source ||
   control >> (ML_Lex.read_symbols o Antiquote.control_symbols);
 
-val embedded_ml_underscore =
-  input underscore >> ML_Lex.read_source || embedded_ml;
-
 
 (* read embedded source, e.g. for antiquotations *)
 
--- a/src/Pure/Isar/parse.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Isar/parse.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -65,9 +65,9 @@
     def nat: Parser[Int] = atom("natural number", _.is_nat) ^^ (s => Integer.parseInt(s))
     def name: Parser[String] = atom("name", _.is_name)
     def embedded: Parser[String] = atom("embedded content", _.is_embedded)
-    def text: Parser[String] = atom("text", _.is_text)
-    def ML_source: Parser[String] = atom("ML source", _.is_text)
-    def document_source: Parser[String] = atom("document source", _.is_text)
+    def text: Parser[String] = atom("text", _.is_embedded)
+    def ML_source: Parser[String] = atom("ML source", _.is_embedded)
+    def document_source: Parser[String] = atom("document source", _.is_embedded)
 
     def opt_keyword(s: String): Parser[Boolean] =
       ($$$("(") ~! $$$(s) ~ $$$(")")) ^^ { case _ => true } | success(false)
--- a/src/Pure/Isar/token.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Isar/token.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -11,7 +11,7 @@
     Command | Keyword | Ident | Long_Ident | Sym_Ident | Var | Type_Ident | Type_Var | Nat |
     Float | Space |
     (*delimited content*)
-    String | Alt_String | Verbatim | Cartouche |
+    String | Alt_String | Cartouche |
     Control of Antiquote.control |
     Comment of Comment.kind option |
     (*special content*)
@@ -31,7 +31,8 @@
     Fact of string option * thm list |
     Attribute of morphism -> attribute |
     Declaration of declaration |
-    Files of file Exn.result list
+    Files of file Exn.result list |
+    Output of XML.body option
   val pos_of: T -> Position.T
   val adjust_offsets: (int -> int option) -> T -> T
   val eof: T
@@ -73,6 +74,8 @@
   val file_source: file -> Input.source
   val get_files: T -> file Exn.result list
   val put_files: file Exn.result list -> T -> T
+  val get_output: T -> XML.body option
+  val put_output: XML.body -> T -> T
   val get_value: T -> value option
   val reports_of_value: T -> Position.report list
   val name_value: name_value -> value
@@ -120,7 +123,7 @@
   Command | Keyword | Ident | Long_Ident | Sym_Ident | Var | Type_Ident | Type_Var | Nat |
   Float | Space |
   (*delimited content*)
-  String | Alt_String | Verbatim | Cartouche |
+  String | Alt_String | Cartouche |
   Control of Antiquote.control |
   Comment of Comment.kind option |
   (*special content*)
@@ -148,7 +151,6 @@
   | Space => "white space"
   | String => "quoted string"
   | Alt_String => "back-quoted string"
-  | Verbatim => "verbatim text"
   | Cartouche => "text cartouche"
   | Control _ => "control cartouche"
   | Comment NONE => "informal comment"
@@ -163,7 +165,6 @@
 val delimited_kind =
   (fn String => true
     | Alt_String => true
-    | Verbatim => true
     | Cartouche => true
     | Control _ => true
     | Comment _ => true
@@ -197,7 +198,8 @@
   Fact of string option * thm list |  (*optional name for dynamic fact, i.e. fact "variable"*)
   Attribute of morphism -> attribute |
   Declaration of declaration |
-  Files of file Exn.result list;
+  Files of file Exn.result list |
+  Output of XML.body option;
 
 type src = T list;
 
@@ -319,7 +321,6 @@
   | Type_Var => (Markup.tvar, "")
   | String => (Markup.string, "")
   | Alt_String => (Markup.alt_string, "")
-  | Verbatim => (Markup.verbatim, "")
   | Cartouche => (Markup.cartouche, "")
   | Control _ => (Markup.cartouche, "")
   | Comment _ => (Markup.comment, "")
@@ -370,7 +371,6 @@
   (case kind of
     String => Symbol_Pos.quote_string_qq x
   | Alt_String => Symbol_Pos.quote_string_bq x
-  | Verbatim => enclose "{*" "*}" x
   | Cartouche => cartouche x
   | Control control => Symbol_Pos.content (Antiquote.control_symbols control)
   | Comment NONE => enclose "(*" "*)" x
@@ -411,6 +411,15 @@
   | put_files _ tok = raise Fail ("Cannot put inlined files here" ^ Position.here (pos_of tok));
 
 
+(* document output *)
+
+fun get_output (Token (_, _, Value (SOME (Output output)))) = output
+  | get_output _ = NONE;
+
+fun put_output output (Token (x, y, Slot)) = Token (x, y, Value (SOME (Output (SOME output))))
+  | put_output _ tok = raise Fail ("Cannot put document output here" ^ Position.here (pos_of tok));
+
+
 (* access values *)
 
 fun get_value (Token (_, _, Value v)) = v
@@ -491,7 +500,8 @@
     | Fact (a, ths) => Fact (a, Morphism.fact phi ths)
     | Attribute att => Attribute (Morphism.transform phi att)
     | Declaration decl => Declaration (Morphism.transform phi decl)
-    | Files _ => v));
+    | Files _ => v
+    | Output _ => v));
 
 
 (* static binding *)
@@ -610,22 +620,6 @@
   | ident_or_symbolic s = Symbol_Pos.is_identifier s orelse is_symid s;
 
 
-(* scan verbatim text *)
-
-val scan_verb =
-  $$$ "*" --| Scan.ahead (~$$ "}") ||
-  Scan.one (fn (s, _) => s <> "*" andalso Symbol.not_eof s) >> single;
-
-val scan_verbatim =
-  Scan.ahead ($$ "{" -- $$ "*") |--
-    !!! "unclosed verbatim text"
-      ((Symbol_Pos.scan_pos --| $$ "{" --| $$ "*") --
-        (Scan.repeats scan_verb -- ($$ "*" |-- $$ "}" |-- Symbol_Pos.scan_pos)));
-
-val recover_verbatim =
-  $$$ "{" @@@ $$$ "*" @@@ Scan.repeats scan_verb;
-
-
 (* scan cartouche *)
 
 val scan_cartouche =
@@ -664,7 +658,6 @@
 fun scan_token keywords = !!! "bad input"
   (Symbol_Pos.scan_string_qq err_prefix >> token_range String ||
     Symbol_Pos.scan_string_bq err_prefix >> token_range Alt_String ||
-    scan_verbatim >> token_range Verbatim ||
     scan_comment >> token_range (Comment NONE) ||
     Comment.scan_outer >> (fn (k, ss) => token (Comment (SOME k)) ss) ||
     scan_cartouche >> token_range Cartouche ||
@@ -687,7 +680,6 @@
 fun recover msg =
   (Symbol_Pos.recover_string_qq ||
     Symbol_Pos.recover_string_bq ||
-    recover_verbatim ||
     Symbol_Pos.recover_cartouche ||
     Symbol_Pos.recover_comment ||
     Scan.one (Symbol.not_eof o Symbol_Pos.symbol) >> single)
--- a/src/Pure/Isar/token.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Isar/token.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -32,7 +32,6 @@
     /*delimited content*/
     val STRING = Value("string")
     val ALT_STRING = Value("back-quoted string")
-    val VERBATIM = Value("verbatim text")
     val CARTOUCHE = Value("text cartouche")
     val CONTROL = Value("control cartouche")
     val INFORMAL_COMMENT = Value("informal comment")
@@ -53,13 +52,12 @@
     {
       val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
       val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
-      val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x))
       val cmt = comment ^^ (x => Token(Token.Kind.INFORMAL_COMMENT, x))
       val formal_cmt = comment_cartouche ^^ (x => Token(Token.Kind.FORMAL_COMMENT, x))
       val cart = cartouche ^^ (x => Token(Token.Kind.CARTOUCHE, x))
       val ctrl = control_cartouche ^^ (x => Token(Token.Kind.CONTROL, x))
 
-      string | (alt_string | (verb | (cmt | (formal_cmt | (cart | ctrl)))))
+      string | (alt_string | (cmt | (formal_cmt | (cart | ctrl))))
     }
 
     private def other_token(keywords: Keyword.Keywords): Parser[Token] =
@@ -99,8 +97,7 @@
       val recover_delimited =
         (recover_quoted("\"") |
           (recover_quoted("`") |
-            (recover_verbatim |
-              (recover_cartouche | recover_comment)))) ^^ (x => Token(Token.Kind.ERROR, x))
+            (recover_cartouche | recover_comment))) ^^ (x => Token(Token.Kind.ERROR, x))
 
       val bad = one(_ => true) ^^ (x => Token(Token.Kind.ERROR, x))
 
@@ -119,14 +116,13 @@
         quoted_line("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
       val alt_string =
         quoted_line("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
-      val verb = verbatim_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
       val cart = cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
       val cmt = comment_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.INFORMAL_COMMENT, x), c) }
       val formal_cmt =
         comment_cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.FORMAL_COMMENT, x), c) }
       val other = other_token(keywords) ^^ { case x => (x, Scan.Finished) }
 
-      string | (alt_string | (verb | (cart | (cmt | (formal_cmt | other)))))
+      string | (alt_string | (cart | (cmt | (formal_cmt | other))))
     }
   }
 
@@ -286,7 +282,6 @@
     kind == Token.Kind.VAR ||
     kind == Token.Kind.TYPE_IDENT ||
     kind == Token.Kind.TYPE_VAR
-  def is_text: Boolean = is_embedded || kind == Token.Kind.VERBATIM
   def is_space: Boolean = kind == Token.Kind.SPACE
   def is_informal_comment: Boolean = kind == Token.Kind.INFORMAL_COMMENT
   def is_formal_comment: Boolean = kind == Token.Kind.FORMAL_COMMENT
@@ -302,7 +297,6 @@
   def is_unfinished: Boolean = is_error &&
    (source.startsWith("\"") ||
     source.startsWith("`") ||
-    source.startsWith("{*") ||
     source.startsWith("(*") ||
     source.startsWith(Symbol.open) ||
     source.startsWith(Symbol.open_decoded))
@@ -319,7 +313,6 @@
   def content: String =
     if (kind == Token.Kind.STRING) Scan.Parsers.quoted_content("\"", source)
     else if (kind == Token.Kind.ALT_STRING) Scan.Parsers.quoted_content("`", source)
-    else if (kind == Token.Kind.VERBATIM) Scan.Parsers.verbatim_content(source)
     else if (kind == Token.Kind.CARTOUCHE) Scan.Parsers.cartouche_content(source)
     else if (kind == Token.Kind.INFORMAL_COMMENT) Scan.Parsers.comment_content(source)
     else if (kind == Token.Kind.FORMAL_COMMENT) Comment.content(source)
--- a/src/Pure/Isar/toplevel.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Isar/toplevel.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -17,6 +17,7 @@
   val is_skipped_proof: state -> bool
   val level: state -> int
   val previous_theory_of: state -> theory option
+  val output_of: state -> Latex.text option
   val context_of: state -> Proof.context
   val generic_theory_of: state -> generic_theory
   val theory_of: state -> theory
@@ -30,6 +31,7 @@
   val pretty_state: state -> Pretty.T list
   val string_of_state: state -> string
   val pretty_abstract: state -> Pretty.T
+  type presentation = state -> Latex.text option
   type transition
   val empty: transition
   val name_of: transition -> string
@@ -44,6 +46,7 @@
   val is_init: transition -> bool
   val modify_init: (unit -> theory) -> transition -> transition
   val exit: transition -> transition
+  val present: (state -> Latex.text) -> transition -> transition
   val keep: (state -> unit) -> transition -> transition
   val keep': (bool -> state -> unit) -> transition -> transition
   val keep_proof: (state -> unit) -> transition -> transition
@@ -62,7 +65,7 @@
     (bool -> local_theory -> local_theory) -> transition -> transition
   val local_theory: (bool * Position.T) option -> (xstring * Position.T) option ->
     (local_theory -> local_theory) -> transition -> transition
-  val present_local_theory: (xstring * Position.T) option -> (state -> unit) ->
+  val present_local_theory: (xstring * Position.T) option -> (state -> Latex.text) ->
     transition -> transition
   val local_theory_to_proof': (bool * Position.T) option -> (xstring * Position.T) option ->
     (bool -> local_theory -> Proof.state) -> transition -> transition
@@ -137,20 +140,20 @@
 fun node_presentation node =
   (node, cases_node init_presentation Context.proof_of Proof.context_of node);
 
-
 datatype state =
-  State of node_presentation * theory option;
-    (*current node with presentation context, previous theory*)
+  State of node_presentation * (theory option * Latex.text option);
+    (*current node with presentation context, previous theory, document output*)
 
 fun node_of (State ((node, _), _)) = node;
-fun previous_theory_of (State (_, prev_thy)) = prev_thy;
+fun previous_theory_of (State (_, (prev_thy, _))) = prev_thy;
+fun output_of (State (_, (_, output))) = output;
 
-fun init_toplevel () = State (node_presentation Toplevel, NONE);
-fun theory_toplevel thy = State (node_presentation (Theory (Context.Theory thy)), NONE);
+fun init_toplevel () = State (node_presentation Toplevel, (NONE, NONE));
+fun theory_toplevel thy = State (node_presentation (Theory (Context.Theory thy)), (NONE, NONE));
 
 val prev_theory = Config.declare_int ("prev_theory", Position.none) (K 0);
 fun get_prev_theory thy = Config.get_global thy prev_theory;
-fun set_prev_theory (State (_, SOME prev_thy)) (Theory gthy) =
+fun set_prev_theory (State (_, (SOME prev_thy, _))) (Theory gthy) =
       let
         val put = Config.put_global prev_theory (Context.theory_identifier prev_thy);
         val gthy' = gthy |> Context.mapping put (Local_Theory.raw_theory put);
@@ -202,10 +205,10 @@
     Proof (prf, _) => Proof_Node.position prf
   | _ => ~1);
 
-fun is_end_theory (State ((Toplevel, _), SOME _)) = true
+fun is_end_theory (State ((Toplevel, _), (SOME _, _))) = true
   | is_end_theory _ = false;
 
-fun end_theory _ (State ((Toplevel, _), SOME thy)) = thy
+fun end_theory _ (State ((Toplevel, _), (SOME thy, _))) = thy
   | end_theory pos _ = error ("Malformed theory" ^ Position.here pos);
 
 
@@ -221,11 +224,11 @@
 
 fun presentation_context (state as State (current, _)) =
   presentation_context0 state
-  |> Presentation_State.put (SOME (State (current, NONE)));
+  |> Presentation_State.put (SOME (State (current, (NONE, NONE))));
 
 fun presentation_state ctxt =
   (case Presentation_State.get ctxt of
-    NONE => State (node_presentation (Theory (Context.Proof ctxt)), NONE)
+    NONE => State (node_presentation (Theory (Context.Proof ctxt)), (NONE, NONE))
   | SOME state => state);
 
 
@@ -261,6 +264,13 @@
 
 (** toplevel transitions **)
 
+(* presentation *)
+
+type presentation = state -> Latex.text option;
+val no_presentation: presentation = K NONE;
+fun presentation g : presentation = SOME o g;
+
+
 (* primitive transitions *)
 
 datatype trans =
@@ -269,36 +279,37 @@
   (*formal exit of theory*)
   Exit |
   (*peek at state*)
-  Keep of bool -> state -> unit |
+  Keep of bool -> presentation |
   (*node transaction and presentation*)
-  Transaction of (bool -> node -> node_presentation) * (state -> unit);
+  Transaction of (bool -> node -> node_presentation) * presentation;
 
 local
 
 exception FAILURE of state * exn;
 
+fun apply_presentation g (st as State (node, (prev_thy, _))) =
+  State (node, (prev_thy, g st));
+
 fun apply f g node =
   let
     val node_pr = node_presentation node;
     val context = cases_proper_node I (Context.Proof o Proof.context_of) node;
-    fun state_error e node_pr' = (State (node_pr', get_theory node), e);
+    fun make_state node_pr' = State (node_pr', (get_theory node, NONE));
 
-    val (result, err) =
-      node
-      |> Runtime.controlled_execution (SOME context) f
-      |> state_error NONE
-      handle exn => state_error (SOME exn) node_pr;
+    val (st', err) =
+      (Runtime.controlled_execution (SOME context) (f #> make_state #> apply_presentation g) node,
+        NONE) handle exn => (make_state node_pr, SOME exn);
   in
     (case err of
-      NONE => tap g result
-    | SOME exn => raise FAILURE (result, exn))
+      NONE => st'
+    | SOME exn => raise FAILURE (st', exn))
   end;
 
 fun apply_tr int trans state =
   (case (trans, node_of state) of
     (Init f, Toplevel) =>
       Runtime.controlled_execution NONE (fn () =>
-        State (node_presentation (Theory (Context.Theory (f ()))), NONE)) ()
+        State (node_presentation (Theory (Context.Theory (f ()))), (NONE, NONE))) ()
   | (Exit, node as Theory (Context.Theory thy)) =>
       let
         val State ((node', pr_ctxt), _) =
@@ -306,11 +317,15 @@
             (fn _ =>
               node_presentation
                 (Theory (Context.Theory (tap Thm.expose_theory (Theory.end_theory thy)))))
-            (K ());
-      in State ((Toplevel, pr_ctxt), get_theory node') end
+            no_presentation;
+      in State ((Toplevel, pr_ctxt), (get_theory node', NONE)) end
   | (Keep f, node) =>
       Runtime.controlled_execution (try generic_theory_of state)
-        (fn () => (f int state; State (node_presentation node, previous_theory_of state))) ()
+        (fn () =>
+          let
+            val prev_thy = previous_theory_of state;
+            val state' = State (node_presentation node, (prev_thy, NONE));
+          in apply_presentation (fn st => f int st) state' end) ()
   | (Transaction _, Toplevel) => raise UNDEF
   | (Transaction (f, g), node) => apply (fn x => f int x) g (set_prev_theory state node)
   | _ => raise UNDEF);
@@ -403,13 +418,14 @@
 fun modify_init f tr = if is_init tr then init_theory f (reset_trans tr) else tr;
 
 val exit = add_trans Exit;
-val keep' = add_trans o Keep;
 
 fun present_transaction f g = add_trans (Transaction (f, g));
-fun transaction f = present_transaction f (K ());
-fun transaction0 f = present_transaction (node_presentation oo f) (K ());
+fun transaction f = present_transaction f no_presentation;
+fun transaction0 f = present_transaction (node_presentation oo f) no_presentation;
 
-fun keep f = add_trans (Keep (fn _ => f));
+fun present f = add_trans (Keep (K (presentation f)));
+fun keep f = add_trans (Keep (fn _ => fn st => let val () = f st in NONE end));
+fun keep' f = add_trans (Keep (fn int => fn st => let val () = f int st in NONE end));
 
 fun keep_proof f =
   keep (fn st =>
@@ -495,15 +511,16 @@
             |> Local_Theory.reset_group;
         in (Theory (finish lthy'), lthy') end
     | _ => raise UNDEF))
-  (K ());
+  no_presentation;
 
 fun local_theory restricted target f = local_theory' restricted target (K f);
 
-fun present_local_theory target = present_transaction (fn _ =>
+fun present_local_theory target g = present_transaction (fn _ =>
   (fn Theory gthy =>
         let val (finish, lthy) = Target_Context.switch_named_cmd target gthy;
         in (Theory (finish lthy), lthy) end
-    | _ => raise UNDEF));
+    | _ => raise UNDEF))
+  (presentation g);
 
 
 (* proof transitions *)
--- a/src/Pure/ML/ml_antiquotations.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/ML/ml_antiquotations.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -205,14 +205,14 @@
 
 val keywords = Keyword.add_minor_keywords ["for", "=>"] Keyword.empty_keywords;
 
-val parse_name = Parse.input Parse.name;
+val parse_name_args =
+  Parse.input Parse.name -- Scan.repeat Parse.embedded_ml;
 
-val parse_args = Scan.repeat Parse.embedded_ml_underscore;
-val parse_for_args = Scan.optional (Parse.$$$ "for" |-- Parse.!!! parse_args) [];
+val parse_for_args =
+  Scan.optional (Parse.$$$ "for" |-- Parse.!!! (Scan.repeat1 Parse.embedded_ml)) [];
 
 fun parse_body b =
-  if b then Parse.$$$ "=>" |-- Parse.!!! Parse.embedded_input >> (ML_Lex.read_source #> single)
-  else Scan.succeed [];
+  if b then Parse.$$$ "=>" |-- Parse.!!! (Parse.embedded_ml >> single) else Scan.succeed [];
 
 fun is_dummy [Antiquote.Text tok] = ML_Lex.content_of tok = "_"
   | is_dummy _ = false;
@@ -233,7 +233,7 @@
     (fn range => fn src => fn ctxt =>
       let
         val ((s, type_args), fn_body) = src
-          |> Parse.read_embedded_src ctxt keywords (parse_name -- parse_args -- parse_body function);
+          |> Parse.read_embedded_src ctxt keywords (parse_name_args -- parse_body function);
         val pos = Input.pos_of s;
 
         val Type (c, Ts) =
@@ -269,7 +269,7 @@
       let
         val (((s, type_args), term_args), fn_body) = src
           |> Parse.read_embedded_src ctxt keywords
-              (parse_name -- parse_args -- parse_for_args -- parse_body function);
+              (parse_name_args -- parse_for_args -- parse_body function);
 
         val Const (c, T) =
           Proof_Context.read_const {proper = true, strict = true} ctxt
--- a/src/Pure/ML/ml_statistics.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/ML/ml_statistics.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -194,7 +194,7 @@
   val empty: ML_Statistics = apply(Nil)
 
   def apply(ml_statistics0: List[Properties.T], heading: String = "",
-    domain: String => Boolean = (key: String) => true): ML_Statistics =
+    domain: String => Boolean = _ => true): ML_Statistics =
   {
     require(ml_statistics0.forall(props => Now.unapply(props).isDefined), "missing \"now\" field")
 
@@ -260,6 +260,11 @@
   val time_start: Double,
   val duration: Double)
 {
+  override def toString: String =
+    if (content.isEmpty) "ML_Statistics.empty"
+    else "ML_Statistics(length = " + content.length + ", fields = " + fields.size + ")"
+
+
   /* content */
 
   def maximum(field: String): Double =
@@ -286,7 +291,7 @@
 
   def update_data(data: XYSeriesCollection, selected_fields: List[String]): Unit =
   {
-    data.removeAllSeries
+    data.removeAllSeries()
     for (field <- selected_fields) {
       val series = new XYSeries(field)
       content.foreach(entry => series.add(entry.time, entry.get(field)))
--- a/src/Pure/ML/ml_system.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/ML/ml_system.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -28,8 +28,8 @@
 val platform_is_macos = String.isSuffix "darwin" platform;
 val platform_is_windows = String.isSuffix "windows" platform;
 val platform_is_unix = platform_is_linux orelse platform_is_macos;
-val platform_is_64 = String.isPrefix "x86_64-" platform;
-val platform_is_arm = String.isPrefix "arm64-" platform;
+val platform_is_64 = String.isPrefix "x86_64-" platform orelse String.isPrefix "arm64-" platform;
+val platform_is_arm = String.isPrefix "arm64_32-" platform orelse String.isPrefix "arm64-" platform;
 
 val platform_path =
   if platform_is_windows then
--- a/src/Pure/PIDE/command.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/PIDE/command.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -147,14 +147,6 @@
     val token_reports = map (reports_of_token keywords) span;
     val _ = Position.reports_text (maps #2 token_reports @ maps command_reports span);
 
-    val verbatim =
-      span |> map_filter (fn tok =>
-        if Token.kind_of tok = Token.Verbatim then SOME (Token.pos_of tok) else NONE);
-    val _ =
-      if null verbatim then ()
-      else legacy_feature ("Old-style {* verbatim *} token -- use \<open>cartouche\<close> instead" ^
-        Position.here_list verbatim);
-
     val core_range = Token.core_range_of span;
     val tr =
       if exists #1 token_reports
--- a/src/Pure/PIDE/markup.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/PIDE/markup.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -149,6 +149,17 @@
   val document_markerN: string val document_marker: T
   val document_tagN: string val document_tag: string -> T
   val document_latexN: string val document_latex: T
+  val latex_outputN: string val latex_output: T
+  val latex_macro0N: string val latex_macro0: string -> T
+  val latex_macroN: string val latex_macro: string -> T
+  val latex_environmentN: string val latex_environment: string -> T
+  val latex_headingN: string val latex_heading: string -> T
+  val latex_bodyN: string val latex_body: string -> T
+  val latex_index_itemN: string val latex_index_item: T
+  val latex_index_entryN: string val latex_index_entry: string -> T
+  val latex_delimN: string val latex_delim: string -> T
+  val latex_tagN: string val latex_tag: string -> T
+  val optional_argumentN: string val optional_argument: string -> T -> T
   val markdown_paragraphN: string val markdown_paragraph: T
   val markdown_itemN: string val markdown_item: T
   val markdown_bulletN: string val markdown_bullet: int -> T
@@ -569,8 +580,25 @@
 val (document_markerN, document_marker) = markup_elem "document_marker";
 val (document_tagN, document_tag) = markup_string "document_tag" nameN;
 
+
+(* LaTeX *)
+
 val (document_latexN, document_latex) = markup_elem "document_latex";
 
+val (latex_outputN, latex_output) = markup_elem "latex_output";
+val (latex_macro0N, latex_macro0) = markup_string "latex_macro0" nameN;
+val (latex_macroN, latex_macro) = markup_string "latex_macro" nameN;
+val (latex_environmentN, latex_environment) = markup_string "latex_environment" nameN;
+val (latex_headingN, latex_heading) = markup_string "latex_heading" kindN;
+val (latex_bodyN, latex_body) = markup_string "latex_body" kindN;
+val (latex_index_itemN, latex_index_item) = markup_elem "latex_index_item";
+val (latex_index_entryN, latex_index_entry) = markup_string "latex_index_entry" kindN;
+val (latex_delimN, latex_delim) = markup_string "latex_delim" nameN;
+val (latex_tagN, latex_tag) = markup_string "latex_tag" nameN;
+
+val optional_argumentN = "optional_argument";
+fun optional_argument arg = properties [(optional_argumentN, arg)];
+
 
 (* Markdown document structure *)
 
--- a/src/Pure/PIDE/markup.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/PIDE/markup.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -61,31 +61,41 @@
   val Empty: Markup = Markup("", Nil)
   val Broken: Markup = Markup("broken", Nil)
 
+  class Markup_Elem(val name: String)
+  {
+    def apply(props: Properties.T = Nil): Markup = Markup(name, props)
+    def unapply(markup: Markup): Option[Properties.T] =
+      if (markup.name == name) Some(markup.properties) else None
+  }
+
   class Markup_String(val name: String, prop: String)
   {
-    private val Prop = new Properties.String(prop)
+    val Prop: Properties.String = new Properties.String(prop)
 
     def apply(s: String): Markup = Markup(name, Prop(s))
     def unapply(markup: Markup): Option[String] =
       if (markup.name == name) Prop.unapply(markup.properties) else None
+    def get(markup: Markup): String = unapply(markup).getOrElse("")
   }
 
   class Markup_Int(val name: String, prop: String)
   {
-    private val Prop = new Properties.Int(prop)
+    val Prop: Properties.Int = new Properties.Int(prop)
 
     def apply(i: Int): Markup = Markup(name, Prop(i))
     def unapply(markup: Markup): Option[Int] =
       if (markup.name == name) Prop.unapply(markup.properties) else None
+    def get(markup: Markup): Int = unapply(markup).getOrElse(0)
   }
 
   class Markup_Long(val name: String, prop: String)
   {
-    private val Prop = new Properties.Long(prop)
+    val Prop: Properties.Long = new Properties.Long(prop)
 
     def apply(i: Long): Markup = Markup(name, Prop(i))
     def unapply(markup: Markup): Option[Long] =
       if (markup.name == name) Prop.unapply(markup.properties) else None
+    def get(markup: Markup): Long = unapply(markup).getOrElse(0)
   }
 
 
@@ -104,21 +114,11 @@
   val BINDING = "binding"
   val ENTITY = "entity"
 
-  val Def = new Properties.Long("def")
-  val Ref = new Properties.Long("ref")
-
   object Entity
   {
-    object Def
-    {
-      def unapply(markup: Markup): Option[Long] =
-        if (markup.name == ENTITY) Markup.Def.unapply(markup.properties) else None
-    }
-    object Ref
-    {
-      def unapply(markup: Markup): Option[Long] =
-        if (markup.name == ENTITY) Markup.Ref.unapply(markup.properties) else None
-    }
+    val Def = new Markup_Long(ENTITY, "def")
+    val Ref = new Markup_Long(ENTITY, "ref")
+
     object Occ
     {
       def unapply(markup: Markup): Option[Long] =
@@ -127,10 +127,7 @@
 
     def unapply(markup: Markup): Option[(String, String)] =
       markup match {
-        case Markup(ENTITY, props) =>
-          val kind = Kind.unapply(props).getOrElse("")
-          val name = Name.unapply(props).getOrElse("")
-          Some((kind, name))
+        case Markup(ENTITY, props) => Some((Kind.get(props), Name.get(props)))
         case _ => None
       }
   }
@@ -183,8 +180,7 @@
   {
     def unapply(markup: Markup): Option[String] =
       markup match {
-        case Markup(EXPRESSION, Kind(kind)) => Some(kind)
-        case Markup(EXPRESSION, _) => Some("")
+        case Markup(EXPRESSION, props) => Some(Kind.get(props))
         case _ => None
       }
   }
@@ -263,8 +259,8 @@
         (if (i != 0) Indent(i) else Nil))
     def unapply(markup: Markup): Option[(Boolean, Int)] =
       if (markup.name == name) {
-        val c = Consistent.unapply(markup.properties).getOrElse(false)
-        val i = Indent.unapply(markup.properties).getOrElse(0)
+        val c = Consistent.get(markup.properties)
+        val i = Indent.get(markup.properties)
         Some((c, i))
       }
       else None
@@ -279,8 +275,8 @@
         (if (i != 0) Indent(i) else Nil))
     def unapply(markup: Markup): Option[(Int, Int)] =
       if (markup.name == name) {
-        val w = Width.unapply(markup.properties).getOrElse(0)
-        val i = Indent.unapply(markup.properties).getOrElse(0)
+        val w = Width.get(markup.properties)
+        val i = Indent.get(markup.properties)
         Some((w, i))
       }
       else None
@@ -364,20 +360,30 @@
   val PARAGRAPH = "paragraph"
   val TEXT_FOLD = "text_fold"
 
-  object Document_Tag
+  object Document_Tag extends Markup_String("document_tag", NAME)
   {
-    val ELEMENT = "document_tag"
     val IMPORTANT = "important"
     val UNIMPORTANT = "unimportant"
-
-    def unapply(markup: Markup): Option[String] =
-      markup match {
-        case Markup(ELEMENT, Name(name)) => Some(name)
-        case _ => None
-      }
   }
 
-  val DOCUMENT_LATEX = "document_latex"
+
+  /* LaTeX */
+
+  val Document_Latex = new Markup_Elem("document_latex")
+
+  val Latex_Output = new Markup_Elem("latex_output")
+  val Latex_Macro0 = new Markup_String("latex_macro0", NAME)
+  val Latex_Macro = new Markup_String("latex_macro", NAME)
+  val Latex_Environment = new Markup_String("latex_environment", NAME)
+  val Latex_Heading = new Markup_String("latex_heading", KIND)
+  val Latex_Body = new Markup_String("latex_body", KIND)
+  val Latex_Delim = new Markup_String("latex_delim", NAME)
+  val Latex_Tag = new Markup_String("latex_tag", NAME)
+
+  val Latex_Index_Item = new Markup_Elem("latex_index_item")
+  val Latex_Index_Entry = new Markup_String("latex_index_entry", KIND)
+
+  val Optional_Argument = new Properties.String("optional_argument")
 
 
   /* Markdown document structure */
@@ -427,7 +433,6 @@
   val OPERATOR = "operator"
   val STRING = "string"
   val ALT_STRING = "alt_string"
-  val VERBATIM = "verbatim"
   val CARTOUCHE = "cartouche"
   val COMMENT = "comment"
 
@@ -459,8 +464,8 @@
         case _ => None
       }
 
-    def parse(props: Properties.T): isabelle.Timing =
-      unapply(props) getOrElse isabelle.Timing.zero
+    def get(props: Properties.T): isabelle.Timing =
+      unapply(props).getOrElse(isabelle.Timing.zero)
   }
 
   val TIMING = "timing"
@@ -499,12 +504,7 @@
 
   /* command indentation */
 
-  object Command_Indent
-  {
-    val name = "command_indent"
-    def unapply(markup: Markup): Option[Int] =
-      if (markup.name == name) Indent.unapply(markup.properties) else None
-  }
+  val Command_Indent = new Markup_Int("command_indent", Indent.name)
 
 
   /* goals */
@@ -779,6 +779,8 @@
 {
   def is_empty: Boolean = name.isEmpty
 
+  def position_properties: Position.T = properties.filter(Markup.position_property)
+
   def markup(s: String): String =
     YXML.string_of_tree(XML.Elem(this, List(XML.Text(s))))
 
--- a/src/Pure/PIDE/rendering.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/PIDE/rendering.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -117,7 +117,6 @@
     Markup.OPERATOR -> Color.operator,
     Markup.STRING -> Color.main,
     Markup.ALT_STRING -> Color.main,
-    Markup.VERBATIM -> Color.main,
     Markup.CARTOUCHE -> Color.main,
     Markup.LITERAL -> Color.keyword1,
     Markup.DELIMITER -> Color.main,
@@ -151,7 +150,6 @@
     Map(
       Markup.STRING -> Color.quoted,
       Markup.ALT_STRING -> Color.quoted,
-      Markup.VERBATIM -> Color.quoted,
       Markup.CARTOUCHE -> Color.quoted,
       Markup.ANTIQUOTED -> Color.antiquoted)
 
@@ -209,7 +207,7 @@
     Markup.Elements(Markup.COMPLETION, Markup.NO_COMPLETION)
 
   val language_context_elements =
-    Markup.Elements(Markup.STRING, Markup.ALT_STRING, Markup.VERBATIM,
+    Markup.Elements(Markup.STRING, Markup.ALT_STRING,
       Markup.CARTOUCHE, Markup.COMMENT, Markup.LANGUAGE,
       Markup.ML_STRING, Markup.ML_COMMENT)
 
@@ -257,7 +255,7 @@
       Markup.META_DATE, Markup.META_DESCRIPTION, Markup.META_LICENSE)
 
   val document_tag_elements =
-    Markup.Elements(Markup.Document_Tag.ELEMENT)
+    Markup.Elements(Markup.Document_Tag.name)
 
   val markdown_elements =
     Markup.Elements(Markup.MARKDOWN_PARAGRAPH, Markup.MARKDOWN_ITEM, Markup.Markdown_List.name,
--- a/src/Pure/PIDE/resources.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/PIDE/resources.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -418,7 +418,7 @@
   Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) =>
    (check ctxt NONE source;
     Latex.string (Latex.output_ascii_breakable "/" (Input.string_of source))
-    |> Latex.enclose_text "\\isatt{" "}"));
+    |> Latex.macro "isatt"));
 
 fun ML_antiq check =
   Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) =>
--- a/src/Pure/PIDE/xml.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/PIDE/xml.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -37,6 +37,8 @@
   val blob: string list -> body
   val is_empty: tree -> bool
   val is_empty_body: body -> bool
+  val string: string -> body
+  val enclose: string -> string -> body -> body
   val xml_elemN: string
   val xml_nameN: string
   val xml_bodyN: string
@@ -84,6 +86,11 @@
 
 val is_empty_body = forall is_empty;
 
+fun string "" = []
+  | string s = [Text s];
+
+fun enclose bg en body = string bg @ body @ string en;
+
 
 (* wrapped elements *)
 
@@ -118,9 +125,7 @@
 fun trim_blanks trees =
   trees |> maps
     (fn Elem (markup, body) => [Elem (markup, trim_blanks body)]
-      | Text s =>
-          let val s' = s |> raw_explode |> trim Symbol.is_blank |> implode;
-          in if s' = "" then [] else [Text s'] end);
+      | Text s => s |> raw_explode |> trim Symbol.is_blank |> implode |> string);
 
 
 
@@ -155,13 +160,13 @@
 
 fun element name atts body =
   let val b = implode body in
-    if b = "" then enclose "<" "/>" (elem name atts)
-    else enclose "<" ">" (elem name atts) ^ b ^ enclose "</" ">" name
+    if b = "" then Library.enclose "<" "/>" (elem name atts)
+    else Library.enclose "<" ">" (elem name atts) ^ b ^ Library.enclose "</" ">" name
   end;
 
 fun output_markup (markup as (name, atts)) =
   if Markup.is_empty markup then Markup.no_output
-  else (enclose "<" ">" (elem name atts), enclose "</" ">" name);
+  else (Library.enclose "<" ">" (elem name atts), Library.enclose "</" ">" name);
 
 
 (* output content *)
--- a/src/Pure/PIDE/xml.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/PIDE/xml.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -44,6 +44,11 @@
   val no_text: Text = Text("")
   val newline: Text = Text("\n")
 
+  def string(s: String): Body = if (s.isEmpty) Nil else List(Text(s))
+
+  def enclose(bg: String, en:String, body: Body): Body =
+    string(bg) ::: body ::: string(en)
+
 
   /* name space */
 
--- a/src/Pure/Pure.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Pure.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -301,13 +301,13 @@
 val _ =
   Outer_Syntax.local_theory \<^command_keyword>\<open>attribute_setup\<close> "define attribute in ML"
     (Parse.name_position --
-        Parse.!!! (\<^keyword>\<open>=\<close> |-- Parse.ML_source -- Scan.optional Parse.text "")
+        Parse.!!! (\<^keyword>\<open>=\<close> |-- Parse.ML_source -- Scan.optional Parse.embedded "")
       >> (fn (name, (txt, cmt)) => Attrib.attribute_setup name txt cmt));
 
 val _ =
   Outer_Syntax.local_theory \<^command_keyword>\<open>method_setup\<close> "define proof method in ML"
     (Parse.name_position --
-        Parse.!!! (\<^keyword>\<open>=\<close> |-- Parse.ML_source -- Scan.optional Parse.text "")
+        Parse.!!! (\<^keyword>\<open>=\<close> |-- Parse.ML_source -- Scan.optional Parse.embedded "")
       >> (fn (name, (txt, cmt)) => Method.method_setup name txt cmt));
 
 val _ =
@@ -572,7 +572,7 @@
 val _ =
   Outer_Syntax.local_theory \<^command_keyword>\<open>named_theorems\<close>
     "declare named collection of theorems"
-    (Parse.and_list1 (Parse.binding -- Scan.optional Parse.text "") >>
+    (Parse.and_list1 (Parse.binding -- Scan.optional Parse.embedded "") >>
       fold (fn (b, descr) => snd o Named_Theorems.declare b descr));
 
 in end\<close>
--- a/src/Pure/ROOT	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/ROOT	Fri Dec 10 08:58:09 2021 +0100
@@ -26,11 +26,16 @@
   description "
     Miscellaneous examples and experiments for Isabelle/Pure.
   "
+  options [document_heading_prefix = ""]
   sessions
     "Pure-Examples"
-  theories
+  theories [document = false]
     Def
     Def_Examples
     Guess
     Guess_Examples
-
+  theories
+    Alternative_Headings
+    Alternative_Headings_Examples
+  document_files
+    "root.tex"
--- a/src/Pure/System/options.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/System/options.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -33,14 +33,21 @@
     typ: Type,
     value: String,
     default_value: String,
+    standard_value: Option[String],
     description: String,
     section: String)
   {
+    private def print_value(x: String): String = if (typ == Options.String) quote(x) else x
+    private def print_standard: String =
+      standard_value match {
+        case None => ""
+        case Some(s) if s == default_value => " (standard)"
+        case Some(s) => " (standard " + print_value(s) + ")"
+      }
     private def print(default: Boolean): String =
     {
       val x = if (default) default_value else value
-      "option " + name + " : " + typ.print + " = " +
-        (if (typ == Options.String) quote(x) else x) +
+      "option " + name + " : " + typ.print + " = " + print_value(x) + print_standard +
         (if (description == "") "" else "\n  -- " + quote(description))
     }
 
@@ -67,14 +74,17 @@
   private val SECTION = "section"
   private val PUBLIC = "public"
   private val OPTION = "option"
+  private val STANDARD = "standard"
   private val OPTIONS = Path.explode("etc/options")
   private val PREFS = Path.explode("$ISABELLE_HOME_USER/etc/preferences")
 
   val options_syntax: Outer_Syntax =
-    Outer_Syntax.empty + ":" + "=" + "--" + Symbol.comment + Symbol.comment_decoded +
+    Outer_Syntax.empty + ":" + "=" + "--" + "(" + ")" +
+      Symbol.comment + Symbol.comment_decoded +
       (SECTION, Keyword.DOCUMENT_HEADING) +
       (PUBLIC, Keyword.BEFORE_COMMAND) +
-      (OPTION, Keyword.THY_DECL)
+      (OPTION, Keyword.THY_DECL) +
+      STANDARD
 
   val prefs_syntax: Outer_Syntax = Outer_Syntax.empty + "="
 
@@ -86,6 +96,8 @@
       opt(token("-", tok => tok.is_sym_ident && tok.content == "-")) ~ atom("nat", _.is_nat) ^^
         { case s ~ n => if (s.isDefined) "-" + n else n } |
       atom("option value", tok => tok.is_name || tok.is_float)
+    val option_standard: Parser[Option[String]] =
+      $$$("(") ~! $$$(STANDARD) ~ opt(option_value) ~ $$$(")") ^^ { case _ ~ _ ~ a ~ _ => a }
   }
 
   private object Parser extends Parser
@@ -98,9 +110,10 @@
       command(SECTION) ~! text ^^
         { case _ ~ a => (options: Options) => options.set_section(a) } |
       opt($$$(PUBLIC)) ~ command(OPTION) ~! (position(option_name) ~ $$$(":") ~ option_type ~
-      $$$("=") ~ option_value ~ (comment_marker ~! text ^^ { case _ ~ x => x } | success(""))) ^^
-        { case a ~ _ ~ ((b, pos) ~ _ ~ c ~ _ ~ d ~ e) =>
-            (options: Options) => options.declare(a.isDefined, pos, b, c, d, e) }
+      $$$("=") ~ option_value ~ opt(option_standard) ~
+        (comment_marker ~! text ^^ { case _ ~ x => x } | success(""))) ^^
+        { case a ~ _ ~ ((b, pos) ~ _ ~ c ~ _ ~ d ~ e ~ f) =>
+            (options: Options) => options.declare(a.isDefined, pos, b, c, d, e, f) }
     }
 
     val prefs_entry: Parser[Options => Options] =
@@ -302,6 +315,7 @@
     name: String,
     typ_name: String,
     value: String,
+    standard: Option[Option[String]],
     description: String): Options =
   {
     options.get(name) match {
@@ -319,7 +333,16 @@
               error("Unknown type for option " + quote(name) + " : " + quote(typ_name) +
                 Position.here(pos))
           }
-        val opt = Options.Opt(public, pos, name, typ, value, value, description, section)
+        val standard_value =
+          standard match {
+            case None => None
+            case Some(_) if typ == Options.Bool =>
+              error("Illegal standard value for option " + quote(name) + " : " + typ_name +
+                Position.here)
+            case Some(s) => Some(s.getOrElse(value))
+          }
+        val opt =
+          Options.Opt(public, pos, name, typ, value, value, standard_value, description, section)
         (new Options(options + (name -> opt), section)).check_value(name)
     }
   }
@@ -328,7 +351,7 @@
   {
     if (options.isDefinedAt(name)) this + (name, value)
     else {
-      val opt = Options.Opt(false, Position.none, name, Options.Unknown, value, value, "", "")
+      val opt = Options.Opt(false, Position.none, name, Options.Unknown, value, value, None, "", "")
       new Options(options + (name -> opt), section)
     }
   }
@@ -342,9 +365,9 @@
   def + (name: String, opt_value: Option[String]): Options =
   {
     val opt = check_name(name)
-    opt_value match {
+    opt_value orElse opt.standard_value match {
       case Some(value) => this + (name, value)
-      case None if opt.typ == Options.Bool | opt.typ == Options.String => this + (name, "true")
+      case None if opt.typ == Options.Bool => this + (name, "true")
       case None => error("Missing value for option " + quote(name) + " : " + opt.typ.print)
     }
   }
--- a/src/Pure/System/scala_compiler.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/System/scala_compiler.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -58,8 +58,7 @@
 val args = Scan.optional (Parse.$$$ "(" |-- arguments --| Parse.$$$ ")") " _";
 
 fun scala_name name =
-  Latex.string (Latex.output_ascii_breakable "." name)
-  |> Latex.enclose_text "\\isatt{" "}";
+  Latex.macro "isatt" (Latex.string (Latex.output_ascii_breakable "." name));
 
 in
 
--- a/src/Pure/Thy/bibtex.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/bibtex.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -42,8 +42,7 @@
   Theory.setup
    (Document_Antiquotation.setup_option \<^binding>\<open>cite_macro\<close> (Config.put cite_macro) #>
     Document_Output.antiquotation_raw \<^binding>\<open>cite\<close>
-      (Scan.lift
-        (Scan.option (Parse.verbatim || Parse.cartouche) -- Parse.and_list1 Args.name_position))
+      (Scan.lift (Scan.option Parse.cartouche -- Parse.and_list1 Args.name_position))
       (fn ctxt => fn (opt, citations) =>
         let
           val _ =
--- a/src/Pure/Thy/document_antiquotations.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/document_antiquotations.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -131,17 +131,17 @@
 
 local
 
-fun nested_antiquotation name s1 s2 =
+fun nested_antiquotation name macro =
   Document_Output.antiquotation_raw_embedded name (Scan.lift Args.cartouche_input)
     (fn ctxt => fn txt =>
       (Context_Position.reports ctxt (Document_Output.document_reports txt);
-        Latex.enclose_text s1 s2 (Document_Output.output_document ctxt {markdown = false} txt)));
+        Latex.macro macro (Document_Output.output_document ctxt {markdown = false} txt)));
 
 val _ =
   Theory.setup
-   (nested_antiquotation \<^binding>\<open>footnote\<close> "\\footnote{" "}" #>
-    nested_antiquotation \<^binding>\<open>emph\<close> "\\emph{" "}" #>
-    nested_antiquotation \<^binding>\<open>bold\<close> "\\textbf{" "}");
+   (nested_antiquotation \<^binding>\<open>footnote\<close> "footnote" #>
+    nested_antiquotation \<^binding>\<open>emph\<close> "emph" #>
+    nested_antiquotation \<^binding>\<open>bold\<close> "textbf");
 
 in end;
 
@@ -195,7 +195,7 @@
   Input.source_content #> #1 #> Document_Antiquotation.prepare_lines ctxt;
 
 fun text_antiquotation name =
-  Document_Output.antiquotation_raw_embedded name (Scan.lift Args.text_input)
+  Document_Output.antiquotation_raw_embedded name (Scan.lift Parse.embedded_input)
     (fn ctxt => fn text =>
       let
         val _ = report_text ctxt text;
@@ -206,7 +206,7 @@
       end);
 
 val theory_text_antiquotation =
-  Document_Output.antiquotation_raw_embedded \<^binding>\<open>theory_text\<close> (Scan.lift Args.text_input)
+  Document_Output.antiquotation_raw_embedded \<^binding>\<open>theory_text\<close> (Scan.lift Parse.embedded_input)
     (fn ctxt => fn text =>
       let
         val keywords = Thy_Header.get_keywords' ctxt;
@@ -273,7 +273,7 @@
 (* verbatim text *)
 
 val _ = Theory.setup
-  (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>verbatim\<close> (Scan.lift Args.text_input)
+  (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>verbatim\<close> (Scan.lift Parse.embedded_input)
     (fn ctxt => fn text =>
       let
         val pos = Input.pos_of text;
@@ -327,17 +327,17 @@
       ML_Lex.read (ML_Syntax.print_string (ML_Lex.content_of tok))
   | test_functor _ = raise Fail "Bad ML functor specification";
 
-val parse_ml0 = Args.text_input >> (fn source => ("", (source, Input.empty)));
+val parse_ml0 = Parse.embedded_input >> (fn source => ("", (source, Input.empty)));
 
 val parse_ml =
-  Args.text_input -- Scan.optional (Args.colon |-- Args.text_input) Input.empty >> pair "";
+  Parse.embedded_input -- Scan.optional (Args.colon |-- Parse.embedded_input) Input.empty >> pair "";
 
 val parse_exn =
-  Args.text_input -- Scan.optional (Args.$$$ "of" |-- Args.text_input) Input.empty >> pair "";
+  Parse.embedded_input -- Scan.optional (Args.$$$ "of" |-- Parse.embedded_input) Input.empty >> pair "";
 
 val parse_type =
   (Parse.type_args >> (fn [] => "" | [a] => a ^ " " | bs => enclose "(" ") " (commas bs))) --
-    (Args.text_input -- Scan.optional (Args.$$$ "=" |-- Args.text_input) Input.empty);
+    (Parse.embedded_input -- Scan.optional (Args.$$$ "=" |-- Parse.embedded_input) Input.empty);
 
 fun eval ctxt pos ml =
   ML_Context.eval_in (SOME ctxt) ML_Compiler.flags pos ml
@@ -423,24 +423,24 @@
         val _ =
           Context_Position.reports ctxt
             [(pos, Markup.language_url delimited), (pos, Markup.url url)];
-      in Latex.enclose_text "\\url{" "}" (Latex.string (escape_url url)) end));
+      in Latex.macro "url" (Latex.string (escape_url url)) end));
 
 
 (* formal entities *)
 
 local
 
-fun entity_antiquotation name check bg en =
+fun entity_antiquotation name check macro =
   Document_Output.antiquotation_raw name (Scan.lift Args.name_position)
     (fn ctxt => fn (name, pos) =>
       let val _ = check ctxt (name, pos)
-      in Latex.enclose_text bg en (Latex.string (Output.output name)) end);
+      in Latex.macro macro (Latex.string (Output.output name)) end);
 
 val _ =
   Theory.setup
-   (entity_antiquotation \<^binding>\<open>command\<close> Outer_Syntax.check_command "\\isacommand{" "}" #>
-    entity_antiquotation \<^binding>\<open>method\<close> Method.check_name "\\isa{" "}" #>
-    entity_antiquotation \<^binding>\<open>attribute\<close> Attrib.check_name "\\isa{" "}");
+   (entity_antiquotation \<^binding>\<open>command\<close> Outer_Syntax.check_command "isacommand" #>
+    entity_antiquotation \<^binding>\<open>method\<close> Method.check_name "isa" #>
+    entity_antiquotation \<^binding>\<open>attribute\<close> Attrib.check_name "isa");
 
 in end;
 
--- a/src/Pure/Thy/document_build.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/document_build.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -11,24 +11,6 @@
 {
   /* document variants */
 
-  object Content
-  {
-    def apply(path: Path, content: Bytes): Content = new Content_Bytes(path, content)
-    def apply(path: Path, content: String): Content = new Content_String(path, content)
-  }
-  trait Content
-  {
-    def write(dir: Path): Unit
-  }
-  final class Content_Bytes private[Document_Build](path: Path, content: Bytes) extends Content
-  {
-    def write(dir: Path): Unit = Bytes.write(dir + path, content)
-  }
-  final class Content_String private[Document_Build](path: Path, content: String) extends Content
-  {
-    def write(dir: Path): Unit = File.write(dir + path, content)
-  }
-
   abstract class Document_Name
   {
     def name: String
@@ -39,36 +21,17 @@
 
   object Document_Variant
   {
-    def parse(name: String, tags: String): Document_Variant =
-      Document_Variant(name, Library.space_explode(',', tags))
-
     def parse(opt: String): Document_Variant =
       Library.space_explode('=', opt) match {
-        case List(name) => Document_Variant(name, Nil)
-        case List(name, tags) => parse(name, tags)
+        case List(name) => Document_Variant(name, Latex.Tags.empty)
+        case List(name, tags) => Document_Variant(name, Latex.Tags(tags))
         case _ => error("Malformed document variant: " + quote(opt))
       }
   }
 
-  sealed case class Document_Variant(name: String, tags: List[String]) extends Document_Name
+  sealed case class Document_Variant(name: String, tags: Latex.Tags) extends Document_Name
   {
-    def print_tags: String = tags.mkString(",")
-    def print: String = if (tags.isEmpty) name else name + "=" + print_tags
-
-    def isabelletags: Content =
-    {
-      val path = Path.explode("isabelletags.sty")
-      val content =
-        Library.terminate_lines(
-          tags.map(tag =>
-            tag.toList match {
-              case '/' :: cs => "\\isafoldtag{" + cs.mkString + "}"
-              case '-' :: cs => "\\isadroptag{" + cs.mkString + "}"
-              case '+' :: cs => "\\isakeeptag{" + cs.mkString + "}"
-              case cs => "\\isakeeptag{" + cs.mkString + "}"
-            }))
-      Content(path, content)
-    }
+    def print: String = if (tags.toString.isEmpty) name else name + "=" + tags.toString
   }
 
   sealed case class Document_Input(name: String, sources: SHA1.Digest)
@@ -155,9 +118,11 @@
 
   /* context */
 
+  val texinputs: Path = Path.explode("~~/lib/texinputs")
+
   val isabelle_styles: List[Path] =
-    List("comment.sty", "isabelle.sty", "isabellesym.sty", "pdfsetup.sty", "railsetup.sty").
-      map(name => Path.explode("~~/lib/texinputs") + Path.basic(name))
+    List("isabelle.sty", "isabellesym.sty", "pdfsetup.sty", "railsetup.sty").
+      map(name => texinputs + Path.basic(name))
 
   def context(
     session: String,
@@ -167,7 +132,7 @@
   {
     val info = deps.sessions_structure(session)
     val base = deps(session)
-    val hierarchy = deps.sessions_structure.hierarchy(session)
+    val hierarchy = deps.sessions_structure.build_hierarchy(session)
     new Context(info, base, hierarchy, db_context, progress)
   }
 
@@ -211,32 +176,31 @@
     def session_theories: List[Document.Node.Name] = base.session_theories
     def document_theories: List[Document.Node.Name] = session_theories ::: base.document_theories
 
-    lazy val tex_files: List[Content] =
+    lazy val document_latex: List[File.Content_XML] =
       for (name <- document_theories)
       yield {
         val path = Path.basic(tex_name(name))
-        val xml = YXML.parse_body(get_export(name.theory, Export.DOCUMENT_LATEX).text)
-        val content = Latex.output(xml, file_pos = name.path.implode_symbolic)
-        Content(path, content)
+        val content = YXML.parse_body(get_export(name.theory, Export.DOCUMENT_LATEX).text)
+        File.Content(path, content)
       }
 
-    lazy val session_graph: Content =
+    lazy val session_graph: File.Content =
     {
       val path = Presentation.session_graph_path
       val content = graphview.Graph_File.make_pdf(options, base.session_graph_display)
-      Content(path, content)
+      File.Content(path, content)
     }
 
-    lazy val session_tex: Content =
+    lazy val session_tex: File.Content =
     {
       val path = Path.basic("session.tex")
       val content =
         Library.terminate_lines(
           base.session_theories.map(name => "\\input{" + tex_name(name) + "}"))
-      Content(path, content)
+      File.Content(path, content)
     }
 
-    lazy val isabelle_logo: Option[Content] =
+    lazy val isabelle_logo: Option[File.Content] =
     {
       document_logo.map(logo_name =>
         Isabelle_System.with_tmp_file("logo", ext = "pdf")(tmp_path =>
@@ -244,14 +208,14 @@
           Logo.create_logo(logo_name, output_file = tmp_path, quiet = true)
           val path = Path.basic("isabelle_logo.pdf")
           val content = Bytes.read(tmp_path)
-          Content(path, content)
+          File.Content(path, content)
         }))
     }
 
 
     /* document directory */
 
-    def prepare_directory(dir: Path, doc: Document_Variant): Directory =
+    def prepare_directory(dir: Path, doc: Document_Variant, latex_output: Latex.Output): Directory =
     {
       val doc_dir = Isabelle_System.make_directory(dir + Path.basic(doc.name))
 
@@ -259,14 +223,23 @@
       /* actual sources: with SHA1 digest */
 
       isabelle_styles.foreach(Isabelle_System.copy_file(_, doc_dir))
-      doc.isabelletags.write(doc_dir)
+
+      val comment_latex = options.bool("document_comment_latex")
+      if (!comment_latex) {
+        Isabelle_System.copy_file(texinputs + Path.basic("comment.sty"), doc_dir)
+      }
+      doc.tags.sty(comment_latex).write(doc_dir)
 
       for ((base_dir, src) <- info.document_files) {
         Isabelle_System.copy_file_base(info.dir + base_dir, src, doc_dir)
       }
 
       session_tex.write(doc_dir)
-      tex_files.foreach(_.write(doc_dir))
+
+      for (content <- document_latex) {
+        content.output(latex_output(_, file_pos = content.path.implode_symbolic))
+          .write(doc_dir)
+      }
 
       val root_name1 = "root_" + doc.name
       val root_name = if ((doc_dir + Path.explode(root_name1).tex).is_file) root_name1 else "root"
@@ -350,7 +323,7 @@
   abstract class Bash_Engine(name: String) extends Engine(name)
   {
     def prepare_directory(context: Context, dir: Path, doc: Document_Variant): Directory =
-      context.prepare_directory(dir, doc)
+      context.prepare_directory(dir, doc, new Latex.Output(context.options))
 
     def use_pdflatex: Boolean = false
     def latex_script(context: Context, directory: Directory): String =
--- a/src/Pure/Thy/document_output.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/document_output.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -8,6 +8,8 @@
 sig
   val document_reports: Input.source -> Position.report list
   val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text
+  val document_output: {markdown: bool, markup: Latex.text -> Latex.text} ->
+    (xstring * Position.T) option * Input.source -> Toplevel.transition -> Toplevel.transition
   val check_comments: Proof.context -> Symbol_Pos.T list -> unit
   val output_token: Proof.context -> Token.T -> Latex.text
   val output_source: Proof.context -> string -> Latex.text
@@ -62,11 +64,11 @@
       Input.cartouche_content syms
       |> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false)
           {markdown = false}
-      |> Latex.enclose_text "%\n\\isamarkupcmt{" "%\n}"
+      |> XML.enclose "%\n\\isamarkupcmt{" "%\n}"
   | Comment.Cancel =>
       Symbol_Pos.cartouche_content syms
       |> Latex.symbols_output
-      |> Latex.enclose_text "%\n\\isamarkupcancel{" "}"
+      |> XML.enclose "%\n\\isamarkupcancel{" "}"
   | Comment.Latex => Latex.symbols (Symbol_Pos.cartouche_content syms)
   | Comment.Marker => [])
 and output_comment_document ctxt (comment, syms) =
@@ -90,7 +92,7 @@
     fun output_block (Markdown.Par lines) =
           separate (XML.Text "\n") (map (Latex.block o output_line) lines)
       | output_block (Markdown.List {kind, body, ...}) =
-          Latex.environment_text (Markdown.print_kind kind) (output_blocks body)
+          Latex.environment (Markdown.print_kind kind) (output_blocks body)
     and output_blocks blocks =
       separate (XML.Text "\n\n") (map (Latex.block o output_block) blocks);
   in
@@ -111,6 +113,22 @@
       in output_antiquotes ants end
   end;
 
+fun document_output {markdown, markup} (loc, txt) =
+  let
+    fun output st =
+      let
+        val ctxt = Toplevel.presentation_context st;
+        val _ = Context_Position.reports ctxt (document_reports txt);
+      in txt |> output_document ctxt {markdown = markdown} |> markup end;
+  in
+    Toplevel.present (fn st =>
+      (case loc of
+        NONE => output st
+      | SOME (_, pos) =>
+          error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
+    Toplevel.present_local_theory loc output
+  end;
+
 
 (* output tokens with formal comments *)
 
@@ -122,7 +140,7 @@
         Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) @
           Latex.symbols_output body
     | Antiquote.Antiq {body, ...} =>
-        Latex.enclose_text "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (Latex.symbols_output body));
+        XML.enclose "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (Latex.symbols_output body));
 
 fun output_comment_symbols ctxt {antiq} (comment, syms) =
   (case (comment, antiq) of
@@ -135,7 +153,7 @@
 fun output_body ctxt antiq bg en syms =
   Comment.read_body syms
   |> maps (output_comment_symbols ctxt {antiq = antiq})
-  |> Latex.enclose_text bg en;
+  |> XML.enclose bg en;
 
 in
 
@@ -154,7 +172,6 @@
         else output false "" ""
     | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
     | Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
-    | Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
     | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
     | Token.Control control => output_body ctxt false "" "" (Antiquote.control_symbols control)
     | _ => output false "" "")
@@ -184,9 +201,7 @@
 datatype token =
     Ignore
   | Token of Token.T
-  | Heading of string * Input.source
-  | Body of string * Input.source
-  | Raw of Input.source;
+  | Output of Latex.text;
 
 fun token_with pred (Token tok) = pred tok
   | token_with _ _ = false;
@@ -200,19 +215,13 @@
   (case tok of
     Ignore => []
   | Token tok => output_token ctxt tok
-  | Heading (cmd, source) =>
-      Latex.enclose_text ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n"
-        (output_document ctxt {markdown = false} source)
-  | Body (cmd, source) =>
-      Latex.environment_text ("isamarkup" ^ cmd) (output_document ctxt {markdown = true} source)
-  | Raw source =>
-      Latex.string "%\n" @ output_document ctxt {markdown = true} source @ Latex.string "\n");
+  | Output output => output);
 
 
 (* command spans *)
 
-type command = string * Position.T;   (*name, position*)
-type source = (token * (string * int)) list;        (*token, markup flag, meta-comment depth*)
+type command = string * Position.T;  (*name, position*)
+type source = (token * (string * int)) list;  (*token, markup flag, meta-comment depth*)
 
 datatype span = Span of command * (source * source * source * source) * bool;
 
@@ -242,10 +251,15 @@
   if x = y then I
   else (case which (x, y) of NONE => I | SOME txt => fold cons (Latex.string (f txt)));
 
-val begin_tag = edge #2 Latex.begin_tag;
-val end_tag = edge #1 Latex.end_tag;
-fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
-fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;
+val markup_tag = YXML.output_markup o Markup.latex_tag;
+val markup_delim = YXML.output_markup o Markup.latex_delim;
+val bg_delim = #1 o markup_delim;
+val en_delim = #2 o markup_delim;
+
+val begin_tag = edge #2 (#1 o markup_tag);
+val end_tag = edge #1 (#2 o markup_tag);
+fun open_delim delim e = edge #2 bg_delim e #> delim #> edge #2 en_delim e;
+fun close_delim delim e = edge #1 bg_delim e #> delim #> edge #1 en_delim e;
 
 fun document_tag cmd_pos state state' tagging_stack =
   let
@@ -343,11 +357,39 @@
 
 (* present_thy *)
 
+type segment =
+  {span: Command_Span.span, command: Toplevel.transition,
+   prev_state: Toplevel.state, state: Toplevel.state};
+
 local
 
 val markup_true = "\\isamarkuptrue%\n";
 val markup_false = "\\isamarkupfalse%\n";
 
+fun command_output output tok =
+  if Token.is_command tok then SOME (Token.put_output output tok) else NONE;
+
+fun segment_content (segment: segment) =
+  let val toks = Command_Span.content (#span segment) in
+    (case Toplevel.output_of (#state segment) of
+      NONE => toks
+    | SOME output => map_filter (command_output output) toks)
+  end;
+
+fun output_command keywords = Scan.some (fn tok =>
+  if Token.is_command tok then
+    let
+      val name = Token.content_of tok;
+      val is_document = Keyword.is_document keywords name;
+      val is_document_raw = Keyword.is_document_raw keywords name;
+      val flag = if is_document andalso not is_document_raw then markup_true else "";
+    in
+      if is_document andalso is_some (Token.get_output tok)
+      then SOME ((name, Token.pos_of tok), the (Token.get_output tok), flag)
+      else NONE
+    end
+  else NONE);
+
 val opt_newline = Scan.option (Scan.one Token.is_newline);
 
 val ignore =
@@ -357,17 +399,8 @@
     (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline)
     >> pair (d - 1));
 
-val locale =
-  Scan.option ((Parse.$$$ "(" -- Document_Source.improper -- Parse.$$$ "in") |--
-    Parse.!!!
-      (Document_Source.improper |-- Parse.name --| (Document_Source.improper -- Parse.$$$ ")")));
-
 in
 
-type segment =
-  {span: Command_Span.span, command: Toplevel.transition,
-   prev_state: Toplevel.state, state: Toplevel.state};
-
 fun present_thy options thy (segments: segment list) =
   let
     val keywords = Thy_Header.get_keywords thy;
@@ -376,18 +409,11 @@
     (* tokens *)
 
     val ignored = Scan.state --| ignore
-      >> (fn d => (NONE, (Ignore, ("", d))));
+      >> (fn d => [(NONE, (Ignore, ("", d)))]);
 
-    fun markup pred mk flag = Scan.peek (fn d =>
-      Document_Source.improper |--
-        Parse.position (Scan.one (fn tok =>
-          Token.is_command tok andalso pred keywords (Token.content_of tok))) --
-      (Document_Source.annotation |--
-        Parse.!!!! ((Document_Source.improper -- locale -- Document_Source.improper) |--
-          Parse.document_source --| Document_Source.improper_end))
-            >> (fn ((tok, pos'), source) =>
-              let val name = Token.content_of tok
-              in (SOME (name, pos'), (mk (name, source), (flag, d))) end));
+    val output = Scan.peek (fn d =>
+      Document_Source.improper |-- output_command keywords --| Document_Source.improper_end
+        >> (fn (kind, body, flag) => [(SOME kind, (Output body, (flag, d)))]));
 
     val command = Scan.peek (fn d =>
       Scan.optional (Scan.one Token.is_command_modifier ::: Document_Source.improper) [] --
@@ -398,18 +424,12 @@
             (Token cmd, (markup_false, d)))]));
 
     val cmt = Scan.peek (fn d =>
-      Scan.one Document_Source.is_black_comment >> (fn tok => (NONE, (Token tok, ("", d)))));
+      Scan.one Document_Source.is_black_comment >> (fn tok => [(NONE, (Token tok, ("", d)))]));
 
     val other = Scan.peek (fn d =>
-       Parse.not_eof >> (fn tok => (NONE, (Token tok, ("", d)))));
+       Parse.not_eof >> (fn tok => [(NONE, (Token tok, ("", d)))]));
 
-    val tokens =
-      (ignored ||
-        markup Keyword.is_document_heading Heading markup_true ||
-        markup Keyword.is_document_body Body markup_true ||
-        markup Keyword.is_document_raw (Raw o #2) "") >> single ||
-      command ||
-      (cmt || other) >> single;
+    val tokens = ignored || output || command || cmt || other;
 
 
     (* spans *)
@@ -436,7 +456,7 @@
           make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));
 
     val spans = segments
-      |> maps (Command_Span.content o #span)
+      |> maps segment_content
       |> drop_suffix Token.is_space
       |> Source.of_list
       |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
@@ -484,13 +504,13 @@
 
 fun isabelle ctxt body =
   if Config.get ctxt Document_Antiquotation.thy_output_display
-  then Latex.environment_text "isabelle" body
-  else Latex.enclose_text "\\isa{" "}" body;
+  then Latex.environment "isabelle" body
+  else Latex.macro "isa" body;
 
 fun isabelle_typewriter ctxt body =
   if Config.get ctxt Document_Antiquotation.thy_output_display
-  then Latex.environment_text "isabellett" body
-  else Latex.enclose_text "\\isatt{" "}" body;
+  then Latex.environment "isabellett" body
+  else Latex.macro "isatt" body;
 
 fun typewriter ctxt s =
   isabelle_typewriter ctxt (Latex.string (Latex.output_ascii s));
--- a/src/Pure/Thy/export_theory.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/export_theory.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -94,15 +94,6 @@
       locale_dependencies.iterator.map(_.no_content) ++
       (for { (_, xs) <- others; x <- xs.iterator } yield x.no_content)
 
-    lazy val entity_by_range: Map[Symbol.Range, List[Entity[No_Content]]] =
-      entity_iterator.toList.groupBy(_.range)
-
-    lazy val entity_by_kind_name: Map[(String, String), Entity[No_Content]] =
-      entity_iterator.map(entity => ((entity.kind, entity.name), entity)).toMap
-
-    lazy val entity_kinds: Set[String] =
-      entity_iterator.map(_.kind).toSet
-
     def cache(cache: Term.Cache): Theory =
       Theory(cache.string(name),
         parents.map(cache.string),
--- a/src/Pure/Thy/html.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/html.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -233,7 +233,7 @@
   }
 
   def output(body: XML.Body, hidden: Boolean, structural: Boolean): String =
-    Library.make_string(output(_, body, hidden, structural))
+    Library.make_string(output(_, body, hidden, structural), capacity = XML.text_length(body) * 2)
 
   def output(tree: XML.Tree, hidden: Boolean, structural: Boolean): String =
     output(List(tree), hidden, structural)
--- a/src/Pure/Thy/latex.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/latex.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -10,33 +10,24 @@
   val text: string * Position.T -> text
   val string: string -> text
   val block: text -> XML.tree
-  val enclose_text: string -> string -> text -> text
-  val latex_text: text -> text
-  val output_text: text -> string
-  val output_name: string -> string
+  val output: text -> text
+  val macro0: string -> text
+  val macro: string -> text -> text
+  val environment: string -> text -> text
   val output_ascii: string -> string
   val output_ascii_breakable: string -> string -> string
   val output_symbols: Symbol.symbol list -> string
   val output_syms: string -> string
   val symbols: Symbol_Pos.T list -> text
   val symbols_output: Symbol_Pos.T list -> text
-  val begin_delim: string -> string
-  val end_delim: string -> string
-  val begin_tag: string -> string
-  val end_tag: string -> string
-  val environment_text: string -> text -> text
-  val environment: string -> string -> string
   val isabelle_body: string -> text -> text
   val theory_entry: string -> string
-  val index_escape: string -> string
   type index_item = {text: text, like: string}
   type index_entry = {items: index_item list, def: bool}
   val index_entry: index_entry -> text
   val index_variants: (binding -> bool option -> 'a -> 'a) -> binding -> 'a -> 'a
   val latexN: string
-  val latex_output: string -> string * int
   val latex_markup: string * Properties.T -> Markup.output
-  val latex_indent: string -> int -> string
 end;
 
 structure Latex: LATEX =
@@ -46,22 +37,20 @@
 
 type text = XML.body;
 
-fun text ("", _) = []
-  | text (s, pos) = [XML.Elem (Position.markup pos Markup.document_latex, [XML.Text s])];
+fun text (s, pos) =
+  if s = "" then []
+  else if pos = Position.none then [XML.Text s]
+  else [XML.Elem (Position.markup pos Markup.document_latex, [XML.Text s])];
 
 fun string s = text (s, Position.none);
 
 fun block body = XML.Elem (Markup.document_latex, body);
 
-fun latex_text text =
-  text |> maps
-    (fn XML.Elem ((name, _), body) =>
-          if name = Markup.document_latexN then latex_text body else []
-      | t => [t]);
+fun output body = [XML.Elem (Markup.latex_output, body)];
 
-val output_text = XML.content_of o latex_text;
-
-fun enclose_text bg en body = string bg @ body @ string en;
+fun macro0 name = [XML.Elem (Markup.latex_macro0 name, [])];
+fun macro name body = [XML.Elem (Markup.latex_macro name, body)];
+fun environment name body = [XML.Elem (Markup.latex_environment name, body)];
 
 
 (* output name for LaTeX macros *)
@@ -196,25 +185,10 @@
   text (output_symbols (map Symbol_Pos.symbol syms), #1 (Symbol_Pos.range syms));
 
 
-(* tags *)
-
-val begin_delim = enclose_name "%\n\\isadelim" "\n";
-val end_delim = enclose_name "%\n\\endisadelim" "\n";
-val begin_tag = enclose_name "%\n\\isatag" "\n";
-fun end_tag tg = enclose_name "%\n\\endisatag" "\n" tg ^ enclose "{\\isafold" "}%\n" tg;
-
-
 (* theory presentation *)
 
-fun environment_delim name =
- ("%\n\\begin{" ^ output_name name ^ "}%\n",
-  "%\n\\end{" ^ output_name name ^ "}");
-
-fun environment_text name = environment_delim name |-> enclose_text;
-fun environment name = environment_delim name |-> enclose;
-
 fun isabelle_body name =
-  enclose_text
+  XML.enclose
    ("%\n\\begin{isabellebody}%\n\\setisabellecontext{" ^ output_syms name ^ "}%\n")
    "%\n\\end{isabellebody}%\n";
 
@@ -226,24 +200,12 @@
 type index_item = {text: text, like: string};
 type index_entry = {items: index_item list, def: bool};
 
-val index_escape =
-  translate_string (fn s =>
-    if member_string "!\"@|" s then "\\char" ^ string_of_int (ord s)
-    else if member_string "\\{}#" s then "\"" ^ s else s);
-
 fun index_item (item: index_item) =
-  let
-    val like_text =
-      if #like item = "" then ""
-      else index_escape (#like item) ^ "@";
-    val item_text = index_escape (output_text (#text item));
-  in like_text ^ item_text end;
+  XML.wrap_elem ((Markup.latex_index_item, #text item), XML.string (#like item));
 
 fun index_entry (entry: index_entry) =
-  (space_implode "!" (map index_item (#items entry)) ^
-    "|" ^ index_escape (if #def entry then "isaindexdef" else "isaindexref"))
-  |> enclose "\\index{" "}"
-  |> string;
+  [XML.Elem (Markup.latex_index_entry (if #def entry then "isaindexdef" else "isaindexref"),
+    map index_item (#items entry))];
 
 fun index_binding NONE = I
   | index_binding (SOME def) = Binding.map_name (suffix (if def then "_def" else "_ref"));
@@ -256,22 +218,29 @@
 
 val latexN = "latex";
 
+local
+
 fun latex_output str =
   let val syms = Symbol.explode str
   in (output_symbols syms, length_symbols syms) end;
 
+val command_markup = YXML.output_markup (Markup.latex_macro "isacommand");
+val keyword_markup = YXML.output_markup (Markup.latex_macro "isakeyword");
+val indent_markup = YXML.output_markup (Markup.latex_macro "isaindent");
+
+in
+
 fun latex_markup (s, _: Properties.T) =
-  if s = Markup.commandN orelse s = Markup.keyword1N orelse s = Markup.keyword3N
-  then ("\\isacommand{", "}")
-  else if s = Markup.keyword2N
-  then ("\\isakeyword{", "}")
+  if member (op =) [Markup.commandN, Markup.keyword1N, Markup.keyword3N] s then command_markup
+  else if s = Markup.keyword2N then keyword_markup
   else Markup.no_output;
 
-fun latex_indent "" _ = ""
-  | latex_indent s _ = enclose "\\isaindent{" "}" s;
-
 val _ = Output.add_mode latexN latex_output (prefix Symbol.latex o cartouche);
 val _ = Markup.add_mode latexN latex_markup;
-val _ = Pretty.add_mode latexN latex_indent;
+
+val _ = Pretty.add_mode latexN
+  (fn s => fn _ => if s = "" then s else uncurry enclose indent_markup s);
 
 end;
+
+end;
--- a/src/Pure/Thy/latex.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/latex.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -11,49 +11,268 @@
 
 import scala.annotation.tailrec
 import scala.collection.mutable
+import scala.collection.immutable.TreeMap
 import scala.util.matching.Regex
 
 
 object Latex
 {
+  /* output name for LaTeX macros */
+
+  private val output_name_map: Map[Char, String] =
+    Map('_' -> "UNDERSCORE",
+      '\'' -> "PRIME",
+      '0' -> "ZERO",
+      '1' -> "ONE",
+      '2' -> "TWO",
+      '3' -> "THREE",
+      '4' -> "FOUR",
+      '5' -> "FIVE",
+      '6' -> "SIX",
+      '7' -> "SEVEN",
+      '8' -> "EIGHT",
+      '9' -> "NINE")
+
+  def output_name(name: String): String =
+    if (name.exists(output_name_map.keySet)) {
+      val res = new StringBuilder
+      for (c <- name) {
+        output_name_map.get(c) match {
+          case None => res += c
+          case Some(s) => res ++= s
+        }
+      }
+      res.toString
+    }
+    else name
+
+
+  /* index entries */
+
+  def index_escape(str: String): String =
+  {
+    val special1 = "!\"@|"
+    val special2 = "\\{}#"
+    if (str.exists(c => special1.contains(c) || special2.contains(c))) {
+      val res = new StringBuilder
+      for (c <- str) {
+        if (special1.contains(c)) {
+          res ++= "\\char"
+          res ++= Value.Int(c)
+        }
+        else {
+          if (special2.contains(c)) { res += '"'}
+          res += c
+        }
+      }
+      res.toString
+    }
+    else str
+  }
+
+  object Index_Item
+  {
+    sealed case class Value(text: Text, like: String)
+    def unapply(tree: XML.Tree): Option[Value] =
+      tree match {
+        case XML.Wrapped_Elem(Markup.Latex_Index_Item(_), text, like) =>
+          Some(Value(text, XML.content(like)))
+        case _ => None
+      }
+  }
+
+  object Index_Entry
+  {
+    sealed case class Value(items: List[Index_Item.Value], kind: String)
+    def unapply(tree: XML.Tree): Option[Value] =
+      tree match {
+        case XML.Elem(Markup.Latex_Index_Entry(kind), body) =>
+          val items = body.map(Index_Item.unapply)
+          if (items.forall(_.isDefined)) Some(Value(items.map(_.get), kind)) else None
+        case _ => None
+      }
+  }
+
+
+  /* tags */
+
+  object Tags
+  {
+    object Op extends Enumeration
+    {
+      val fold, drop, keep = Value
+    }
+
+    val standard = "document,theory,proof,ML,visible,-invisible,important,unimportant"
+
+    private def explode(spec: String): List[String] =
+      Library.space_explode(',', spec)
+
+    def apply(spec: String): Tags =
+      new Tags(spec,
+        (explode(standard) ::: explode(spec)).foldLeft(TreeMap.empty[String, Op.Value]) {
+          case (m, tag) =>
+            tag.toList match {
+              case '/' :: cs => m + (cs.mkString -> Op.fold)
+              case '-' :: cs => m + (cs.mkString -> Op.drop)
+              case '+' :: cs => m + (cs.mkString -> Op.keep)
+              case cs => m + (cs.mkString -> Op.keep)
+            }
+        })
+
+    val empty: Tags = apply("")
+  }
+
+  class Tags private(spec: String, map: TreeMap[String, Tags.Op.Value])
+  {
+    override def toString: String = spec
+
+    def get(name: String): Option[Tags.Op.Value] = map.get(name)
+
+    def sty(comment_latex: Boolean): File.Content =
+    {
+      val path = Path.explode("isabelletags.sty")
+      val comment =
+        if (comment_latex) """\usepackage{comment}"""
+        else """%plain TeX version of comment package -- much faster!
+\let\isafmtname\fmtname\def\fmtname{plain}
+\usepackage{comment}
+\let\fmtname\isafmtname"""
+      val tags =
+        (for ((name, op) <- map.iterator)
+          yield "\\isa" + op + "tag{" + name + "}").toList
+      File.Content(path, comment + """
+
+\newcommand{\isakeeptag}[1]%
+{\includecomment{isadelim#1}\includecomment{isatag#1}\csarg\def{isafold#1}{}}
+\newcommand{\isadroptag}[1]%
+{\excludecomment{isadelim#1}\excludecomment{isatag#1}\csarg\def{isafold#1}{}}
+\newcommand{\isafoldtag}[1]%
+{\includecomment{isadelim#1}\excludecomment{isatag#1}\csarg\def{isafold#1}{\isafold{#1}}}
+
+""" + Library.terminate_lines(tags))
+    }
+  }
+
+
   /* output text and positions */
 
   type Text = XML.Body
 
-  def output(latex_text: Text, file_pos: String = ""): String =
+  def position(a: String, b: String): String = "%:%" + a + "=" + b + "%:%\n"
+
+  def init_position(file_pos: String): List[String] =
+    if (file_pos.isEmpty) Nil
+    else List("\\endinput\n", position(Markup.FILE, file_pos))
+
+  class Output(options: Options)
   {
-    var line = 1
-    val result = new mutable.ListBuffer[String]
-    val positions = new mutable.ListBuffer[String]
+    def latex_output(latex_text: Text): String = apply(latex_text)
+
+    def latex_macro0(name: String, optional_argument: String = ""): Text =
+      XML.string("\\" + name + optional_argument)
+
+    def latex_macro(name: String, body: Text, optional_argument: String = ""): Text =
+      XML.enclose("\\" + name + optional_argument + "{", "}", body)
+
+    def latex_environment(name: String, body: Text, optional_argument: String = ""): Text =
+      XML.enclose(
+        "%\n\\begin{" + name + "}" + optional_argument + "%\n",
+        "%\n\\end{" + name + "}", body)
+
+    def latex_heading(kind: String, body: Text, optional_argument: String = ""): Text =
+      XML.enclose(
+        "%\n\\" + options.string("document_heading_prefix") + kind + optional_argument + "{",
+        "%\n}\n", body)
 
-    def position(a: String, b: String): String = "%:%" + a + "=" + b + "%:%\n"
+    def latex_body(kind: String, body: Text, optional_argument: String = ""): Text =
+      latex_environment("isamarkup" + kind, body, optional_argument)
 
-    if (file_pos.nonEmpty) {
-      positions += "\\endinput\n"
-      positions += position(Markup.FILE, file_pos)
+    def latex_tag(name: String, body: Text, delim: Boolean = false): Text =
+    {
+      val s = output_name(name)
+      val kind = if (delim) "delim" else "tag"
+      val end = if (delim) "" else "{\\isafold" + s + "}%\n"
+      if (options.bool("document_comment_latex")) {
+        XML.enclose(
+          "%\n\\begin{isa" + kind + s + "}\n",
+          "%\n\\end{isa" + kind + s + "}\n" + end, body)
+      }
+      else {
+        XML.enclose(
+          "%\n\\isa" + kind + s + "\n",
+          "%\n\\endisa" + kind + s + "\n" + end, body)
+      }
+    }
+
+    def index_item(item: Index_Item.Value): String =
+    {
+      val like = if (item.like.isEmpty) "" else index_escape(item.like) + "@"
+      val text = index_escape(latex_output(item.text))
+      like + text
     }
 
-    def traverse(body: XML.Body): Unit =
+    def index_entry(entry: Index_Entry.Value): Text =
+    {
+      val items = entry.items.map(index_item).mkString("!")
+      val kind = if (entry.kind.isEmpty) "" else "|" + index_escape(entry.kind)
+      latex_macro("index", XML.string(items + kind))
+    }
+
+
+    /* standard output of text with per-line positions */
+
+    def unknown_elem(elem: XML.Elem, pos: Position.T): XML.Body =
+      error("Unknown latex markup element " + quote(elem.name) + Position.here(pos) +
+        ":\n" + XML.string_of_tree(elem))
+
+    def apply(latex_text: Text, file_pos: String = ""): String =
     {
-      body.foreach {
-        case XML.Wrapped_Elem(_, _, _) =>
-        case XML.Elem(markup, body) =>
-          if (markup.name == Markup.DOCUMENT_LATEX) {
-            for { l <- Position.Line.unapply(markup.properties) if positions.nonEmpty } {
-              val s = position(Value.Int(line), Value.Int(l))
-              if (positions.last != s) positions += s
+      var line = 1
+      val result = new mutable.ListBuffer[String]
+      val positions = new mutable.ListBuffer[String] ++= init_position(file_pos)
+
+      val file_position = if (file_pos.isEmpty) Position.none else Position.File(file_pos)
+
+      def traverse(xml: XML.Body): Unit =
+      {
+        xml.foreach {
+          case XML.Text(s) =>
+            line += s.count(_ == '\n')
+            result += s
+          case elem @ XML.Elem(markup, body) =>
+            val a = Markup.Optional_Argument.get(markup.properties)
+            traverse {
+              markup match {
+                case Markup.Document_Latex(props) =>
+                  for (l <- Position.Line.unapply(props) if positions.nonEmpty) {
+                    val s = position(Value.Int(line), Value.Int(l))
+                    if (positions.last != s) positions += s
+                  }
+                  body
+                case Markup.Latex_Output(_) => XML.string(latex_output(body))
+                case Markup.Latex_Macro0(name) if body.isEmpty => latex_macro0(name, a)
+                case Markup.Latex_Macro(name) => latex_macro(name, body, a)
+                case Markup.Latex_Environment(name) => latex_environment(name, body, a)
+                case Markup.Latex_Heading(kind) => latex_heading(kind, body, a)
+                case Markup.Latex_Body(kind) => latex_body(kind, body, a)
+                case Markup.Latex_Delim(name) => latex_tag(name, body, delim = true)
+                case Markup.Latex_Tag(name) => latex_tag(name, body)
+                case Markup.Latex_Index_Entry(_) =>
+                  elem match {
+                    case Index_Entry(entry) => index_entry(entry)
+                    case _ => unknown_elem(elem, file_position)
+                  }
+                case _ => unknown_elem(elem, file_position)
+              }
             }
-            traverse(body)
-          }
-        case XML.Text(s) =>
-          line += s.count(_ == '\n')
-          result += s
+        }
       }
-    }
-    traverse(latex_text)
+      traverse(latex_text)
 
-    result ++= positions
-    result.mkString
+      result ++= positions
+      result.mkString
+    }
   }
 
 
--- a/src/Pure/Thy/presentation.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/presentation.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -22,7 +22,7 @@
 
   abstract class HTML_Context
   {
-    /* directory structure */
+    /* directory structure and resources */
 
     def root_dir: Path
     def theory_session(name: Document.Node.Name): Sessions.Info
@@ -34,30 +34,10 @@
     def files_path(name: Document.Node.Name, path: Path): Path =
       theory_path(name).dir + Path.explode("files") + path.squash.html
 
-
-    /* cached theory exports */
-
-    val cache: Term.Cache = Term.Cache.make()
-
-    private val already_presented = Synchronized(Set.empty[String])
-    def register_presented(nodes: List[Document.Node.Name]): List[Document.Node.Name] =
-      already_presented.change_result(presented =>
-        (nodes.filterNot(name => presented.contains(name.theory)),
-          presented ++ nodes.iterator.map(_.theory)))
-
-    private val theory_cache = Synchronized(Map.empty[String, Export_Theory.Theory])
-    def cache_theory(thy_name: String, make_thy: => Export_Theory.Theory): Export_Theory.Theory =
-    {
-      theory_cache.change_result(thys =>
-      {
-        thys.get(thy_name) match {
-          case Some(thy) => (thy, thys)
-          case None =>
-            val thy = make_thy
-            (thy, thys + (thy_name -> thy))
-        }
-      })
-    }
+    type Theory_Exports = Map[String, Entity_Context.Theory_Export]
+    def theory_exports: Theory_Exports = Map.empty
+    def theory_export(name: String): Entity_Context.Theory_Export =
+      theory_exports.getOrElse(name, Entity_Context.no_theory_export)
 
 
     /* HTML content */
@@ -115,6 +95,13 @@
 
   object Entity_Context
   {
+    sealed case class Theory_Export(
+      entity_by_range: Map[Symbol.Range, List[Export_Theory.Entity[Export_Theory.No_Content]]],
+      entity_by_kind_name: Map[(String, String), Export_Theory.Entity[Export_Theory.No_Content]],
+      others: List[String])
+
+    val no_theory_export: Theory_Export = Theory_Export(Map.empty, Map.empty, Nil)
+
     object Theory_Ref
     {
       def unapply(props: Properties.T): Option[Document.Node.Name] =
@@ -129,7 +116,8 @@
     {
       def unapply(props: Properties.T): Option[(Path, Option[String], String, String)] =
         (props, props, props, props) match {
-          case (Markup.Ref(_), Position.Def_File(def_file), Markup.Kind(kind), Markup.Name(name)) =>
+          case (Markup.Entity.Ref.Prop(_), Position.Def_File(def_file),
+              Markup.Kind(kind), Markup.Name(name)) =>
             val def_theory = Position.Def_Theory.unapply(props)
             Some((Path.explode(def_file), def_theory, kind, name))
           case _ => None
@@ -142,7 +130,7 @@
         session: String,
         deps: Sessions.Deps,
         node: Document.Node.Name,
-        theory_exports: Map[String, Export_Theory.Theory]): Entity_Context =
+        html_context: HTML_Context): Entity_Context =
       new Entity_Context {
         private val seen_ranges: mutable.Set[Symbol.Range] = mutable.Set.empty
 
@@ -153,7 +141,8 @@
             case _ =>
               Some {
                 val entities =
-                  theory_exports.get(node.theory).flatMap(_.entity_by_range.get(range))
+                  html_context.theory_exports.get(node.theory)
+                    .flatMap(_.entity_by_range.get(range))
                     .getOrElse(Nil)
                 val body1 =
                   if (seen_ranges.contains(range)) {
@@ -183,7 +172,7 @@
 
         private def logical_ref(thy_name: String, kind: String, name: String): Option[String] =
           for {
-            thy <- theory_exports.get(thy_name)
+            thy <- html_context.theory_exports.get(thy_name)
             entity <- thy.entity_by_kind_name.get((kind, name))
           } yield entity.kname
 
@@ -193,7 +182,7 @@
             case Theory_Ref(node_name) =>
               node_relative(deps, session, node_name).map(html_dir =>
                 HTML.link(html_dir + html_name(node_name), body))
-            case Entity_Ref(file_path, def_theory, kind, name) =>
+            case Entity_Ref(file_path, def_theory, kind, name) if file_path.get_ext == "thy" =>
               for {
                 thy_name <-
                   def_theory orElse (if (File.eq(node.path, file_path)) Some(node.theory) else None)
@@ -487,6 +476,40 @@
     else None
   }
 
+  def read_exports(
+    sessions: List[String],
+    deps: Sessions.Deps,
+    db_context: Sessions.Database_Context): Map[String, Entity_Context.Theory_Export] =
+  {
+    type Batch = (String, List[String])
+    val batches =
+      sessions.foldLeft((Set.empty[String], List.empty[Batch]))(
+        { case ((seen, batches), session) =>
+            val thys = deps(session).loaded_theories.keys.filterNot(seen)
+            (seen ++ thys, (session, thys) :: batches)
+        })._2
+    Par_List.map[Batch, List[(String, Entity_Context.Theory_Export)]](
+      { case (session, thys) =>
+          for (thy_name <- thys) yield {
+            val theory =
+              if (thy_name == Thy_Header.PURE) Export_Theory.no_theory
+              else {
+                val provider = Export.Provider.database_context(db_context, List(session), thy_name)
+                if (Export_Theory.read_theory_parents(provider, thy_name).isDefined) {
+                  Export_Theory.read_theory(provider, session, thy_name, cache = db_context.cache)
+                }
+                else Export_Theory.no_theory
+              }
+            val entity_by_range =
+              theory.entity_iterator.toList.groupBy(_.range)
+            val entity_by_kind_name =
+              theory.entity_iterator.map(entity => ((entity.kind, entity.name), entity)).toMap
+            val others = theory.others.keySet.toList
+            thy_name -> Entity_Context.Theory_Export(entity_by_range, entity_by_kind_name, others)
+          }
+      }, batches).flatten.toMap
+  }
+
   def session_html(
     session: String,
     deps: Sessions.Deps,
@@ -496,11 +519,13 @@
     html_context: HTML_Context,
     session_elements: Elements): Unit =
   {
-    val hierarchy = deps.sessions_structure.hierarchy(session)
     val info = deps.sessions_structure(session)
     val options = info.options
     val base = deps(session)
 
+    val hierarchy = deps.sessions_structure.build_hierarchy(session)
+    val hierarchy_theories = hierarchy.reverse.flatMap(a => deps(a).used_theories.map(_._1))
+
     val session_dir = Isabelle_System.make_directory(html_context.session_dir(info))
 
     Bytes.write(session_dir + session_graph_path,
@@ -538,125 +563,94 @@
           map(link => HTML.text("View ") ::: List(link))).flatten
     }
 
-    val all_used_theories = hierarchy.reverse.flatMap(a => deps(a).used_theories.map(_._1))
-    val present_theories = html_context.register_presented(all_used_theories)
+    def entity_context(name: Document.Node.Name): Entity_Context =
+      Entity_Context.make(session, deps, name, html_context)
+
+
+    sealed case class Seen_File(
+      src_path: Path, thy_name: Document.Node.Name, thy_session: String)
+    {
+      val files_path: Path = html_context.files_path(thy_name, src_path)
 
-    val theory_exports: Map[String, Export_Theory.Theory] =
-      (for (node <- all_used_theories.iterator) yield {
-        val thy_name = node.theory
-        val theory =
-          if (thy_name == Thy_Header.PURE) Export_Theory.no_theory
-          else {
-            html_context.cache_theory(thy_name,
-              {
-                val provider = Export.Provider.database_context(db_context, hierarchy, thy_name)
-                if (Export_Theory.read_theory_parents(provider, thy_name).isDefined) {
-                  Export_Theory.read_theory(
-                    provider, session, thy_name, cache = html_context.cache)
-                }
-                else Export_Theory.no_theory
-              })
-        }
-        thy_name -> theory
-      }).toMap
+      def check(src_path1: Path, thy_name1: Document.Node.Name, thy_session1: String): Boolean =
+      {
+        val files_path1 = html_context.files_path(thy_name1, src_path1)
+        (src_path == src_path1 || files_path == files_path1) && thy_session == thy_session1
+      }
+    }
+    var seen_files = List.empty[Seen_File]
 
-    def entity_context(name: Document.Node.Name): Entity_Context =
-      Entity_Context.make(session, deps, name, theory_exports)
-
-    val theories: List[XML.Body] =
+    def present_theory(name: Document.Node.Name): Option[XML.Body] =
     {
-      sealed case class Seen_File(
-        src_path: Path, thy_name: Document.Node.Name, thy_session: String)
+      progress.expose_interrupt()
+
+      Build_Job.read_theory(db_context, hierarchy, name.theory).flatMap(command =>
       {
-        val files_path: Path = html_context.files_path(thy_name, src_path)
+        if (verbose) progress.echo("Presenting theory " + name)
+        val snapshot = Document.State.init.snippet(command)
+
+        val thy_elements =
+          session_elements.copy(entity =
+            html_context.theory_export(name.theory).others
+              .foldLeft(session_elements.entity)(_ + _))
 
-        def check(src_path1: Path, thy_name1: Document.Node.Name, thy_session1: String): Boolean =
-        {
-          val files_path1 = html_context.files_path(thy_name1, src_path1)
-          (src_path == src_path1 || files_path == files_path1) && thy_session == thy_session1
-        }
-      }
-      var seen_files = List.empty[Seen_File]
+        val files_html =
+          for {
+            (src_path, xml) <- snapshot.xml_markup_blobs(elements = thy_elements.html)
+            if xml.nonEmpty
+          }
+          yield {
+            progress.expose_interrupt()
+            if (verbose) progress.echo("Presenting file " + src_path)
 
-      sealed case class Theory(
-        name: Document.Node.Name,
-        command: Command,
-        files_html: List[(Path, XML.Tree)],
-        html: XML.Tree)
+            (src_path, html_context.source(
+              make_html(Entity_Context.empty, thy_elements, xml)))
+          }
 
-      def read_theory(name: Document.Node.Name): Option[Theory] =
-      {
-        progress.expose_interrupt()
+        val thy_html =
+          html_context.source(
+            make_html(entity_context(name), thy_elements,
+              snapshot.xml_markup(elements = thy_elements.html)))
 
-        for (command <- Build_Job.read_theory(db_context, hierarchy, name.theory))
-        yield {
-          if (verbose) progress.echo("Presenting theory " + name)
-          val snapshot = Document.State.init.snippet(command)
-
-          val thy_elements =
-            session_elements.copy(entity =
-              theory_exports(name.theory).others.keySet.foldLeft(session_elements.entity)(_ + _))
+        val thy_session = html_context.theory_session(name)
+        val thy_dir = Isabelle_System.make_directory(html_context.session_dir(thy_session))
+        val files =
+          for { (src_path, file_html) <- files_html }
+            yield {
+              seen_files.find(_.check(src_path, name, thy_session.name)) match {
+                case None => seen_files ::= Seen_File(src_path, name, thy_session.name)
+                case Some(seen_file) =>
+                  error("Incoherent use of file name " + src_path + " as " + files_path(src_path) +
+                    " in theory " + seen_file.thy_name + " vs. " + name)
+              }
 
-          val files_html =
-            for {
-              (src_path, xml) <- snapshot.xml_markup_blobs(elements = thy_elements.html)
-              if xml.nonEmpty
-            }
-            yield {
-              progress.expose_interrupt()
-              if (verbose) progress.echo("Presenting file " + src_path)
+              val file_path = html_context.files_path(name, src_path)
+              val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short)
+              HTML.write_document(file_path.dir, file_path.file_name,
+                List(HTML.title(file_title)), List(html_context.head(file_title), file_html),
+                base = Some(html_context.root_dir))
 
-              (src_path, html_context.source(
-                make_html(entity_context(name), thy_elements, xml)))
+              List(HTML.link(files_path(src_path), HTML.text(file_title)))
             }
 
-          val html =
-            html_context.source(
-              make_html(entity_context(name), thy_elements,
-                snapshot.xml_markup(elements = thy_elements.html)))
-
-          Theory(name, command, files_html, html)
-        }
-      }
+        val thy_title = "Theory " + name.theory_base_name
 
-      (for (thy <- Par_List.map(read_theory, present_theories).flatten) yield {
-        val thy_session = html_context.theory_session(thy.name)
-        val thy_dir = Isabelle_System.make_directory(html_context.session_dir(thy_session))
-        val files =
-          for { (src_path, file_html) <- thy.files_html }
-          yield {
-            seen_files.find(_.check(src_path, thy.name, thy_session.name)) match {
-              case None => seen_files ::= Seen_File(src_path, thy.name, thy_session.name)
-              case Some(seen_file) =>
-                error("Incoherent use of file name " + src_path + " as " + files_path(src_path) +
-                  " in theory " + seen_file.thy_name + " vs. " + thy.name)
-            }
-
-            val file_path = html_context.files_path(thy.name, src_path)
-            val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short)
-            HTML.write_document(file_path.dir, file_path.file_name,
-              List(HTML.title(file_title)), List(html_context.head(file_title), file_html),
-              base = Some(html_context.root_dir))
-
-            List(HTML.link(files_path(src_path), HTML.text(file_title)))
-          }
-
-        val thy_title = "Theory " + thy.name.theory_base_name
-
-        HTML.write_document(thy_dir, html_name(thy.name),
-          List(HTML.title(thy_title)), List(html_context.head(thy_title), thy.html),
+        HTML.write_document(thy_dir, html_name(name),
+          List(HTML.title(thy_title)), List(html_context.head(thy_title), thy_html),
           base = Some(html_context.root_dir))
 
         if (thy_session.name == session) {
           Some(
-            List(HTML.link(html_name(thy.name),
-              HTML.text(thy.name.theory_base_name) :::
+            List(HTML.link(html_name(name),
+              HTML.text(name.theory_base_name) :::
                 (if (files.isEmpty) Nil else List(HTML.itemize(files))))))
         }
         else None
-      }).flatten
+      })
     }
 
+    val theories = base.session_theories.flatMap(present_theory)
+
     val title = "Session " + session
     HTML.write_document(session_dir, "index.html",
       List(HTML.title(title + Isabelle_System.isabelle_heading())),
--- a/src/Pure/Thy/sessions.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/sessions.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -52,7 +52,7 @@
   Scan.optional (Parse.$$$ "in" |-- Parse.!!! Parse.path_input) (Input.string ".") --
   (Parse.$$$ "=" |--
     Parse.!!! (Scan.option (Parse.session_name --| Parse.!!! (Parse.$$$ "+")) --
-      Scan.optional (Parse.$$$ "description" |-- Parse.!!! (Parse.input Parse.text)) Input.empty --
+      Scan.optional (Parse.$$$ "description" |-- Parse.!!! (Parse.input Parse.embedded)) Input.empty --
       Scan.optional (Parse.$$$ "options" |-- Parse.!!! Parse.options) [] --
       Scan.optional (Parse.$$$ "sessions" |--
         Parse.!!! (Scan.repeat1 Parse.session_name)) [] --
--- a/src/Pure/Thy/sessions.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/sessions.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -267,9 +267,9 @@
               info.document_theories.flatMap(
               {
                 case (thy, pos) =>
-                  val parent_sessions =
+                  val build_hierarchy =
                     if (sessions_structure.build_graph.defined(session_name)) {
-                      sessions_structure.build_requirements(List(session_name))
+                      sessions_structure.build_hierarchy(session_name)
                     }
                     else Nil
 
@@ -283,7 +283,7 @@
                       if (session_theories.contains(name)) {
                         err("Redundant document theory from this session:")
                       }
-                      else if (parent_sessions.contains(qualifier)) None
+                      else if (build_hierarchy.contains(qualifier)) None
                       else if (dependencies.theories.contains(name)) None
                       else err("Document theory from other session not imported properly:")
                   }
@@ -781,8 +781,7 @@
         else {
           (for {
             (name, (info, _)) <- graph.iterator
-            if info.dir_selected || select_session(name) ||
-              graph.get_node(name).groups.exists(select_group)
+            if info.dir_selected || select_session(name) || info.groups.exists(select_group)
           } yield name).toList
         }
 
@@ -837,17 +836,17 @@
       deps
     }
 
-    def hierarchy(session: String): List[String] = build_graph.all_preds(List(session))
-
     def build_selection(sel: Selection): List[String] = selected(build_graph, sel)
     def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss)
     def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss)
     def build_topological_order: List[String] = build_graph.topological_order
+    def build_hierarchy(session: String): List[String] = build_graph.all_preds(List(session))
 
     def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel)
     def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss)
     def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss)
     def imports_topological_order: List[String] = imports_graph.topological_order
+    def imports_hierarchy(session: String): List[String] = imports_graph.all_preds(List(session))
 
     def bibtex_entries: List[(String, List[String])] =
       build_topological_order.flatMap(name =>
--- a/src/Pure/Thy/thy_header.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Thy/thy_header.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -147,7 +147,7 @@
 
 val abbrevs =
   Parse.and_list1
-    (Scan.repeat1 Parse.text -- (Parse.$$$ "=" |-- Parse.!!! (Scan.repeat1 Parse.text))
+    (Scan.repeat1 Parse.embedded -- (Parse.$$$ "=" |-- Parse.!!! (Scan.repeat1 Parse.embedded))
       >> uncurry (map_product pair)) >> flat;
 
 val keyword_decls = Parse.and_list1 keyword_decl >> flat;
--- a/src/Pure/Tools/build.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Tools/build.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -60,7 +60,7 @@
       else
        (Output.physical_stderr ("Skipping theories " ^ commas_quote (map #1 thys) ^
           " (undefined " ^ commas conds ^ ")\n"); [])
-  in apply_hooks qualifier loaded_theories end;
+  in loaded_theories end;
 
 
 (* build session *)
@@ -81,11 +81,16 @@
 
           fun build () =
             let
-              val res1 =
+              val res =
                 theories |>
-                  (List.app (build_theories session_name)
+                  (map (build_theories session_name)
                     |> session_timing
                     |> Exn.capture);
+              val res1 =
+                (case res of
+                  Exn.Res loaded_theories =>
+                    Exn.capture (apply_hooks session_name) (flat loaded_theories)
+                | Exn.Exn exn => Exn.Exn exn);
               val res2 = Exn.capture Session.finish ();
 
               val _ = Resources.finish_session_base ();
--- a/src/Pure/Tools/build.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Tools/build.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -161,7 +161,7 @@
   {
     val props = build_log.session_timing
     val threads = Markup.Session_Timing.Threads.unapply(props) getOrElse 1
-    val timing = Markup.Timing_Properties.parse(props)
+    val timing = Markup.Timing_Properties.get(props)
     "Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")"
   }
 
@@ -191,7 +191,6 @@
       options +
         "completion_limit=0" +
         "editor_tracing_messages=0" +
-        "kodkod_scala=false" +
         ("pide_reports=" + options.bool("build_pide_reports"))
 
     val store = Sessions.store(build_options)
@@ -205,6 +204,7 @@
       Sessions.load_structure(build_options, dirs = dirs, select_dirs = select_dirs, infos = infos)
 
     val full_sessions_selection = full_sessions.imports_selection(selection)
+    val full_sessions_selected = full_sessions_selection.toSet
 
     def sources_stamp(deps: Sessions.Deps, session_name: String): String =
     {
@@ -241,6 +241,13 @@
       else deps0
     }
 
+    val presentation_sessions =
+      (for {
+        session_name <- deps.sessions_structure.build_topological_order.iterator
+        info <- deps.sessions_structure.get(session_name)
+        if full_sessions_selected(session_name) && presentation.enabled(info) }
+      yield info).toList
+
 
     /* check unknown files */
 
@@ -297,7 +304,7 @@
     val log =
       build_options.string("system_log") match {
         case "" => No_Logger
-        case "true" => Logger.make(progress)
+        case "-" => Logger.make(progress)
         case log_file => Logger.make(Some(Path.explode(log_file)))
       }
 
@@ -488,14 +495,6 @@
     /* PDF/HTML presentation */
 
     if (!no_build && !progress.stopped && results.ok) {
-      val selected = full_sessions_selection.toSet
-      val presentation_sessions =
-        (for {
-          session_name <- deps.sessions_structure.imports_topological_order.iterator
-          info <- results.get_info(session_name)
-          if selected(session_name) && presentation.enabled(info) && results(session_name).ok }
-        yield info).toList
-
       if (presentation_sessions.nonEmpty) {
         val presentation_dir = presentation.dir(store)
         progress.echo("Presentation in " + presentation_dir.absolute)
@@ -507,22 +506,28 @@
         }
 
         using(store.open_database_context())(db_context =>
-          for (session <- presentation_sessions.map(_.name)) {
+        {
+          val exports =
+            Presentation.read_exports(presentation_sessions.map(_.name), deps, db_context)
+
+          Par_List.map((session: String) =>
+          {
             progress.expose_interrupt()
             progress.echo("Presenting " + session + " ...")
 
             val html_context =
               new Presentation.HTML_Context {
-                override val cache: Term.Cache = store.cache
                 override def root_dir: Path = presentation_dir
                 override def theory_session(name: Document.Node.Name): Sessions.Info =
                   deps.sessions_structure(deps(session).theory_qualifier(name))
+                override def theory_exports: Theory_Exports = exports
               }
             Presentation.session_html(
               session, deps, db_context, progress = progress,
               verbose = verbose, html_context = html_context,
               Presentation.elements1)
-          })
+          }, presentation_sessions.map(_.name))
+        })
       }
     }
 
--- a/src/Pure/Tools/rail.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Tools/rail.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -341,50 +341,53 @@
   let
     val output_antiq =
       Antiquote.Antiq #>
-      Document_Antiquotation.evaluate Latex.symbols ctxt #>
-      Latex.output_text;
+      Document_Antiquotation.evaluate Latex.symbols ctxt;
     fun output_text b s =
-      Output.output s
-      |> b ? enclose "\\isakeyword{" "}"
-      |> enclose "\\isa{" "}";
+      Latex.string (Output.output s)
+      |> b ? Latex.macro "isakeyword"
+      |> Latex.macro "isa";
 
     fun output_cat c (Cat (_, rails)) = outputs c rails
     and outputs c [rail] = output c rail
-      | outputs _ rails = implode (map (output "") rails)
-    and output _ (Bar []) = ""
+      | outputs _ rails = maps (output "") rails
+    and output _ (Bar []) = []
       | output c (Bar [cat]) = output_cat c cat
       | output _ (Bar (cat :: cats)) =
-          "\\rail@bar\n" ^ output_cat "" cat ^
-          implode (map (fn Cat (y, rails) =>
-              "\\rail@nextbar{" ^ string_of_int y ^ "}\n" ^ outputs "" rails) cats) ^
-          "\\rail@endbar\n"
+          Latex.string ("\\rail@bar\n") @ output_cat "" cat @
+          maps (fn Cat (y, rails) =>
+            Latex.string ("\\rail@nextbar{" ^ string_of_int y ^ "}\n") @ outputs "" rails) cats @
+          Latex.string "\\rail@endbar\n"
       | output c (Plus (cat, Cat (y, rails))) =
-          "\\rail@plus\n" ^ output_cat c cat ^
-          "\\rail@nextplus{" ^ string_of_int y ^ "}\n" ^ outputs "c" rails ^
-          "\\rail@endplus\n"
-      | output _ (Newline y) = "\\rail@cr{" ^ string_of_int y ^ "}\n"
-      | output c (Nonterminal s) = "\\rail@" ^ c ^ "nont{" ^ output_text false s ^ "}[]\n"
-      | output c (Terminal (b, s)) = "\\rail@" ^ c ^ "term{" ^ output_text b s ^ "}[]\n"
+          Latex.string "\\rail@plus\n" @ output_cat c cat @
+          Latex.string ("\\rail@nextplus{" ^ string_of_int y ^ "}\n") @ outputs "c" rails @
+          Latex.string "\\rail@endplus\n"
+      | output _ (Newline y) = Latex.string ("\\rail@cr{" ^ string_of_int y ^ "}\n")
+      | output c (Nonterminal s) =
+          Latex.string ("\\rail@" ^ c ^ "nont{") @ output_text false s @ Latex.string "}[]\n"
+      | output c (Terminal (b, s)) =
+          Latex.string ("\\rail@" ^ c ^ "term{") @ output_text b s @ Latex.string "}[]\n"
       | output c (Antiquote (b, a)) =
-          "\\rail@" ^ c ^ (if b then "term{" else "nont{") ^ output_antiq a ^ "}[]\n";
+          Latex.string ("\\rail@" ^ c ^ (if b then "term{" else "nont{")) @
+          Latex.output (output_antiq a) @
+          Latex.string "}[]\n";
 
     fun output_rule (name, rail) =
       let
         val (rail', y') = vertical_range rail 0;
         val out_name =
           (case name of
-            Antiquote.Text "" => ""
+            Antiquote.Text "" => []
           | Antiquote.Text s => output_text false s
           | Antiquote.Antiq a => output_antiq a);
       in
-        "\\rail@begin{" ^ string_of_int y' ^ "}{" ^ out_name ^ "}\n" ^
-        output "" rail' ^
-        "\\rail@end\n"
+        Latex.string ("\\rail@begin{" ^ string_of_int y' ^ "}{") @ out_name @ Latex.string "}\n" @
+        output "" rail' @
+        Latex.string "\\rail@end\n"
       end;
-  in Latex.string (Latex.environment "railoutput" (implode (map output_rule rules))) end;
+  in Latex.environment "railoutput" (maps output_rule rules) end;
 
 val _ = Theory.setup
-  (Document_Output.antiquotation_raw_embedded \<^binding>\<open>rail\<close> (Scan.lift Args.text_input)
+  (Document_Output.antiquotation_raw_embedded \<^binding>\<open>rail\<close> (Scan.lift Parse.embedded_input)
     (fn ctxt => output_rules ctxt o read ctxt));
 
 end;
--- a/src/Pure/Tools/scala_project.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Tools/scala_project.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -143,7 +143,7 @@
 
   def package_name(source_file: Path): Option[String] =
   {
-    val lines = split_lines(File.read(source_file))
+    val lines = Library.trim_split_lines(File.read(source_file))
     val Package = """\s*\bpackage\b\s*(?:object\b\s*)?((?:\w|\.)+)\b.*""".r
     lines.collectFirst({ case Package(name) => name })
   }
--- a/src/Pure/Tools/update_cartouches.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Tools/update_cartouches.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -14,8 +14,6 @@
 {
   /* update cartouches */
 
-  private val Verbatim_Body = """(?s)[ \t]*(.*?)[ \t]*""".r
-
   val Text_Antiq: Regex = """(?s)@\{\s*text\b\s*(.+)\}""".r
 
   def update_text(content: String): String =
@@ -46,12 +44,6 @@
       (for (tok <- Token.explode(Keyword.Keywords.empty, text0).iterator)
         yield {
           if (tok.kind == Token.Kind.ALT_STRING) Symbol.cartouche(tok.content)
-          else if (tok.kind == Token.Kind.VERBATIM) {
-            tok.content match {
-              case Verbatim_Body(s) => Symbol.cartouche(s)
-              case s => tok.source
-            }
-          }
           else tok.source
         }
       ).mkString
@@ -96,7 +88,7 @@
     -t           replace @{text} antiquotations within text tokens
 
   Recursively find .thy or ROOT files and update theory syntax to use
-  cartouches instead of old-style {* verbatim *} or `alt_string` tokens.
+  cartouches instead of `alt_string` tokens.
 
   Old versions of files are preserved by appending "~~".
 """,
--- a/src/Pure/Tools/update_comments.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/Tools/update_comments.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -23,7 +23,7 @@
         case tok :: rest
         if tok.source == "--" || tok.source == Symbol.comment =>
           rest.dropWhile(_.is_space) match {
-            case tok1 :: rest1 if tok1.is_text =>
+            case tok1 :: rest1 if tok1.is_embedded =>
               update(rest1, make_comment(tok1) :: result)
             case _ => update(rest, tok.source :: result)
           }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/ex/Alternative_Headings.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -0,0 +1,34 @@
+(*  Title:      Pure/ex/Alternative_Headings.thy
+    Author:     Makarius
+*)
+
+chapter \<open>Alternative document headings\<close>
+
+theory Alternative_Headings
+  imports Pure
+  keywords
+    "chapter*" "section*" "subsection*" "subsubsection*" :: document_heading
+begin
+
+ML \<open>
+local
+
+fun alternative_heading name body =
+  [XML.Elem (Markup.latex_heading (unsuffix "*" name) |> Markup.optional_argument "*", body)];
+
+fun document_heading (name, pos) =
+  Outer_Syntax.command (name, pos) (name ^ " heading")
+    (Parse.opt_target -- Parse.document_source --| Scan.option (Parse.$$$ ";") >>
+      Document_Output.document_output
+        {markdown = false, markup = alternative_heading name});
+
+val _ =
+  List.app document_heading
+   [\<^command_keyword>\<open>chapter*\<close>,
+    \<^command_keyword>\<open>section*\<close>,
+    \<^command_keyword>\<open>subsection*\<close>,
+    \<^command_keyword>\<open>subsubsection*\<close>];
+
+in end\<close>
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/ex/Alternative_Headings_Examples.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -0,0 +1,23 @@
+(*  Title:      Pure/ex/Alternative_Headings_Examples.thy
+    Author:     Makarius
+*)
+
+chapter \<open>Some examples of alternative document headings\<close>
+
+theory Alternative_Headings_Examples
+  imports Alternative_Headings
+begin
+
+section \<open>Regular section\<close>
+
+subsection \<open>Regular subsection\<close>
+
+subsubsection \<open>Regular subsubsection\<close>
+
+subsubsection* \<open>Alternative subsubsection\<close>
+
+subsection* \<open>Alternative subsection\<close>
+
+section* \<open>Alternative section\<close>
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/ex/document/root.tex	Fri Dec 10 08:58:09 2021 +0100
@@ -0,0 +1,20 @@
+\documentclass[10pt,a4paper]{report}
+\usepackage[T1]{fontenc}
+\usepackage{ifthen,proof,amssymb,isabelle,isabellesym}
+
+\isabellestyle{sltt}
+\usepackage{pdfsetup}\urlstyle{rm}
+
+
+\hyphenation{Isabelle}
+
+\begin{document}
+
+\title{Miscellaneous examples and experiments for Isabelle/Pure}
+\maketitle
+
+\parindent 0pt \parskip 0.5ex
+
+\input{session}
+
+\end{document}
--- a/src/Pure/library.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/library.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -129,9 +129,9 @@
 
   /* strings */
 
-  def make_string(f: StringBuilder => Unit): String =
+  def make_string(f: StringBuilder => Unit, capacity: Int = 16): String =
   {
-    val s = new StringBuilder
+    val s = new StringBuilder(capacity)
     f(s)
     s.toString
   }
--- a/src/Pure/pure_syn.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Pure/pure_syn.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -7,66 +7,43 @@
 
 signature PURE_SYN =
 sig
-  val document_command: {markdown: bool} -> (xstring * Position.T) option * Input.source ->
-    Toplevel.transition -> Toplevel.transition
   val bootstrap_thy: theory
 end;
 
 structure Pure_Syn: PURE_SYN =
 struct
 
-val semi = Scan.option (Parse.$$$ ";");
-
-fun output_document state markdown txt =
-  let
-    val ctxt = Toplevel.presentation_context state;
-    val _ = Context_Position.reports ctxt (Document_Output.document_reports txt);
-  in Document_Output.output_document ctxt markdown txt end;
+fun document_heading (name, pos) =
+  Outer_Syntax.command (name, pos) (name ^ " heading")
+    (Parse.opt_target -- Parse.document_source --| Scan.option (Parse.$$$ ";") >>
+      Document_Output.document_output
+        {markdown = false, markup = fn body => [XML.Elem (Markup.latex_heading name, body)]});
 
-fun document_command markdown (loc, txt) =
-  Toplevel.keep (fn state =>
-    (case loc of
-      NONE => ignore (output_document state markdown txt)
-    | SOME (_, pos) =>
-        error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
-  Toplevel.present_local_theory loc (fn state =>
-    ignore (output_document state markdown txt));
-
-val _ =
-  Outer_Syntax.command ("chapter", \<^here>) "chapter heading"
-    (Parse.opt_target -- Parse.document_source --| semi >> document_command {markdown = false});
+fun document_body ((name, pos), description) =
+  Outer_Syntax.command (name, pos) description
+    (Parse.opt_target -- Parse.document_source >>
+      Document_Output.document_output
+        {markdown = true, markup = fn body => [XML.Elem (Markup.latex_body name, body)]});
 
 val _ =
-  Outer_Syntax.command ("section", \<^here>) "section heading"
-    (Parse.opt_target -- Parse.document_source --| semi >> document_command {markdown = false});
-
-val _ =
-  Outer_Syntax.command ("subsection", \<^here>) "subsection heading"
-    (Parse.opt_target -- Parse.document_source --| semi >> document_command {markdown = false});
-
-val _ =
-  Outer_Syntax.command ("subsubsection", \<^here>) "subsubsection heading"
-    (Parse.opt_target -- Parse.document_source --| semi >> document_command {markdown = false});
+  List.app document_heading
+   [("chapter", \<^here>),
+    ("section", \<^here>),
+    ("subsection", \<^here>),
+    ("subsubsection", \<^here>),
+    ("paragraph", \<^here>),
+    ("subparagraph", \<^here>)];
 
 val _ =
-  Outer_Syntax.command ("paragraph", \<^here>) "paragraph heading"
-    (Parse.opt_target -- Parse.document_source --| semi >> document_command {markdown = false});
-
-val _ =
-  Outer_Syntax.command ("subparagraph", \<^here>) "subparagraph heading"
-    (Parse.opt_target -- Parse.document_source --| semi >> document_command {markdown = false});
-
-val _ =
-  Outer_Syntax.command ("text", \<^here>) "formal comment (primary style)"
-    (Parse.opt_target -- Parse.document_source >> document_command {markdown = true});
-
-val _ =
-  Outer_Syntax.command ("txt", \<^here>) "formal comment (secondary style)"
-    (Parse.opt_target -- Parse.document_source >> document_command {markdown = true});
+  List.app document_body
+   [(("text", \<^here>), "formal comment (primary style)"),
+    (("txt", \<^here>), "formal comment (secondary style)")];
 
 val _ =
   Outer_Syntax.command ("text_raw", \<^here>) "LaTeX text (without surrounding environment)"
-    (Parse.opt_target -- Parse.document_source >> document_command {markdown = true});
+    (Parse.opt_target -- Parse.document_source >>
+      Document_Output.document_output
+        {markdown = true, markup = XML.enclose "%\n" "\n"});
 
 val _ =
   Outer_Syntax.command ("theory", \<^here>) "begin theory"
--- a/src/Tools/Code/code_target.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Tools/Code/code_target.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -746,7 +746,7 @@
   Outer_Syntax.command \<^command_keyword>\<open>code_printing\<close> "declare dedicated printing for code symbols"
     (parse_symbol_pragmas (Code_Printer.parse_const_syntax) (Code_Printer.parse_tyco_syntax)
       Parse.string (Parse.minus >> K ()) (Parse.minus >> K ())
-      (Parse.text -- Scan.optional (\<^keyword>\<open>for\<close> |-- parse_simple_symbols) [])
+      (Parse.embedded -- Scan.optional (\<^keyword>\<open>for\<close> |-- parse_simple_symbols) [])
       >> (Toplevel.theory o fold set_printings_cmd));
 
 val _ =
--- a/src/Tools/Haskell/Haskell.thy	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Tools/Haskell/Haskell.thy	Fri Dec 10 08:58:09 2021 +0100
@@ -322,7 +322,7 @@
         Just y -> Just (i, y)
 
 separate :: a -> [a] -> [a]
-separate s (x : (xs @ (_ : _))) = x : s : separate s xs
+separate s (x : xs@(_ : _)) = x : s : separate s xs
 separate _ xs = xs;
 
 
@@ -342,7 +342,7 @@
   space_explode :: Char -> String -> [String]
   space_explode c = Split.split (Split.dropDelims (Split.whenElt (== c)))
   trim_line :: String -> String
-  trim_line s = gen_trim_line (length s) ((!!) s) take s
+  trim_line s = gen_trim_line (length s) (s !!) take s
 
 instance StringLike Text where
   space_explode :: Char -> Text -> [Text]
@@ -699,10 +699,7 @@
 get props name = List.lookup name props
 
 get_value :: (Bytes -> Maybe a) -> T -> Bytes -> Maybe a
-get_value parse props name =
-  case get props name of
-    Nothing -> Nothing
-    Just s -> parse s
+get_value parse props name = maybe Nothing parse (get props name)
 
 put :: Entry -> T -> T
 put entry props = entry : remove (fst entry) props
@@ -2746,7 +2743,6 @@
 Type classes for XML data representation.
 -}
 
-{-# LANGUAGE TypeSynonymInstances #-}
 {-# LANGUAGE FlexibleInstances #-}
 
 module Isabelle.XML.Classes
--- a/src/Tools/jEdit/src/jedit_rendering.scala	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Tools/jEdit/src/jedit_rendering.scala	Fri Dec 10 08:58:09 2021 +0100
@@ -70,7 +70,6 @@
       Token.Kind.SPACE -> NULL,
       Token.Kind.STRING -> LITERAL1,
       Token.Kind.ALT_STRING -> LITERAL2,
-      Token.Kind.VERBATIM -> COMMENT3,
       Token.Kind.CARTOUCHE -> COMMENT3,
       Token.Kind.CONTROL -> COMMENT3,
       Token.Kind.INFORMAL_COMMENT -> COMMENT1,
@@ -228,8 +227,8 @@
   def entity_ref(range: Text.Range, focus: Rendering.Focus): List[Text.Info[Color]] =
     snapshot.select(range, Rendering.entity_elements, _ =>
       {
-        case Text.Info(_, XML.Elem(Markup(Markup.ENTITY, Markup.Ref(i)), _))
-        if focus(i) => Some(entity_ref_color)
+        case Text.Info(_, XML.Elem(Markup.Entity.Ref(i), _)) if focus(i) =>
+          Some(entity_ref_color)
         case _ => None
       })
 
--- a/src/Tools/quickcheck.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Tools/quickcheck.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -281,7 +281,7 @@
 
 fun limit timeout (limit_time, is_interactive) f exc () =
   if limit_time then
-    Timeout.apply timeout f ()
+    Timeout.apply_physical timeout f ()
       handle timeout_exn as Timeout.TIMEOUT _ =>
         if is_interactive then exc () else Exn.reraise timeout_exn
   else f ();
--- a/src/Tools/try.ML	Fri Dec 10 08:39:34 2021 +0100
+++ b/src/Tools/try.ML	Fri Dec 10 08:58:09 2021 +0100
@@ -89,7 +89,7 @@
               val auto_time_limit = Options.default_real \<^system_option>\<open>auto_time_limit\<close>
             in
               if auto_time_limit > 0.0 then
-                (case Timeout.apply (seconds auto_time_limit) (fn () => body true state) () of
+                (case Timeout.apply_physical (seconds auto_time_limit) (fn () => body true state) () of
                   (true, (_, outcome)) => List.app Output.information outcome
                 | _ => ())
               else ()