merged;
author wenzelm
Sat, 20 Aug 2022 21:34:55 +0200
changeset 75935 06eb4d0031e3
parent 75883 d7e0b6620c07 (current diff)
parent 75934 4586e90573ac (diff)
child 75936 d2e6a1342c90
merged;
NEWS
--- a/NEWS	Fri Aug 19 05:49:17 2022 +0000
+++ b/NEWS	Sat Aug 20 21:34:55 2022 +0200
@@ -202,6 +202,10 @@
 
 *** System ***
 
+* HTML presentation no longer supports README.html, which was meant as
+an add-on to the index.html of a session. Rare INCOMPATIBILITY, consider
+using a separate theory "README" with Isabelle document markup/markdown.
+
 * Isabelle/Scala is now based on Scala 3. This is a completely different
 compiler ("dotty") and a quite different source language (we are using
 the classic Java-style syntax, not the new Python-style syntax).
--- a/etc/build.props	Fri Aug 19 05:49:17 2022 +0000
+++ b/etc/build.props	Sat Aug 20 21:34:55 2022 +0200
@@ -116,6 +116,7 @@
   src/Pure/PIDE/command_span.scala \
   src/Pure/PIDE/document.scala \
   src/Pure/PIDE/document_id.scala \
+  src/Pure/PIDE/document_info.scala \
   src/Pure/PIDE/document_status.scala \
   src/Pure/PIDE/editor.scala \
   src/Pure/PIDE/headless.scala \
--- a/src/HOL/Algebra/README.html	Fri Aug 19 05:49:17 2022 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,85 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<HTML>
-
-<HEAD>
-  <meta http-equiv="content-type" content="text/html;charset=iso-8859-1">
-  <TITLE>HOL/Algebra/README.html</TITLE>
-</HEAD>
-
-<BODY>
-
-<H1>Algebra --- Classical Algebra, using Explicit Structures and Locales</H1>
-
-This directory contains proofs in classical algebra.  It is intended
-as a base for any algebraic development in Isabelle.  Emphasis is on
-reusability.  This is achieved by modelling algebraic structures
-as first-class citizens of the logic (not axiomatic type classes, say).
-The library is expected to grow in future releases of Isabelle.
-Contributions are welcome.
-
-<H2>GroupTheory, including Sylow's Theorem</H2>
-
-<P>These proofs are mainly by Florian Kamm&uuml;ller.  (Later, Larry
-Paulson simplified some of the proofs.)  These theories were indeed
-the original motivation for locales.
-
-Here is an outline of the directory's contents: <UL> <LI>Theory <A
-HREF="Group.html"><CODE>Group</CODE></A> defines semigroups, monoids,
-groups, commutative monoids, commutative groups, homomorphisms and the
-subgroup relation.  It also defines the product of two groups
-(This theory was reimplemented by Clemens Ballarin).
-
-<LI>Theory <A HREF="FiniteProduct.html"><CODE>FiniteProduct</CODE></A> extends
-commutative groups by a product operator for finite sets (provided by
-Clemens Ballarin).
-
-<LI>Theory <A HREF="Coset.html"><CODE>Coset</CODE></A> defines
-the factorization of a group and shows that the factorization a normal
-subgroup is a group.
-
-<LI>Theory <A HREF="Bij.html"><CODE>Bij</CODE></A>
-defines bijections over sets and operations on them and shows that they
-are a group.  It shows that automorphisms form a group.
-
-<LI>Theory <A HREF="Exponent.html"><CODE>Exponent</CODE></A> the
-	    combinatorial argument underlying Sylow's first theorem.
-
-<LI>Theory <A HREF="Sylow.html"><CODE>Sylow</CODE></A>
-contains a proof of the first Sylow theorem.
-</UL>
-
-<H2>Rings and Polynomials</H2>
-
-<UL><LI>Theory <A HREF="Ring.html"><CODE>CRing</CODE></A>
-defines Abelian monoids and groups.  The difference to commutative
-      structures is merely notational:  the binary operation is
-      addition rather than multiplication.  Commutative rings are
-      obtained by inheriting properties from Abelian groups and
-      commutative monoids.  Further structures in the algebraic
-      hierarchy of rings: integral domain.
-
-<LI>Theory <A HREF="Module.html"><CODE>Module</CODE></A>
-introduces the notion of a R-left-module over an Abelian group, where
-	R is a ring.
-
-<LI>Theory <A HREF="UnivPoly.html"><CODE>UnivPoly</CODE></A>
-constructs univariate polynomials over rings and integral domains.
-	  Degree function.  Universal Property.
-</UL>
-
-<H2>Development of Polynomials using Type Classes</H2>
-
-<P>A development of univariate polynomials for HOL's ring classes
-is available at <CODE>HOL/Library/Polynomial</CODE>.
-
-<P>[Jacobson1985] Nathan Jacobson, Basic Algebra I, Freeman, 1985.
-
-<P>[Ballarin1999] Clemens Ballarin, Computer Algebra and Theorem Proving,
-  Author's PhD thesis, 1999.  Also University of Cambridge, Computer Laboratory Technical Report number 473.
-
-<ADDRESS>
-<P><A HREF="http://www21.in.tum.de/~ballarin">Clemens Ballarin</A>.
-</ADDRESS>
-</BODY>
-</HTML>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Algebra/README.thy	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,74 @@
+theory README imports Main
+begin
+
+section \<open>Algebra --- Classical Algebra, using Explicit Structures and Locales\<close>
+
+text \<open>
+  This directory contains proofs in classical algebra. It is intended as a
+  base for any algebraic development in Isabelle. Emphasis is on reusability.
+  This is achieved by modelling algebraic structures as first-class citizens
+  of the logic (not axiomatic type classes, say). The library is expected to
+  grow in future releases of Isabelle. Contributions are welcome.
+\<close>
+
+subsection \<open>GroupTheory, including Sylow's Theorem\<close>
+
+text \<open>
+  These proofs are mainly by Florian Kammüller. (Later, Larry Paulson
+  simplified some of the proofs.) These theories were indeed the original
+  motivation for locales.
+
+  Here is an outline of the directory's contents:
+
+  \<^item> Theory \<^file>\<open>Group.thy\<close> defines semigroups, monoids, groups, commutative
+    monoids, commutative groups, homomorphisms and the subgroup relation. It
+    also defines the product of two groups (this theory was reimplemented by
+    Clemens Ballarin).
+
+  \<^item> Theory \<^file>\<open>FiniteProduct.thy\<close> extends commutative groups by a product
+    operator for finite sets (provided by Clemens Ballarin).
+
+  \<^item> Theory \<^file>\<open>Coset.thy\<close> defines the factorization of a group and shows that
+    the factorization by a normal subgroup is a group.
+
+  \<^item> Theory \<^file>\<open>Bij.thy\<close> defines bijections over sets and operations on them and
+    shows that they are a group. It shows that automorphisms form a group.
+
+  \<^item> Theory \<^file>\<open>Exponent.thy\<close> contains the combinatorial argument underlying
+    Sylow's first theorem.
+
+  \<^item> Theory \<^file>\<open>Sylow.thy\<close> contains a proof of the first Sylow theorem.
+\<close>
+
+
+subsection \<open>Rings and Polynomials\<close>
+
+text \<open>
+  \<^item> Theory \<^file>\<open>Ring.thy\<close> defines Abelian monoids and groups. The difference to
+    commutative structures is merely notational: the binary operation is
+    addition rather than multiplication. Commutative rings are obtained by
+    inheriting properties from Abelian groups and commutative monoids. Further
+    structures in the algebraic hierarchy of rings: integral domain.
+
+  \<^item> Theory \<^file>\<open>Module.thy\<close> introduces the notion of an R-left-module over an
+    Abelian group, where R is a ring.
+
+  \<^item> Theory \<^file>\<open>UnivPoly.thy\<close> constructs univariate polynomials over rings and
+    integral domains. Degree function. Universal Property.
+\<close>
+
+
+subsection \<open>Development of Polynomials using Type Classes\<close>
+
+text \<open>
+  A development of univariate polynomials for HOL's ring classes is available
+  at \<^file>\<open>~~/src/HOL/Computational_Algebra/Polynomial.thy\<close>.
+
+  [Jacobson1985] Nathan Jacobson, Basic Algebra I, Freeman, 1985.
+
+  [Ballarin1999] Clemens Ballarin, Computer Algebra and Theorem Proving,
+  Author's PhD thesis, 1999. Also University of Cambridge, Computer Laboratory
+  Technical Report number 473.
+\<close>
+
+end
--- a/src/HOL/Auth/Guard/README.html	Fri Aug 19 05:49:17 2022 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,68 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<HTML>
-
-<HEAD>
-  <meta http-equiv="content-type" content="text/html;charset=iso-8859-1">
-  <TITLE>HOL/Auth/Guard/README.html</TITLE>
-</HEAD>
-
-<BODY>
-
-<H1>Protocol-Independent Secrecy Results</H1>
-
-date: april 2002
-author: Frederic Blanqui
-email: blanqui@lri.fr
-webpage: 
-
-<P>The current development is built above the HOL (Higher-Order Logic)
-Isabelle theory and the formalization of protocols introduced by <A
-HREF="http://www.cl.cam.ac.uk/users/lcp/">Larry Paulson</A>.  More details are
-in his paper <A
-HREF="http://www.cl.cam.ac.uk/users/lcp/papers/Auth/jcs.pdf">
-The Inductive approach
-to verifying cryptographic protocols</A> (J. Computer Security 6, pages
-85-128, 1998).
-
-<P>
-This directory contains a number of files:
-
-<UL>
-<LI>Extensions.thy contains extensions of Larry Paulson's files with many useful
-lemmas.
-
-<LI>Analz contains an important theorem about the decomposition of analz
-between pparts (pairs) and kparts (messages that are not pairs).
-
-<LI>Guard contains the protocol-independent secrecy theorem for nonces.
-<LI>GuardK is the same for keys.
-<LI>Guard_Public extends Guard and GuardK for public-key protocols.
-<LI>Guard_Shared extends Guard and GuardK for symmetric-key protocols.
-
-<LI>List_Msg contains definitions on lists (inside messages).
-
-<LI>P1 contains the definition of the protocol P1 and the proof of its
-properties (strong forward integrity, insertion resilience, truncation
-resilience, data confidentiality and non-repudiability)
-
-<LI>P2 is the same for the protocol P2
-
-<LI>NS_Public is for Needham-Schroeder-Lowe
-<LI>OtwayRees is for Otway-Rees
-<LI>Yahalom is for Yahalom
-
-<LI>Proto contains a more precise formalization of protocols with rules
-and a protocol-independent theorem for proving guardness from a preservation
-property. It also contains the proofs for Needham-Schroeder as an example.
-</UL>
-
-<HR>
-<P>Last modified 20 August 2002
-
-<ADDRESS>
-<A HREF="http://www.lri.fr/~blanqui/">Frederic Blanqui</A>,
-<A HREF="mailto:blanqui@lri.fr">blanqui@lri.fr</A>
-</ADDRESS>
-</BODY>
-</HTML>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Auth/Guard/README_Guard.thy	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,57 @@
+theory README_Guard imports Main
+begin
+
+section \<open>Protocol-Independent Secrecy Results\<close>
+
+text \<open>
+  \<^item> date: April 2002
+  \<^item> author: Frederic Blanqui
+  \<^item> email: blanqui@lri.fr
+
+  The current development is built above the HOL (Higher-Order Logic) Isabelle
+  theory and the formalization of protocols introduced by Larry Paulson. More
+  details are in his paper
+  \<^url>\<open>https://www.cl.cam.ac.uk/users/lcp/papers/Auth/jcs.pdf\<close>: \<^emph>\<open>The Inductive
+  approach to verifying cryptographic protocols\<close> (J. Computer Security 6,
+  pages 85-128, 1998).
+
+  This directory contains a number of files:
+
+    \<^item> \<^file>\<open>Extensions.thy\<close> contains extensions of Larry Paulson's files with
+      many useful lemmas.
+
+    \<^item> \<^file>\<open>Analz.thy\<close> contains an important theorem about the decomposition of
+      analz between pparts (pairs) and kparts (messages that are not pairs).
+
+    \<^item> \<^file>\<open>Guard.thy\<close> contains the protocol-independent secrecy theorem for
+      nonces.
+
+    \<^item> \<^file>\<open>GuardK.thy\<close> is the same for keys.
+
+    \<^item> \<^file>\<open>Guard_Public.thy\<close> extends \<^file>\<open>Guard.thy\<close> and \<^file>\<open>GuardK.thy\<close> for
+      public-key protocols.
+
+    \<^item> \<^file>\<open>Guard_Shared.thy\<close> extends \<^file>\<open>Guard.thy\<close> and \<^file>\<open>GuardK.thy\<close> for
+      symmetric-key protocols.
+
+    \<^item> \<^file>\<open>List_Msg.thy\<close> contains definitions on lists (inside messages).
+
+    \<^item> \<^file>\<open>P1.thy\<close> contains the definition of the protocol P1 and the proof of
+      its properties (strong forward integrity, insertion resilience,
+      truncation resilience, data confidentiality and non-repudiability).
+
+    \<^item> \<^file>\<open>P2.thy\<close> is the same for the protocol P2
+
+    \<^item> \<^file>\<open>Guard_NS_Public.thy\<close> is for Needham-Schroeder-Lowe
+
+    \<^item> \<^file>\<open>Guard_OtwayRees.thy\<close> is for Otway-Rees
+
+    \<^item> \<^file>\<open>Guard_Yahalom.thy\<close> is for Yahalom
+
+    \<^item> \<^file>\<open>Proto.thy\<close> contains a more precise formalization of protocols with
+      rules and a protocol-independent theorem for proving guardness from a
+      preservation property. It also contains the proofs for Needham-Schroeder
+      as an example.
+\<close>
+
+end
--- a/src/HOL/Auth/README.html	Fri Aug 19 05:49:17 2022 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,53 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<HTML>
-
-<HEAD>
-  <meta http-equiv="content-type" content="text/html;charset=iso-8859-1">
-  <TITLE>HOL/Auth/README</TITLE>
-</HEAD>
-
-<BODY>
-
-<H1>Auth--The Inductive Approach to Verifying Security Protocols</H1>
-
-<P>Cryptographic protocols are of major importance, especially with the
-growing use of the Internet.  This directory demonstrates the ``inductive
-method'' of protocol verification, which is described in <A
-HREF="http://www.cl.cam.ac.uk/users/lcp/papers/protocols.html">various
-papers</A>.  The operational semantics of protocol participants is defined
-inductively.
-
-<P>This directory contains proofs concerning
-
-<UL>
-<LI>three versions of the Otway-Rees protocol
-
-<LI>the Needham-Schroeder shared-key protocol
-
-<LI>the Needham-Schroeder public-key protocol (original and with Lowe's
-modification)
-
-<LI>two versions of Kerberos: the simplified form published in the BAN paper
-	and also the full protocol (Kerberos IV)
-
-<LI>three versions of the Yahalom protocol, including a bad one that 
-	illustrates the purpose of the Oops rule
-
-<LI>a novel recursive authentication protocol 
-
-<LI>the Internet protocol TLS
-
-<LI>The certified e-mail protocol of Abadi et al.
-</UL>
-
-<P>Frederic Blanqui has contributed a theory of guardedness, which is
-demonstrated by proofs of some roving agent protocols.
-
-<ADDRESS>
-<A
-HREF="http://www.cl.cam.ac.uk/users/lcp/">Larry Paulson</A>,
-<A HREF="mailto:lcp@cl.cam.ac.uk">lcp@cl.cam.ac.uk</A>
-</ADDRESS>
-</BODY>
-</HTML>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Auth/README.thy	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,38 @@
+theory README imports Main
+begin
+
+section \<open>Auth--The Inductive Approach to Verifying Security Protocols\<close>
+
+text \<open>
+  Cryptographic protocols are of major importance, especially with the growing
+  use of the Internet. This directory demonstrates the ``inductive method'' of
+  protocol verification, which is described in various papers:
+  \<^url>\<open>http://www.cl.cam.ac.uk/users/lcp/papers/protocols.html\<close>. The operational
+  semantics of protocol participants is defined inductively.
+
+  This directory contains proofs concerning:
+
+    \<^item> three versions of the Otway-Rees protocol
+
+    \<^item> the Needham-Schroeder shared-key protocol
+
+    \<^item> the Needham-Schroeder public-key protocol (original and with Lowe's
+      modification)
+
+    \<^item> two versions of Kerberos: the simplified form published in the BAN paper
+      and also the full protocol (Kerberos IV)
+
+    \<^item> three versions of the Yahalom protocol, including a bad one that
+      illustrates the purpose of the Oops rule
+
+    \<^item> a novel recursive authentication protocol
+
+    \<^item> the Internet protocol TLS
+
+    \<^item> the certified e-mail protocol of Abadi et al.
+
+  Frederic Blanqui has contributed a theory of guardedness, which is
+  demonstrated by proofs of some roving agent protocols.
+\<close>
+
+end
--- a/src/HOL/HOLCF/README.html	Fri Aug 19 05:49:17 2022 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,54 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<html>
-
-<head>
-  <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
-  <title>HOLCF/README</title>
-</head>
-
-<body>
-
-<h3>HOLCF: A higher-order version of LCF based on Isabelle/HOL</h3>
-
-HOLCF is the definitional extension of Church's Higher-Order Logic with
-Scott's Logic for Computable Functions that has been implemented in the
-theorem prover Isabelle.  This results in a flexible setup for reasoning
-about functional programs. HOLCF supports standard domain theory (in particular
-fixpoint reasoning and recursive domain equations) but also coinductive
-arguments about lazy datatypes.
-
-<p>
-
-The most recent description of HOLCF is found here:
-
-<ul>
-  <li><a href="http://web.cecs.pdx.edu/~brianh/phdthesis.html">HOLCF '11: A Definitional Domain Theory for Verifying Functional Programs</a>, <br>
-  Brian Huffman.<br>
-  Ph.D. thesis, Portland State University.<br>
-  Year: 2012.
-</ul>
-
-Descriptions of earlier versions can also be found online:
-
-<ul>
-  <li><a href="/~nipkow/pubs/jfp99.html">HOLCF = HOL+LCF</a>
-</ul>
-
-A detailed description (in German) of the entire development can be found in:
-
-<ul>
-  <li><a href="http://www4.informatik.tu-muenchen.de/publ/papers/Diss_Regensbu.pdf">HOLCF: eine konservative Erweiterung von HOL um LCF</a>, <br>
-      Franz Regensburger.<br>
-      Dissertation Technische Universit&auml;t M&uuml;nchen.<br>
-      Year: 1994.
-</ul>
-
-A short survey is available in:
-<ul>
-  <li><a href="http://www4.informatik.tu-muenchen.de/publ/papers/Regensburger_HOLT1995.pdf">HOLCF: Higher Order Logic of Computable Functions</a><br>
-</ul>
-
-</body>
-
-</html>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/HOLCF/README.thy	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,37 @@
+theory README imports Main
+begin
+
+section \<open>HOLCF: A higher-order version of LCF based on Isabelle/HOL\<close>
+
+text \<open>
+  HOLCF is the definitional extension of Church's Higher-Order Logic with
+  Scott's Logic for Computable Functions that has been implemented in the
+  theorem prover Isabelle. This results in a flexible setup for reasoning
+  about functional programs. HOLCF supports standard domain theory (in
+  particular fixpoint reasoning and recursive domain equations) but also
+  coinductive arguments about lazy datatypes.
+
+  The most recent description of HOLCF is found here:
+
+    \<^item> \<^emph>\<open>HOLCF '11: A Definitional Domain Theory for Verifying Functional
+    Programs\<close> \<^url>\<open>http://web.cecs.pdx.edu/~brianh/phdthesis.html\<close>, Brian
+    Huffman. Ph.D. thesis, Portland State University. 2012.
+
+  Descriptions of earlier versions can also be found online:
+
+    \<^item> \<^emph>\<open>HOLCF = HOL+LCF\<close> \<^url>\<open>https://www21.in.tum.de/~nipkow/pubs/jfp99.html\<close>
+
+  A detailed description (in German) of the entire development can be found
+  in:
+
+    \<^item> \<^emph>\<open>HOLCF: eine konservative Erweiterung von HOL um LCF\<close>
+    \<^url>\<open>http://www4.informatik.tu-muenchen.de/publ/papers/Diss_Regensbu.pdf\<close>,
+    Franz Regensburger. Dissertation Technische Universität München. 1994.
+
+  A short survey is available in:
+
+    \<^item> \<^emph>\<open>HOLCF: Higher Order Logic of Computable Functions\<close>
+    \<^url>\<open>http://www4.informatik.tu-muenchen.de/publ/papers/Regensburger_HOLT1995.pdf\<close>
+\<close>
+
+end
--- a/src/HOL/Hoare/README.html	Fri Aug 19 05:49:17 2022 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,119 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<HTML>
-
-<HEAD>
-  <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
-  <TITLE>HOL/Hoare/ReadMe</TITLE>
-</HEAD>
-
-<BODY>
-
-<H2>Hoare Logic for a Simple WHILE Language</H2>
-
-<H3>Language and logic</H3>
-
-This directory contains an implementation of Hoare logic for a simple WHILE
-language. The constructs are
-<UL>
-<LI> <kbd>SKIP</kbd>
-<LI> <kbd>_ := _</kbd>
-<LI> <kbd>_ ; _</kbd>
-<LI> <kbd>IF _ THEN _ ELSE _ FI</kbd>
-<LI> <kbd>WHILE _ INV {_} DO _ OD</kbd>
-</UL>
-Note that each WHILE-loop must be annotated with an invariant.
-<P>
-
-After loading theory Hoare, you can state goals of the form
-<PRE>
-VARS x y ... {P} prog {Q}
-</PRE>
-where <kbd>prog</kbd> is a program in the above language, <kbd>P</kbd> is the
-precondition, <kbd>Q</kbd> the postcondition, and <kbd>x y ...</kbd> is the
-list of all <i>program variables</i> in <kbd>prog</kbd>. The latter list must
-be nonempty and it must include all variables that occur on the left-hand
-side of an assignment in <kbd>prog</kbd>. Example:
-<PRE>
-VARS x {x = a} x := x+1 {x = a+1}
-</PRE>
-The (normal) variable <kbd>a</kbd> is merely used to record the initial
-value of <kbd>x</kbd> and is not a program variable. Pre/post conditions
-can be arbitrary HOL formulae mentioning both program variables and normal
-variables.
-<P>
-
-The implementation hides reasoning in Hoare logic completely and provides a
-method <kbd>vcg</kbd> for transforming a goal in Hoare logic into an
-equivalent list of verification conditions in HOL:
-<PRE>
-apply vcg
-</PRE>
-If you want to simplify the resulting verification conditions at the same
-time:
-<PRE>
-apply vcg_simp
-</PRE>
-which, given the example goal above, solves it completely. For further
-examples see <a href="Examples.html">Examples</a>.
-<P>
-
-IMPORTANT:
-This is a logic of partial correctness. You can only prove that your program
-does the right thing <i>if</i> it terminates, but not <i>that</i> it
-terminates.
-A logic of total correctness is also provided and described below.
-
-<H3>Total correctness</H3>
-
-To prove termination, each WHILE-loop must be annotated with a variant:
-<UL>
-<LI> <kbd>WHILE _ INV {_} VAR {_} DO _ OD</kbd>
-</UL>
-A variant is an expression with type <kbd>nat</kbd>, which may use program
-variables and normal variables.
-<P>
-
-A total-correctness goal has the form
-<PRE>
-VARS x y ... [P] prog [Q]
-</PRE>
-enclosing the pre- and postcondition in square brackets.
-<P>
-
-Methods <kbd>vcg_tc</kbd> and <kbd>vcg_tc_simp</kbd> can be used to derive
-verification conditions.
-<P>
-
-From a total-correctness proof, a function can be extracted which
-for every input satisfying the precondition returns an output
-satisfying the postcondition.
-
-<H3>Notes on the implementation</H3>
-
-The implementation loosely follows
-<P>
-Mike Gordon.
-<cite>Mechanizing Programming Logics in Higher Order Logic.</cite><BR>
-University of Cambridge, Computer Laboratory, TR 145, 1988.
-<P>
-published as
-<P>
-Mike Gordon.
-<cite>Mechanizing Programming Logics in Higher Order Logic.</cite><BR>
-In
-<cite>Current Trends in Hardware Verification and Automated Theorem Proving
-</cite>,<BR>
-edited by G. Birtwistle and P.A. Subrahmanyam, Springer-Verlag, 1989. 
-<P>
-
-The main differences: the state is modelled as a tuple as suggested in
-<P>
-J. von Wright and J. Hekanaho and P. Luostarinen and T. Langbacka.
-<cite>Mechanizing Some Advanced Refinement Concepts</cite>.
-Formal Methods in System Design, 3, 1993, 49-81.
-<P>
-and the embeding is deep, i.e. there is a concrete datatype of programs. The
-latter is not really necessary.
-</BODY>
-</HTML>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Hoare/README.thy	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,93 @@
+theory README imports Main
+begin
+
+section \<open>Hoare Logic for a Simple WHILE Language\<close>
+
+subsection \<open>Language and logic\<close>
+
+text \<open>
+  This directory contains an implementation of Hoare logic for a simple WHILE
+  language. The constructs are
+
+    \<^item> \<^verbatim>\<open>SKIP\<close>
+    \<^item> \<^verbatim>\<open>_ := _\<close>
+    \<^item> \<^verbatim>\<open>_ ; _\<close>
+    \<^item> \<^verbatim>\<open>IF _ THEN _ ELSE _ FI\<close>
+    \<^item> \<^verbatim>\<open>WHILE _ INV {_} DO _ OD\<close>
+
+  Note that each WHILE-loop must be annotated with an invariant.
+
+  Within the context of theory \<^verbatim>\<open>Hoare\<close>, you can state goals of the form
+    @{verbatim [display] \<open>VARS x y ... {P} prog {Q}\<close>}
+  where \<^verbatim>\<open>prog\<close> is a program in the above language, \<^verbatim>\<open>P\<close> is the precondition,
+  \<^verbatim>\<open>Q\<close> the postcondition, and \<^verbatim>\<open>x y ...\<close> is the list of all \<^emph>\<open>program
+  variables\<close> in \<^verbatim>\<open>prog\<close>. The latter list must be nonempty and it must include
+  all variables that occur on the left-hand side of an assignment in \<^verbatim>\<open>prog\<close>.
+  Example:
+    @{verbatim [display] \<open>VARS x {x = a} x := x+1 {x = a+1}\<close>}
+  The (normal) variable \<^verbatim>\<open>a\<close> is merely used to record the initial value of
+  \<^verbatim>\<open>x\<close> and is not a program variable. Pre/post conditions can be arbitrary HOL
+  formulae mentioning both program variables and normal variables.
+
+  The implementation hides reasoning in Hoare logic completely and provides a
+  method \<^verbatim>\<open>vcg\<close> for transforming a goal in Hoare logic into an equivalent list
+  of verification conditions in HOL: \<^theory_text>\<open>apply vcg\<close>
+
+  If you want to simplify the resulting verification conditions at the same
+  time: \<^theory_text>\<open>apply vcg_simp\<close> which, given the example goal above, solves it
+  completely. For further examples see \<^file>\<open>Examples.thy\<close>.
+
+  \<^bold>\<open>IMPORTANT:\<close>
+  This is a logic of partial correctness. You can only prove that your program
+  does the right thing \<^emph>\<open>if\<close> it terminates, but not \<^emph>\<open>that\<close> it terminates. A
+  logic of total correctness is also provided and described below.
+\<close>
+
+
+subsection \<open>Total correctness\<close>
+
+text \<open>
+  To prove termination, each WHILE-loop must be annotated with a variant:
+
+    \<^item> \<^verbatim>\<open>WHILE _ INV {_} VAR {_} DO _ OD\<close>
+
+  A variant is an expression with type \<^verbatim>\<open>nat\<close>, which may use program variables
+  and normal variables.
+
+  A total-correctness goal has the form \<^verbatim>\<open>VARS x y ... [P] prog [Q]\<close> enclosing
+  the pre- and postcondition in square brackets.
+
+  Methods \<^verbatim>\<open>vcg_tc\<close> and \<^verbatim>\<open>vcg_tc_simp\<close> can be used to derive verification
+  conditions.
+
+  From a total-correctness proof, a function can be extracted which for every
+  input satisfying the precondition returns an output satisfying the
+  postcondition.
+\<close>
+
+
+subsection \<open>Notes on the implementation\<close>
+
+text \<open>
+  The implementation loosely follows
+
+  Mike Gordon. \<^emph>\<open>Mechanizing Programming Logics in Higher Order Logic\<close>.
+  University of Cambridge, Computer Laboratory, TR 145, 1988.
+
+  published as
+
+  Mike Gordon. \<^emph>\<open>Mechanizing Programming Logics in Higher Order Logic\<close>. In
+  \<^emph>\<open>Current Trends in Hardware Verification and Automated Theorem Proving\<close>,
+  edited by G. Birtwistle and P.A. Subrahmanyam, Springer-Verlag, 1989.
+
+  The main differences: the state is modelled as a tuple as suggested in
+
+  J. von Wright and J. Hekanaho and P. Luostarinen and T. Langbacka.
+  \<^emph>\<open>Mechanizing Some Advanced Refinement Concepts\<close>. Formal Methods in System
+  Design, 3, 1993, 49-81.
+
+  and the embedding is deep, i.e. there is a concrete datatype of programs. The
+  latter is not really necessary.
+\<close>
+
+end
--- a/src/HOL/Library/README.html	Fri Aug 19 05:49:17 2022 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,72 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<html>
-
-<head>
-  <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
-  <title>HOL-Library/README</title>
-</head>
-
-<body>
-
-<h1>HOL-Library: supplemental theories for main Isabelle/HOL</h1>
-
-This is a collection of generic theories that may be used together
-with main Isabelle/HOL.
-
-<p>
-
-Addition of new theories should be done with some care, as the
-``module system'' of Isabelle is rather simplistic.  The following
-guidelines may be helpful to achieve maximum re-usability and minimum
-clashes with existing developments.
-
-<dl>
-
-<dt><strong>Examples</strong>
-
-<dd>Theories should be as ``generic'' as is sensible.  Unused (or
-rather unusable?) theories should be avoided; common applications
-should actually refer to the present theory.  Small example uses may
-be included in the library as well, but should be put in a separate
-theory, such as <tt>Foobar</tt> accompanied by
-<tt>Foobar_Examples</tt>.
-
-<dt><strong>Theory names</strong>
-
-<dd>The theory loader name space is <em>flat</em>, so use sufficiently
-long and descriptive names to reduce the danger of clashes with the
-user's own theories.  The convention for theory names is as follows:
-<tt>Foobar_Doobar</tt> (this looks best in LaTeX output).
-
-<dt><strong>Names of logical items</strong>
-
-<dd>There are separate hierarchically structured name spaces for
-types, constants, theorems etc.  Nevertheless, some care should be
-taken, as the name spaces are always ``open''.  Use adequate names;
-avoid unreadable abbreviations.  The general naming convention is to
-separate word constituents by underscores, as in <tt>foo_bar</tt> or
-<tt>Foo_Bar</tt> (this looks best in LaTeX output).
-
-<dt><strong>Global context declarations</strong>
-
-<dd>Only items introduced in the present theory should be declared
-globally (e.g. as Simplifier rules).  Note that adding and deleting
-rules from parent theories may result in strange behavior later,
-depending on the user's arrangement of import lists.
-
-<dt><strong>Spacing</strong>
-
-<dd>Isabelle is able to produce a high-quality LaTeX document from the
-theory sources, provided some minor issues are taken care of.  In
-particular, spacing and line breaks are directly taken from source
-text.  Incidentally, output looks very good if common type-setting
-conventions are observed: put a single space <em>after</em> each
-punctuation character ("<tt>,</tt>", "<tt>.</tt>", etc.), but none
-before it; do not extra spaces inside of parentheses; do not attempt
-to simulate table markup with spaces, avoid ``hanging'' indentations.
-
-</dl>
-
-</body>
-</html>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/README.thy	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,43 @@
+theory README imports Main
+begin
+
+section \<open>HOL-Library: supplemental theories for main Isabelle/HOL\<close>
+
+text \<open>
+  This is a collection of generic theories that may be used together with main
+  Isabelle/HOL.
+
+  Addition of new theories should be done with some care, as the ``module
+  system'' of Isabelle is rather simplistic. The following guidelines may be
+  helpful to achieve maximum re-usability and minimum clashes with existing
+  developments.
+
+  \<^descr>[Examples] Theories should be as ``generic'' as is sensible. Unused (or
+  rather unusable?) theories should be avoided; common applications should
+  actually refer to the present theory. Small example uses may be included in
+  the library as well, but should be put in a separate theory, such as
+  \<^verbatim>\<open>Foobar.thy\<close> accompanied by \<^verbatim>\<open>Foobar_Examples.thy\<close>.
+
+  \<^descr>[Names of logical items] There are separate hierarchically structured name
+  spaces for types, constants, theorems etc. Nevertheless, some care should be
+  taken, as the name spaces are always ``open''. Use adequate names; avoid
+  unreadable abbreviations. The general naming convention is to separate word
+  constituents by underscores, as in \<^verbatim>\<open>foo_bar\<close> or \<^verbatim>\<open>Foo_Bar\<close> (this looks best
+  in LaTeX output).
+
+  \<^descr>[Global context declarations] Only items introduced in the present theory
+  should be declared globally (e.g. as Simplifier rules). Note that adding and
+  deleting rules from parent theories may result in strange behavior later,
+  depending on the user's arrangement of import lists.
+
+  \<^descr>[Spacing] Isabelle is able to produce a high-quality LaTeX document from
+  the theory sources, provided some minor issues are taken care of. In
+  particular, spacing and line breaks are directly taken from source text.
+  Incidentally, output looks very good if common type-setting conventions are
+  observed: put a single space \<^emph>\<open>after\<close> each punctuation character ("\<^verbatim>\<open>,\<close>",
+  "\<^verbatim>\<open>.\<close>", etc.), but none before it; do not add extra spaces inside
+  parentheses; do not attempt to simulate table markup with spaces; avoid
+  ``hanging'' indentations.
+\<close>
+
+end
--- a/src/HOL/ROOT	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/HOL/ROOT	Sat Aug 20 21:34:55 2022 +0200
@@ -58,6 +58,8 @@
   description "
     Classical Higher-order Logic -- batteries included.
   "
+  theories [document = false]
+    README
   theories
     Library
     (*conflicting type class instantiations and dependent applications*)
@@ -326,6 +328,8 @@
     Verification of imperative programs (verification conditions are generated
     automatically from pre/post conditions and loop invariants).
   "
+  theories [document = false]
+    README
   theories
     Examples
     ExamplesAbort
@@ -406,6 +410,8 @@
   sessions
     "HOL-Cardinals"
     "HOL-Combinatorics"
+  theories [document = false]
+    README
   theories
     (* Orders and Lattices *)
     Galois_Connection    (* Knaster-Tarski theorem and Galois connections *)
@@ -429,10 +435,15 @@
   "
   sessions "HOL-Library"
   directories "Smartcard" "Guard"
+  theories [document = false]
+    README
   theories
     Auth_Shared
     Auth_Public
     "Smartcard/Auth_Smartcard"
+  theories [document = false]
+    "Guard/README_Guard"
+  theories
     "Guard/Auth_Guard_Shared"
     "Guard/Auth_Guard_Public"
   document_files "root.tex"
@@ -445,10 +456,15 @@
     Verifying security protocols using Chandy and Misra's UNITY formalism.
   "
   directories "Simple" "Comp"
+  theories [document = false]
+    README
   theories
     (*Basic meta-theory*)
     UNITY_Main
 
+  theories [document = false]
+    "Simple/README_Simple"
+  theories
     (*Simple examples: no composition*)
     "Simple/Deadlock"
     "Simple/Common"
@@ -463,6 +479,9 @@
     (*Verifying security protocols using UNITY*)
     "Simple/NSP_Bad"
 
+  theories [document = false]
+    "Comp/README_Comp"
+  theories
     (*Example of composition*)
     "Comp/Handshake"
 
@@ -783,7 +802,9 @@
   description "
     Lamport's Temporal Logic of Actions.
   "
-  theories TLA
+  theories
+    README
+    TLA
 
 session "HOL-TLA-Inc" in "TLA/Inc" = "HOL-TLA" +
   theories Inc
@@ -1083,6 +1104,8 @@
   "
   sessions
     "HOL-Library"
+  theories [document = false]
+    README
   theories
     HOLCF (global)
   document_files "root.tex"
--- a/src/HOL/TLA/README.html	Fri Aug 19 05:49:17 2022 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,68 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<HTML>
-
-<HEAD>
-  <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
-  <TITLE>HOL/TLA</TITLE>
-</HEAD>
-
-<BODY>
-
-<H2>TLA: Lamport's Temporal Logic of Actions</H2>
-
-<A HREF="http://www.research.digital.com/SRC/personal/Leslie_Lamport/tla/tla.html">TLA</A>
-is a linear-time temporal logic introduced by Leslie Lamport in
-<EM>The Temporal Logic of Actions</EM> (ACM TOPLAS 16(3), 1994,
-872-923). Unlike other temporal logics, both systems and properties
-are represented as logical formulas, and logical connectives such as
-implication, conjunction, and existential quantification represent
-structural relations such as refinement, parallel composition, and
-hiding. TLA has been applied to numerous case studies.
-
-<P>This directory formalizes TLA in Isabelle/HOL, as follows:
-<UL>
-<LI>Theory <A HREF="Intensional.html">Intensional</A> prepares the
-  ground by introducing basic syntax for "lifted", possibl-world based 
-  logics.
-<LI>Theories <A HREF="Stfun.html">Stfun</A> and
-  <A HREF="Action.html">Action</A> represent the state and transition
-  level formulas of TLA, evaluated over single states and pairs of
-  states.
-<LI>Theory <A HREF="Init.html">Init</A> introduces temporal logic
-  and defines conversion functions from nontemporal to temporal
-  formulas.
-<LI>Theory <A HREF="TLA.html">TLA</A> axiomatizes proper temporal
-  logic.
-</UL>
-
-Please consult the
-<A HREF="http://www.pst.informatik.uni-muenchen.de/~merz/isabelle/IsaTLADesign.ps">design notes</A>
-for further information regarding the setup and use of this encoding
-of TLA.
-
-<P>
-The theories are accompanied by a small number of examples:
-<UL>
-<LI><A HREF="Inc/index.html">Inc</A>: Lamport's <EM>increment</EM>
-  example, a standard TLA benchmark, illustrates an elementary TLA
-  proof.
-<LI><A HREF="Buffer/index.html">Buffer</A>: a proof that two buffers
-  in a row implement a single buffer, uses a simple refinement
-  mapping.
-<LI><A HREF="Memory/index.html">Memory</A>: a verification of (the
-  untimed part of) Broy and Lamport's <em>RPC-Memory</em> case study,
-  more fully explained in LNCS 1169 (the 
-  <A HREF="http://www.pst.informatik.uni-muenchen.de/~merz/papers/RPCMemory.html">TLA
-  solution</A> is available separately).
-</UL>
-
-<HR>
-
-<ADDRESS>
-<A HREF="mailto:merz@informatik.uni-muenchen.de">Stephan Merz</A>
-</ADDRESS>
-<!-- hhmts start -->
-Last modified: Sat Mar  5 00:54:49 CET 2005
-<!-- hhmts end -->
-</BODY></HTML>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TLA/README.thy	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,48 @@
+theory README imports Main
+begin
+
+section \<open>TLA: Lamport's Temporal Logic of Actions\<close>
+
+text \<open>
+  TLA \<^url>\<open>http://www.research.digital.com/SRC/personal/Leslie_Lamport/tla/tla.html\<close>
+  is a linear-time temporal logic introduced by Leslie Lamport in \<^emph>\<open>The
+  Temporal Logic of Actions\<close> (ACM TOPLAS 16(3), 1994, 872-923). Unlike other
+  temporal logics, both systems and properties are represented as logical
+  formulas, and logical connectives such as implication, conjunction, and
+  existential quantification represent structural relations such as
+  refinement, parallel composition, and hiding. TLA has been applied to
+  numerous case studies.
+
+  This directory formalizes TLA in Isabelle/HOL, as follows:
+
+    \<^item> \<^file>\<open>Intensional.thy\<close> prepares the ground by introducing basic syntax for
+      "lifted", possible-world based logics.
+
+    \<^item> \<^file>\<open>Stfun.thy\<close> and \<^file>\<open>Action.thy\<close> represent the state and transition
+      level formulas of TLA, evaluated over single states and pairs of states.
+
+    \<^item> \<^file>\<open>Init.thy\<close> introduces temporal logic and defines conversion functions
+      from nontemporal to temporal formulas.
+
+    \<^item> \<^file>\<open>TLA.thy\<close> axiomatizes proper temporal logic.
+
+
+  Please consult the \<^emph>\<open>design notes\<close>
+  \<^url>\<open>http://www.pst.informatik.uni-muenchen.de/~merz/isabelle/IsaTLADesign.ps\<close>
+  for further information regarding the setup and use of this encoding of TLA.
+
+  The theories are accompanied by a small number of examples:
+
+    \<^item> \<^dir>\<open>Inc\<close>: Lamport's \<^emph>\<open>increment\<close> example, a standard TLA benchmark,
+      illustrates an elementary TLA proof.
+
+    \<^item> \<^dir>\<open>Buffer\<close>: a proof that two buffers in a row implement a single buffer,
+      uses a simple refinement mapping.
+
+    \<^item> \<^dir>\<open>Memory\<close>: a verification of (the untimed part of) Broy and Lamport's
+    \<^emph>\<open>RPC-Memory\<close> case study, more fully explained in LNCS 1169 (the \<^emph>\<open>TLA
+    solution\<close> is available separately from
+    \<^url>\<open>http://www.pst.informatik.uni-muenchen.de/~merz/papers/RPCMemory.html\<close>).
+\<close>
+
+end
--- a/src/HOL/Tools/Mirabelle/mirabelle.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/HOL/Tools/Mirabelle/mirabelle.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -17,7 +17,7 @@
     (for {
       file <-
         File.find_files(Path.explode("~~/src/HOL/Tools/Mirabelle").file,
-          pred = _.getName.endsWith(".ML"))
+          pred = file => File.is_ML(file.getName))
       line <- split_lines(File.read(file))
       name <- line match { case Pattern(a) => Some(a) case _ => None }
     } yield name).sorted
--- a/src/HOL/UNITY/Comp/README.html	Fri Aug 19 05:49:17 2022 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<HTML>
-
-<HEAD>
-  <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
-  <TITLE>HOL/UNITY/README</TITLE>
-</HEAD>
-
-<BODY>
-
-<H2>UNITY: Examples Involving Program Composition</H2>
-
-<P>
-The directory presents verification examples involving program composition.
-They are mostly taken from the works of Chandy, Charpentier and Chandy.
-
-<UL>
-<LI>examples of <em>universal properties</em>:
-the counter (<A HREF="Counter.thy"><CODE>Counter.thy</CODE></A>)
-and priority system (<A HREF="Priority.thy"><CODE>Priority.thy</CODE></A>)
-
-<LI>the allocation system (<A HREF="Alloc.thy"><CODE>Alloc.thy</CODE></A>)
-
-<LI>client implementation (<A HREF="Client.thy"><CODE>Client.thy</CODE></A>)
-
-<LI>allocator implementation (<A HREF="AllocImpl.thy"><CODE>AllocImpl.thy</CODE></A>)
-
-<LI>the handshake protocol
-(<A HREF="Handshake.thy"><CODE>Handshake.thy</CODE></A>)
-
-<LI>the timer array (demonstrates arrays of processes)
-(<A HREF="TimerArray.thy"><CODE>TimerArray.thy</CODE></A>)
-</UL>
-
-<P> Safety proofs (invariants) are often proved automatically.  Progress
-proofs involving ENSURES can sometimes be proved automatically.  The
-level of automation appears to be about the same as in HOL-UNITY by Flemming
-Andersen et al.
-
-<ADDRESS>
-<A NAME="lcp@cl.cam.ac.uk" HREF="mailto:lcp@cl.cam.ac.uk">lcp@cl.cam.ac.uk</A>
-</ADDRESS>
-</BODY>
-</HTML>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/UNITY/Comp/README_Comp.thy	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,24 @@
+theory README_Comp imports Main
+begin
+
+section \<open>UNITY: Examples Involving Program Composition\<close>
+
+text \<open>
+  The directory presents verification examples involving program composition.
+  They are mostly taken from the works of Chandy, Charpentier and Chandy.
+
+  \<^item> examples of \<^emph>\<open>universal properties\<close>: the counter (\<^file>\<open>Counter.thy\<close>) and
+    priority system (\<^file>\<open>Priority.thy\<close>)
+  \<^item> the allocation system (\<^file>\<open>Alloc.thy\<close>)
+  \<^item> client implementation (\<^file>\<open>Client.thy\<close>)
+  \<^item> allocator implementation (\<^file>\<open>AllocImpl.thy\<close>)
+  \<^item> the handshake protocol (\<^file>\<open>Handshake.thy\<close>)
+  \<^item> the timer array (demonstrates arrays of processes) (\<^file>\<open>TimerArray.thy\<close>)
+
+  Safety proofs (invariants) are often proved automatically. Progress proofs
+  involving ENSURES can sometimes be proved automatically. The level of
+  automation appears to be about the same as in HOL-UNITY by Flemming Andersen
+  et al.
+\<close>
+
+end
--- a/src/HOL/UNITY/README.html	Fri Aug 19 05:49:17 2022 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,49 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<HTML>
-
-<HEAD>
-  <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
-  <TITLE>HOL/UNITY/README</TITLE>
-</HEAD>
-
-<BODY>
-
-<H2>UNITY--Chandy and Misra's UNITY formalism</H2>
-
-<P>The book <EM>Parallel Program Design: A Foundation</EM> by Chandy and Misra
-(Addison-Wesley, 1988) presents the UNITY formalism.  UNITY consists of an
-abstract programming language of guarded assignments and a calculus for
-reasoning about such programs.  Misra's 1994 paper "A Logic for Concurrent
-Programming" presents New UNITY, giving more elegant foundations for a more
-general class of languages.  In recent work, Chandy and Sanders have proposed
-new methods for reasoning about systems composed of many components.
-
-<P>This directory formalizes these new ideas for UNITY.  The Isabelle examples
-may seem strange to UNITY traditionalists.  Hand UNITY proofs tend to be
-written in the forwards direction, as in informal mathematics, while Isabelle
-works best in a backwards (goal-directed) style.  Programs are expressed as
-sets of commands, where each command is a relation on states.  Quantification
-over commands using [] is easily expressed.  At present, there are no examples
-of quantification using ||.
-
-<P>A UNITY assertion denotes the set of programs satisfying it, as
-in the propositions-as-types paradigm.  The resulting style is readable if
-unconventional.
-
-<P> Safety proofs (invariants) are often proved automatically.  Progress
-proofs involving ENSURES can sometimes be proved automatically.  The
-level of automation appears to be about the same as in HOL-UNITY by Flemming
-Andersen et al.
-
-<P>
-The directory <A HREF="Simple/"><CODE>Simple</CODE></A>
-presents a few examples, mostly taken from Misra's 1994
-paper, involving single programs.
-The directory <A HREF="Comp/"><CODE>Comp</CODE></A>
-presents examples of proofs involving program composition.
-
-<ADDRESS>
-<A NAME="lcp@cl.cam.ac.uk" HREF="mailto:lcp@cl.cam.ac.uk">lcp@cl.cam.ac.uk</A>
-</ADDRESS>
-</BODY></HTML>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/UNITY/README.thy	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,37 @@
+theory README imports Main
+begin
+
+section \<open>UNITY--Chandy and Misra's UNITY formalism\<close>
+
+text \<open>
+  The book \<^emph>\<open>Parallel Program Design: A Foundation\<close> by Chandy and Misra
+  (Addison-Wesley, 1988) presents the UNITY formalism. UNITY consists of an
+  abstract programming language of guarded assignments and a calculus for
+  reasoning about such programs. Misra's 1994 paper "A Logic for Concurrent
+  Programming" presents New UNITY, giving more elegant foundations for a more
+  general class of languages. In recent work, Chandy and Sanders have proposed
+  new methods for reasoning about systems composed of many components.
+
+  This directory formalizes these new ideas for UNITY. The Isabelle examples
+  may seem strange to UNITY traditionalists. Hand UNITY proofs tend to be
+  written in the forwards direction, as in informal mathematics, while
+  Isabelle works best in a backwards (goal-directed) style. Programs are
+  expressed as sets of commands, where each command is a relation on states.
+  Quantification over commands using \<^verbatim>\<open>[]\<close> is easily expressed. At present,
+  there are no examples of quantification using \<^verbatim>\<open>||\<close>.
+
+  A UNITY assertion denotes the set of programs satisfying it, as in the
+  propositions-as-types paradigm. The resulting style is readable if
+  unconventional.
+
+  Safety proofs (invariants) are often proved automatically. Progress proofs
+  involving ENSURES can sometimes be proved automatically. The level of
+  automation appears to be about the same as in HOL-UNITY by Flemming Andersen
+  et al.
+
+  The directory \<^dir>\<open>Simple\<close> presents a few examples, mostly taken from Misra's
+  1994 paper, involving single programs. The directory \<^dir>\<open>Comp\<close> presents
+  examples of proofs involving program composition.
+\<close>
+
+end
--- a/src/HOL/UNITY/Simple/README.html	Fri Aug 19 05:49:17 2022 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,42 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<HTML>
-
-<HEAD>
-  <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
-  <TITLE>HOL/UNITY/README</TITLE>
-</HEAD>
-
-<BODY>
-
-<H2>UNITY: Examples Involving Single Programs</H2>
-
-<P> The directory presents verification examples that do not involve program
-composition.  They are mostly taken from Misra's 1994 papers on ``New UNITY'':
-<UL>
-<LI>common meeting time (<A HREF="Common.thy"><CODE>Common.thy</CODE></A>)
-
-<LI>the token ring (<A HREF="Token.thy"><CODE>Token.thy</CODE></A>)
-
-<LI>the communication network
-(<A HREF="Network.thy"><CODE>Network.thy</CODE></A>)
-
-<LI>the lift controller (a standard benchmark) (<A HREF="Lift.thy"><CODE>Lift.thy</CODE></A>)
-
-<LI>a mutual exclusion algorithm (<A HREF="Mutex.thy"><CODE>Mutex.thy</CODE></A>)
-
-<LI><EM>n</EM>-process deadlock
-(<A HREF="Deadlock.thy"><CODE>Deadlock.thy</CODE></A>)
-
-<LI>unordered channel (<A HREF="Channel.thy"><CODE>Channel.thy</CODE></A>)
-
-<LI>reachability in directed graphs (section 6.4 of the book) (<A
-HREF="Reach.thy"><CODE>Reach.thy</CODE></A> and
-<A HREF="Reachability.thy"><CODE>Reachability.thy</CODE></A>)
-</UL>
-
-<ADDRESS>
-<A NAME="lcp@cl.cam.ac.uk" HREF="mailto:lcp@cl.cam.ac.uk">lcp@cl.cam.ac.uk</A>
-</ADDRESS>
-</BODY>
-</HTML>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/UNITY/Simple/README_Simple.thy	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,22 @@
+theory README_Simple imports Main
+begin
+
+section \<open>UNITY: Examples Involving Single Programs\<close>
+
+text \<open>
+  The directory presents verification examples that do not involve program
+  composition. They are mostly taken from Misra's 1994 papers on ``New
+  UNITY'':
+
+    \<^item> common meeting time (\<^file>\<open>Common.thy\<close>)
+    \<^item> the token ring (\<^file>\<open>Token.thy\<close>)
+    \<^item> the communication network (\<^file>\<open>Network.thy\<close>)
+    \<^item> the lift controller (a standard benchmark) (\<^file>\<open>Lift.thy\<close>)
+    \<^item> a mutual exclusion algorithm (\<^file>\<open>Mutex.thy\<close>)
+    \<^item> \<open>n\<close>-process deadlock (\<^file>\<open>Deadlock.thy\<close>)
+    \<^item> unordered channel (\<^file>\<open>Channel.thy\<close>)
+    \<^item> reachability in directed graphs (section 6.4 of the book)
+      (\<^file>\<open>Reach.thy\<close> and \<^file>\<open>Reachability.thy\<close>)
+\<close>
+
+end
--- a/src/Pure/Admin/build_jcef.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Admin/build_jcef.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -71,11 +71,11 @@
           for {
             file <- File.find_files(platform_dir.file).iterator
             name = file.getName
-            if name.endsWith(".dll") || name.endsWith(".exe")
+            if File.is_dll(name) || File.is_exe(name)
           } File.set_executable(File.path(file), true)
 
           val classpath =
-            File.find_files(platform_dir.file, pred = (file => file.getName.endsWith(".jar")))
+            File.find_files(platform_dir.file, pred = file => File.is_jar(file.getName))
               .flatMap(file => File.relative_path(platform_dir, File.path(file)))
               .map(jar => "        " + quote("$ISABELLE_JCEF_HOME/" + jar.implode))
               .mkString(" \\\n")
--- a/src/Pure/Admin/build_jedit.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Admin/build_jedit.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -161,7 +161,7 @@
       for {
         file <- File.find_files(Path.explode("~~/src/Tools/jEdit/patches").file).iterator
         name = file.getName
-        if !name.endsWith("~") && !name.endsWith(".orig")
+        if !File.is_backup(name)
       } {
         progress.bash("patch -p2 < " + File.bash_path(File.path(file)),
           cwd = source_dir.file, echo = true).check
@@ -181,7 +181,7 @@
 
       val java_sources =
         for {
-          file <- File.find_files(org_source_dir.file, file => file.getName.endsWith(".java"))
+          file <- File.find_files(org_source_dir.file, file => File.is_java(file.getName))
           package_name <- Scala_Project.package_name(File.path(file))
           if !exclude_package(package_name)
         } yield File.path(component_dir.java_path.relativize(file.toPath).toFile).implode
--- a/src/Pure/Admin/build_log.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Admin/build_log.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -108,8 +108,8 @@
     def apply(file: JFile): Log_File = {
       val name = file.getName
       val text =
-        if (name.endsWith(".gz")) File.read_gzip(file)
-        else if (name.endsWith(".xz")) File.read_xz(file)
+        if (File.is_gz(name)) File.read_gzip(file)
+        else if (File.is_xz(name)) File.read_xz(file)
         else File.read(file)
       apply(name, text)
     }
--- a/src/Pure/Admin/check_sources.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Admin/check_sources.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -50,7 +50,7 @@
     val hg = Mercurial.repository(root)
     for {
       file <- hg.known_files()
-      if file.endsWith(".thy") || file.endsWith(".ML") || file.endsWith("/ROOT")
+      if File.is_thy(file) || File.is_ML(file) || file.endsWith("/ROOT")
     } check_file(root + Path.explode(file))
   }
 
--- a/src/Pure/General/file.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/General/file.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -69,6 +69,26 @@
   def url(path: Path): URL = url(path.file)
 
 
+  /* adhoc file types */
+
+  def is_ML(s: String): Boolean = s.endsWith(".ML")
+  def is_bib(s: String): Boolean = s.endsWith(".bib")
+  def is_dll(s: String): Boolean = s.endsWith(".dll")
+  def is_exe(s: String): Boolean = s.endsWith(".exe")
+  def is_gz(s: String): Boolean = s.endsWith(".gz")
+  def is_html(s: String): Boolean = s.endsWith(".html")
+  def is_jar(s: String): Boolean = s.endsWith(".jar")
+  def is_java(s: String): Boolean = s.endsWith(".java")
+  def is_node(s: String): Boolean = s.endsWith(".node")
+  def is_pdf(s: String): Boolean = s.endsWith(".pdf")
+  def is_png(s: String): Boolean = s.endsWith(".png")
+  def is_thy(s: String): Boolean = s.endsWith(".thy")
+  def is_xz(s: String): Boolean = s.endsWith(".xz")
+  def is_zip(s: String): Boolean = s.endsWith(".zip")
+
+  def is_backup(s: String): Boolean = s.endsWith("~") || s.endsWith(".orig")
+
+
   /* relative paths */
 
   def relative_path(base: Path, other: Path): Option[Path] = {
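
Note (illustration): the predicates above replace the scattered getName.endsWith(...)
checks elsewhere in this changeset (mirabelle.scala, build_log.scala,
check_sources.scala, mailman.scala, build_jcef.scala, build_jedit.scala). A minimal
standalone sketch of the same filtering style, using plain-JDK stand-ins rather than
the Isabelle/Scala File.find_files API; names and directory layout are illustrative
assumptions only:

  import java.nio.file.{Files, Path, Paths}
  import scala.jdk.CollectionConverters._

  object FileTypes {
    // stand-in for the File.is_thy helper added above
    def is_thy(s: String): Boolean = s.endsWith(".thy")

    // collect theory files under a directory, in the same style as
    // File.find_files(dir, pred = file => File.is_thy(file.getName))
    def theory_files(dir: Path): List[Path] =
      Files.walk(dir).iterator().asScala.toList
        .filter(p => Files.isRegularFile(p) && is_thy(p.getFileName.toString))

    def main(args: Array[String]): Unit =
      theory_files(Paths.get(args.headOption.getOrElse("."))).foreach(println)
  }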
--- a/src/Pure/General/mailman.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/General/mailman.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -420,7 +420,7 @@
 
     def find_messages(dir: Path): List[Message] = {
       for {
-        file <- File.find_files(dir.file, file => file.getName.endsWith(".html"))
+        file <- File.find_files(dir.file, file => File.is_html(file.getName))
         rel_path <- File.relative_path(dir, File.path(file))
       }
       yield {
--- a/src/Pure/General/path.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/General/path.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -91,6 +91,8 @@
   val USER_HOME: Path = variable("USER_HOME")
   val ISABELLE_HOME: Path = variable("ISABELLE_HOME")
 
+  val index_html: Path = basic("index.html")
+
 
   /* explode */
 
@@ -158,6 +160,10 @@
       error(("Collision of file names due case-insensitivity:" :: collisions).mkString("\n  "))
     }
   }
+
+  def eq_case_insensitive(path1: Path, path2: Path): Boolean =
+    path1 == path2 ||
+    Word.lowercase(path1.expand.implode) == Word.lowercase(path2.expand.implode)
 }
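
Note (illustration): a standalone sketch of the comparison performed by the new
Path.eq_case_insensitive (assumption: plain strings instead of isabelle.Path; the
real method also expands the paths first, and presumably complements the
case-insensitivity collision check visible in the context above):

  // illustrative stand-in for Path.eq_case_insensitive, on plain strings
  def eq_case_insensitive(path1: String, path2: String): Boolean =
    path1 == path2 || path1.toLowerCase == path2.toLowerCase

  // e.g. eq_case_insensitive("README.html", "readme.HTML") == true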
 
 
--- a/src/Pure/ML/ml_console.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/ML/ml_console.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -70,7 +70,7 @@
           session_base =
             if (raw_ml_system) None
             else Some(Sessions.base_info(
-              options, logic, dirs = dirs, include_sessions = include_sessions).check.base))
+              options, logic, dirs = dirs, include_sessions = include_sessions).check_errors.base))
 
       POSIX_Interrupt.handler { process.interrupt() } {
         new TTY_Loop(process.stdin, process.stdout).join()
--- a/src/Pure/ML/ml_process.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/ML/ml_process.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -80,7 +80,7 @@
     // session base
     val (init_session_base, eval_init_session) =
       session_base match {
-        case None => (sessions_structure.bootstrap, Nil)
+        case None => (Sessions.bootstrap_base, Nil)
         case Some(base) => (base, List("Resources.init_session_env ()"))
       }
     val init_session = Isabelle_System.tmp_file("init_session")
@@ -173,7 +173,7 @@
       val more_args = getopts(args)
       if (args.isEmpty || more_args.nonEmpty) getopts.usage()
 
-      val base_info = Sessions.base_info(options, logic, dirs = dirs).check
+      val base_info = Sessions.base_info(options, logic, dirs = dirs).check_errors
       val store = Sessions.store(options)
       val result =
         ML_Process(options, base_info.sessions_structure, store, logic = logic, args = eval_args,
--- a/src/Pure/PIDE/document.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/PIDE/document.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -116,8 +116,6 @@
 
       def expand: Name =
         Name(path.expand.implode, master_dir_path.expand.implode, theory)
-      def symbolic: Name =
-        Name(path.implode_symbolic, master_dir_path.implode_symbolic, theory)
 
       def is_theory: Boolean = theory.nonEmpty
 
@@ -593,7 +591,7 @@
 
     def xml_markup_blobs(
       elements: Markup.Elements = Markup.Elements.full
-    ) : List[(Path, XML.Body)] = {
+    ) : List[(Command.Blob, XML.Body)] = {
       snippet_command match {
         case None => Nil
         case Some(command) =>
@@ -607,7 +605,7 @@
                 markup.to_XML(Text.Range(0, text.length), text, elements)
               }
               else Nil
-            blob.src_path -> xml
+            blob -> xml
           }
       }
     }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/PIDE/document_info.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -0,0 +1,179 @@
+/*  Title:      Pure/PIDE/document_info.scala
+    Author:     Makarius
+
+Persistent document information --- for presentation purposes.
+*/
+
+package isabelle
+
+
+object Document_Info {
+  sealed case class Session(
+    name: String,
+    used_theories: List[String],
+    loaded_theories: Map[String, Theory])
+
+  object Theory {
+    def apply(
+      name: String,
+      files: List[String],
+      static_session: String,
+      dynamic_session: String,
+      entities: List[Export_Theory.Entity0],
+      others: List[String]
+    ): Theory = {
+      val entities1 =
+        entities.filter(e => e.file.nonEmpty && Position.Range.unapply(e.pos).isDefined)
+      new Theory(name, files, static_session, dynamic_session, entities1, others)
+    }
+  }
+
+  class Theory private(
+    val name: String,
+    val files: List[String],
+    val static_session: String,
+    val dynamic_session: String,
+    entities: List[Export_Theory.Entity0],
+    others: List[String]
+  ) {
+    override def toString: String = name
+
+    val (thy_file, blobs_files) =
+      files match {
+        case Nil => error("Unknown theory file for " + quote(name))
+        case a :: bs =>
+          def for_theory: String = " for theory " + quote(name)
+          if (!File.is_thy(a)) error("Bad .thy file " + quote(a) + for_theory)
+          for (b <- bs if File.is_thy(b)) error("Bad auxiliary file " + quote(b) + for_theory)
+          (a, bs)
+      }
+
+    def home_session: Boolean = static_session == dynamic_session
+
+    def print_short: String =
+      if (home_session) Long_Name.base_name(name) else name
+
+    def print_long: String =
+      "theory " + quote(name) +
+      (if (home_session) "" else " (session " + quote(dynamic_session) + ")")
+
+    private lazy val by_file_range: Map[(String, Symbol.Range), List[Export_Theory.Entity0]] =
+      entities.groupBy(entity => (entity.file, entity.range))
+
+    private lazy val by_file_kname: Map[(String, String), Export_Theory.Entity0] =
+      (for {
+        entity <- entities
+        file <- Position.File.unapply(entity.pos)
+      } yield (file, entity.kname) -> entity).toMap
+
+    def get_defs(file: String, range: Symbol.Range): List[Export_Theory.Entity0] =
+      by_file_range.getOrElse((file, range), Nil)
+
+    def get_def(file: String, kind: String, name: String): Option[Export_Theory.Entity0] =
+      by_file_kname.get((file, Export_Theory.export_kind_name(kind, name)))
+
+    def elements(elements: Presentation.Elements): Presentation.Elements =
+      elements.copy(entity = others.foldLeft(elements.entity)(_ + _))
+  }
+
+  val empty: Document_Info = new Document_Info(Map.empty)
+
+  def read(
+    database_context: Export.Database_Context,
+    deps: Sessions.Deps,
+    sessions: List[String]
+  ): Document_Info = {
+    val sessions_domain = sessions.toSet
+    val sessions_structure = deps.sessions_structure
+    val sessions_requirements = sessions_structure.build_requirements(sessions)
+
+    def read_theory(theory_context: Export.Theory_Context): Option[Document_Info.Theory] =
+    {
+      val session_name = theory_context.session_context.session_name
+      val theory_name = theory_context.theory
+
+      theory_context.files0(permissive = true) match {
+        case Nil => None
+        case files =>
+          val (entities, others) =
+            if (sessions_domain(session_name)) {
+              val theory = Export_Theory.read_theory(theory_context, permissive = true)
+              (theory.entity_iterator.toList,
+                theory.others.keySet.toList)
+            }
+            else (Nil, Nil)
+          val theory =
+            Theory(theory_name,
+              static_session = sessions_structure.theory_qualifier(theory_name),
+              dynamic_session = session_name,
+              files = files,
+              entities = entities,
+              others = others)
+          Some(theory)
+      }
+    }
+
+    def read_session(session_name: String): Document_Info.Session = {
+      val static_theories = deps(session_name).used_theories.map(_._1.theory)
+      val loaded_theories0 = {
+        using(database_context.open_session(deps.base_info(session_name))) { session_context =>
+          for {
+            theory_name <- static_theories
+            theory <- read_theory(session_context.theory(theory_name))
+          } yield theory_name -> theory
+        }
+      }.toMap
+      val used_theories = static_theories.filter(loaded_theories0.keySet)
+      Session(session_name, used_theories, loaded_theories0)
+    }
+
+    val result0 =
+      (for (session <- Par_List.map(read_session, sessions_requirements).iterator)
+        yield session.name -> session).toMap
+
+    val result1 =
+      sessions_requirements.foldLeft(Map.empty[String, Session]) {
+        case (seen, session_name) =>
+          val session0 = result0(session_name)
+          val loaded_theories1 =
+            sessions_structure(session_name).parent.map(seen) match {
+              case None => session0.loaded_theories
+              case Some(parent_session) =>
+                parent_session.loaded_theories ++ session0.loaded_theories
+            }
+          val session1 = session0.copy(loaded_theories = loaded_theories1)
+          seen + (session_name -> session1)
+      }
+
+    new Document_Info(result1)
+  }
+}
+
+class Document_Info private(sessions: Map[String, Document_Info.Session]) {
+  override def toString: String =
+    sessions.keysIterator.toList.sorted.mkString("Document_Info(", ", ", ")")
+
+  def the_session(session: String): Document_Info.Session =
+    sessions.getOrElse(session,
+      error("Unknown document information for session: " + quote(session)))
+
+  def theory_by_name(session: String, theory: String): Option[Document_Info.Theory] =
+    by_session_and_theory_name.get((session, theory))
+
+  def theory_by_file(session: String, file: String): Option[Document_Info.Theory] =
+    by_session_and_theory_file.get((session, file))
+
+  private lazy val by_session_and_theory_name: Map[(String, String), Document_Info.Theory] =
+    (for {
+      session <- sessions.valuesIterator
+      theory <- session.loaded_theories.valuesIterator
+    } yield (session.name, theory.name) -> theory).toMap
+
+  private lazy val by_session_and_theory_file: Map[(String, String), Document_Info.Theory] = {
+    (for {
+      session <- sessions.valuesIterator
+      theory <- session.loaded_theories.valuesIterator
+      file <- theory.files.iterator
+    } yield (session.name, file) -> theory).toMap
+  }
+}
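
Document_Info.read first reads each required session in parallel (Par_List.map) and then folds over the topologically sorted requirements so that every session also sees the loaded theories of its parent. A simplified standalone sketch of that inheritance fold, with plain strings instead of Session/Theory values:

    // Standalone sketch of the parent-inheritance fold in Document_Info.read.
    def inherit_theories(
      order: List[String],                     // requirements in topological order (parents first)
      parent: Map[String, String],             // session -> parent session
      own: Map[String, Map[String, String]]    // session -> theories read for that session
    ): Map[String, Map[String, String]] =
      order.foldLeft(Map.empty[String, Map[String, String]]) {
        case (seen, session) =>
          val inherited = parent.get(session).flatMap(seen.get).getOrElse(Map.empty[String, String])
          seen + (session -> (inherited ++ own.getOrElse(session, Map.empty[String, String])))
      }
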
--- a/src/Pure/PIDE/headless.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/PIDE/headless.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -549,7 +549,11 @@
       val options: Options,
       val session_base_info: Sessions.Base_Info,
       log: Logger = No_Logger)
-  extends isabelle.Resources(session_base_info.sessions_structure, session_base_info.check.base, log = log) {
+    extends isabelle.Resources(
+      session_base_info.sessions_structure,
+      session_base_info.check_errors.base,
+      log = log
+    ) {
     resources =>
 
     val store: Sessions.Store = Sessions.store(options)
@@ -561,11 +565,12 @@
       print_mode: List[String] = Nil,
       progress: Progress = new Progress
     ): Session = {
-      val session = new Session(session_base_info.session, options, resources)
+      val session_name = session_base_info.session_name
+      val session = new Session(session_name, options, resources)
 
-      progress.echo("Starting session " + session_base_info.session + " ...")
+      progress.echo("Starting session " + session_name + " ...")
       Isabelle_Process.start(session, options, session_base_info.sessions_structure, store,
-        logic = session_base_info.session, modes = print_mode).await_startup()
+        logic = session_name, modes = print_mode).await_startup()
 
       session
     }
--- a/src/Pure/PIDE/resources.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/PIDE/resources.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -16,9 +16,6 @@
   def empty: Resources =
     new Resources(Sessions.Structure.empty, Sessions.Structure.empty.bootstrap)
 
-  def file_node(file: Path, dir: String = "", theory: String = ""): Document.Node.Name =
-    empty.file_node(file, dir = dir, theory = theory)
-
   def hidden_node(name: Document.Node.Name): Boolean =
     !name.is_theory || name.theory == Sessions.root_name || File_Format.registry.is_theory(name)
 
@@ -57,7 +54,7 @@
        (command_timings,
        (Command_Span.load_commands.map(cmd => (cmd.name, cmd.position)),
        (Scala.functions,
-       (session_base.global_theories.toList,
+       (sessions_structure.global_theories.toList,
         session_base.loaded_theories.keys)))))))))
   }
 
@@ -150,14 +147,16 @@
     } yield file
   }
 
+  def global_theory(theory: String): Boolean =
+    sessions_structure.global_theories.isDefinedAt(theory)
+
   def theory_name(qualifier: String, theory: String): String =
-    if (Long_Name.is_qualified(theory) || session_base.global_theories.isDefinedAt(theory))
-      theory
+    if (Long_Name.is_qualified(theory) || global_theory(theory)) theory
     else Long_Name.qualify(qualifier, theory)
 
   def find_theory_node(theory: String): Option[Document.Node.Name] = {
     val thy_file = Path.basic(Long_Name.base_name(theory)).thy
-    val session = session_base.theory_qualifier(theory)
+    val session = sessions_structure.theory_qualifier(theory)
     val dirs =
       sessions_structure.get(session) match {
         case Some(info) => info.dirs
@@ -182,14 +181,14 @@
   }
 
   def import_name(name: Document.Node.Name, s: String): Document.Node.Name =
-    import_name(session_base.theory_qualifier(name), name.master_dir, s)
+    import_name(sessions_structure.theory_qualifier(name), name.master_dir, s)
 
   def import_name(info: Sessions.Info, s: String): Document.Node.Name =
     import_name(info.name, info.dir.implode, s)
 
   def find_theory(file: JFile): Option[Document.Node.Name] = {
     for {
-      qualifier <- session_base.session_directories.get(File.canonical(file).getParentFile)
+      qualifier <- sessions_structure.session_directories.get(File.canonical(file).getParentFile)
       theory_base <- proper_string(Thy_Header.theory_name(file.getName))
       theory = theory_name(qualifier, theory_base)
       theory_node <- find_theory_node(theory)
@@ -198,7 +197,7 @@
   }
 
   def complete_import_name(context_name: Document.Node.Name, s: String): List[String] = {
-    val context_session = session_base.theory_qualifier(context_name)
+    val context_session = sessions_structure.theory_qualifier(context_name)
     val context_dir =
       try { Some(context_name.master_dir_path) }
       catch { case ERROR(_) => None }
@@ -208,7 +207,7 @@
       theory <- Thy_Header.try_read_dir(dir).iterator
       if Completion.completed(s)(theory)
     } yield {
-      if (session == context_session || session_base.global_theories.isDefinedAt(theory)) theory
+      if (session == context_session || global_theory(theory)) theory
       else Long_Name.qualify(session, theory)
     }).toList.sorted
   }
@@ -233,9 +232,10 @@
         val imports =
           header.imports.map({ case (s, pos) =>
             val name = import_name(node_name, s)
-            if (Sessions.exclude_theory(name.theory_base_name))
-              error("Bad theory name " + quote(name.theory_base_name) + Position.here(pos))
-            (name, pos)
+            if (Sessions.illegal_theory(name.theory_base_name)) {
+              error("Illegal theory name " + quote(name.theory_base_name) + Position.here(pos))
+            }
+            else (name, pos)
           })
         Document.Node.Header(imports, header.keywords, header.abbrevs)
       }
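
theory_name (and complete_import_name) now consult sessions_structure.global_theories instead of the per-session base, but the qualification rule itself is unchanged: a theory keeps its name if it is already long-qualified or registered as global, otherwise it is prefixed with the session qualifier. Sketched standalone, with a plain Set for the global theories (names are illustrative only):

    def theory_name(global_theories: Set[String])(qualifier: String, theory: String): String =
      if (theory.contains('.') || global_theories(theory)) theory
      else qualifier + "." + theory

    // theory_name(Set("Main"))("HOL-Library", "Multiset") == "HOL-Library.Multiset"
    // theory_name(Set("Main"))("HOL-Library", "Main")     == "Main"
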
--- a/src/Pure/System/classpath.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/System/classpath.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -29,7 +29,7 @@
       } yield File.absolute(new JFile(s))
 
     val jar_files1 =
-      jar_files.flatMap(start => File.find_files(start, _.getName.endsWith(".jar")))
+      jar_files.flatMap(start => File.find_files(start, file => File.is_jar(file.getName)))
         .map(File.absolute)
 
     val tmp_jars =
--- a/src/Pure/System/isabelle_tool.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/System/isabelle_tool.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -14,10 +14,7 @@
 
   private def is_external(dir: Path, name: String): Boolean = {
     val file = (dir + Path.explode(name)).file
-    try {
-      file.isFile && file.canRead && file.canExecute &&
-        !name.endsWith("~") && !name.endsWith(".orig")
-    }
+    try { file.isFile && file.canRead && file.canExecute && !File.is_backup(name) }
     catch { case _: SecurityException => false }
   }
 
--- a/src/Pure/Thy/bibtex.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Thy/bibtex.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -17,8 +17,6 @@
 object Bibtex {
   /** file format **/
 
-  def is_bibtex(name: String): Boolean = name.endsWith(".bib")
-
   class File_Format extends isabelle.File_Format {
     val format_name: String = "bibtex"
     val file_ext: String = "bib"
--- a/src/Pure/Thy/export.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Thy/export.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -294,7 +294,7 @@
       document_snapshot: Option[Document.Snapshot] = None,
       close_database_context: Boolean = false
     ): Session_Context = {
-      val session_name = session_base_info.check.base.session_name
+      val session_name = session_base_info.check_errors.session_name
       val session_hierarchy = session_base_info.sessions_structure.build_hierarchy(session_name)
       val session_databases =
         database_server match {
@@ -420,9 +420,9 @@
         if matcher(entry_name)
         entry <- get(entry_name.theory, entry_name.name).iterator
       } yield File.content(entry.entry_name.make_path(), entry.uncompressed)).toList
-  }
+    }
 
-  override def toString: String =
+    override def toString: String =
       "Export.Session_Context(" + commas_quote(session_stack) + ")"
   }
 
@@ -449,10 +449,13 @@
         case _ => None
       }
 
-    def files(): Option[(String, List[String])] =
-      split_lines(apply(FILES, permissive = true).text) match {
+    def files0(permissive: Boolean = false): List[String] =
+      split_lines(apply(FILES, permissive = permissive).text)
+
+    def files(permissive: Boolean = false): Option[(String, List[String])] =
+      files0(permissive = permissive) match {
         case Nil => None
-        case thy_file :: blobs_files => Some((thy_file, blobs_files))
+        case a :: bs => Some((a, bs))
       }
 
     override def toString: String = "Export.Theory_Context(" + quote(theory) + ")"
--- a/src/Pure/Thy/export_theory.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Thy/export_theory.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -190,6 +190,7 @@
   ) {
     val kname: String = export_kind_name(kind, name)
     val range: Symbol.Range = Position.Range.unapply(pos).getOrElse(Text.Range.offside)
+    val file: String = Position.File.unapply(pos).getOrElse("")
 
     def export_kind: String = Export_Theory.export_kind(kind)
     override def toString: String = export_kind + " " + quote(name)
--- a/src/Pure/Thy/html.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Thy/html.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -94,6 +94,30 @@
   def script_file(path: Path): XML.Elem = script_file(Url.print_file(path.file))
 
 
+  /* href */
+
+  def relative_href(loc: Path, base: Option[Path] = None, reverse: Boolean = false): String = {
+    base match {
+      case None =>
+        val path = loc.expand
+        if (path.is_absolute) Exn.error("Relative href expected: " + path)
+        else if (path.is_current) "" else path.implode
+      case Some(dir) =>
+        val path1 = dir.absolute_file.toPath
+        val path2 = loc.absolute_file.toPath
+        try {
+          val java_path = if (reverse) path2.relativize(path1) else path1.relativize(path2)
+          val path = File.path(java_path.toFile)
+          if (path.is_current) "" else path.implode
+        }
+        catch {
+          case _: IllegalArgumentException =>
+            Exn.error("Failed to relativize href " + path2 + " with wrt. base " + path1)
+        }
+    }
+  }
+
+
   /* output text with control symbols */
 
   private val control: Map[Symbol.Symbol, Operator] =
@@ -416,14 +440,6 @@
 
   /* document directory context (fonts + css) */
 
-  def relative_prefix(dir: Path, base: Option[Path]): String =
-    base match {
-      case None => ""
-      case Some(base_dir) =>
-        val path = File.path(dir.absolute.java_path.relativize(base_dir.absolute.java_path).toFile)
-        if (path.is_current) "" else path.implode + "/"
-    }
-
   def isabelle_css: Path = Path.explode("~~/etc/isabelle.css")
 
   def write_document(dir: Path, name: String, head: XML.Body, body: XML.Body,
@@ -433,8 +449,8 @@
     structural: Boolean = true
   ): Unit = {
     Isabelle_System.make_directory(dir)
-    val prefix = relative_prefix(dir, base)
-    File.write(dir + isabelle_css.base, fonts_css_dir(prefix) + "\n\n" + File.read(isabelle_css))
+    val fonts = fonts_css_dir(relative_href(dir, base = base, reverse = true))
+    File.write(dir + isabelle_css.base, fonts + "\n\n" + File.read(isabelle_css))
     File.write(dir + Path.basic(name),
       output_document(head, body, css = css, hidden = hidden, structural = structural))
   }
--- a/src/Pure/Thy/presentation.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Thy/presentation.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -17,21 +17,47 @@
 
   /* HTML context */
 
-  sealed case class HTML_Document(title: String, content: String)
+  def html_context(
+    sessions_structure: Sessions.Structure,
+    elements: Elements,
+    root_dir: Path = Path.current,
+    document_info: Document_Info = Document_Info.empty
+  ): HTML_Context = new HTML_Context(sessions_structure, elements, root_dir, document_info)
 
-  abstract class HTML_Context {
+  class HTML_Context private[Presentation](
+    sessions_structure: Sessions.Structure,
+    val elements: Elements,
+    val root_dir: Path,
+    val document_info: Document_Info
+  ) {
     /* directory structure and resources */
 
-    def nodes: Nodes
-    def root_dir: Path
-    def theory_session(name: Document.Node.Name): Sessions.Info
+    def theory_by_name(session: String, theory: String): Option[Document_Info.Theory] =
+      document_info.theory_by_name(session, theory)
+
+    def theory_by_file(session: String, file: String): Option[Document_Info.Theory] =
+      document_info.theory_by_file(session, file)
+
+    def session_dir(session: String): Path =
+      root_dir + Path.explode(sessions_structure(session).chapter_session)
 
-    def session_dir(info: Sessions.Info): Path =
-      root_dir + Path.explode(info.chapter_session)
-    def theory_dir(name: Document.Node.Name): Path =
-      session_dir(theory_session(name))
-    def files_path(name: Document.Node.Name, path: Path): Path =
-      theory_dir(name) + Path.explode("files") + path.squash.html
+    def theory_html(theory: Document_Info.Theory): Path =
+    {
+      def check(name: String): Option[Path] = {
+        val path = Path.explode(name).html
+        if (Path.eq_case_insensitive(path, Path.index_html)) None
+        else Some(path)
+      }
+      check(theory.print_short) orElse check(theory.name) getOrElse
+        error("Illegal global theory name " + quote(theory.name) +
+          " (conflict with " + Path.index_html + ")")
+    }
+
+    def file_html(file: String): Path =
+      Path.explode(file).squash.html
+
+    def smart_html(theory: Document_Info.Theory, file: String): Path =
+      if (File.is_thy(file)) theory_html(theory) else file_html(file)
 
 
     /* HTML content */
@@ -63,6 +89,8 @@
     }
   }
 
+  sealed case class HTML_Document(title: String, content: String)
+
 
   /* presentation elements */
 
@@ -84,174 +112,90 @@
       language = Markup.Elements(Markup.Language.DOCUMENT))
 
 
-  /* per-session node info */
-
-  sealed case class File_Info(theory: String, is_theory: Boolean = false)
+  /* formal entities */
 
-  object Node_Info {
-    val empty: Node_Info = new Node_Info(Map.empty, Map.empty, Nil)
-    def make(theory: Export_Theory.Theory): Node_Info = {
-      val by_range = theory.entity_iterator.toList.groupBy(_.range)
-      val by_kind_name =
-        theory.entity_iterator.map(entity => ((entity.kind, entity.name), entity)).toMap
-      val others = theory.others.keySet.toList
-      new Node_Info(by_range, by_kind_name, others)
-    }
-  }
-
-  class Node_Info private(
-    by_range: Map[Symbol.Range, List[Export_Theory.Entity0]],
-    by_kind_name: Map[(String, String), Export_Theory.Entity0],
-    val others: List[String]) {
-    def for_range(range: Symbol.Range): List[Export_Theory.Entity0] =
-      by_range.getOrElse(range, Nil)
-    def get_kind_name(kind: String, name: String): Option[String] =
-      by_kind_name.get((kind, name)).map(_.kname)
+  object Theory_Ref {
+    def unapply(props: Properties.T): Option[String] =
+      (props, props) match {
+        case (Markup.Kind(Markup.THEORY), Markup.Name(theory)) => Some(theory)
+        case _ => None
+      }
   }
 
-  object Nodes {
-    val empty: Nodes = new Nodes(Map.empty, Map.empty)
-
-    def read(
-      database_context: Export.Database_Context,
-      deps: Sessions.Deps,
-      presentation_sessions: List[String]
-    ): Nodes = {
-
-      def open_session(session: String): Export.Session_Context =
-        database_context.open_session(deps.base_info(session))
-
-      type Batch = (String, List[String])
-      val batches =
-        presentation_sessions.foldLeft((Set.empty[String], List.empty[Batch]))(
-          { case ((seen, batches), session) =>
-              val thys = deps(session).loaded_theories.keys.filterNot(seen)
-              (seen ++ thys, (session, thys) :: batches)
-          })._2
-
-      val theory_node_info =
-        Par_List.map[Batch, List[(String, Node_Info)]](
-          { case (session, thys) =>
-              using(open_session(session)) { session_context =>
-                for (thy_name <- thys) yield {
-                  val theory_context = session_context.theory(thy_name)
-                  val theory = Export_Theory.read_theory(theory_context, permissive = true)
-                  thy_name -> Node_Info.make(theory)
-                }
-              }
-          }, batches).flatten.toMap
-
-      val files_info =
-        deps.sessions_structure.build_requirements(presentation_sessions).flatMap(session =>
-          using(open_session(session)) { session_context =>
-            session_context.theory_names().flatMap { theory =>
-              session_context.theory(theory).files() match {
-                case None => Nil
-                case Some((thy, blobs)) =>
-                  val thy_file_info = File_Info(theory, is_theory = true)
-                  (thy -> thy_file_info) :: blobs.map(_ -> File_Info(theory))
-              }
-            }
-          }).toMap
-
-      new Nodes(theory_node_info, files_info)
-    }
+  object Entity_Ref {
+    def unapply(props: Properties.T): Option[(String, String, String)] =
+      (props, props, props, props) match {
+        case (Markup.Entity.Ref.Prop(_), Position.Def_File(file), Markup.Kind(kind), Markup.Name(name))
+        if Path.is_wellformed(file) => Some((file, kind, name))
+        case _ => None
+      }
   }
 
-  class Nodes private(
-    theory_node_info: Map[String, Node_Info],
-    val files_info: Map[String, File_Info]
-  ) {
-    def apply(name: String): Node_Info = theory_node_info.getOrElse(name, Node_Info.empty)
-    def get(name: String): Option[Node_Info] = theory_node_info.get(name)
-  }
-
-
-  /* formal entities */
-
-  type Entity = Export_Theory.Entity[Export_Theory.No_Content]
-
-  object Entity_Context {
-    object Theory_Ref {
-      def unapply(props: Properties.T): Option[Document.Node.Name] =
-        (props, props, props) match {
-          case (Markup.Kind(Markup.THEORY), Markup.Name(theory), Position.Def_File(thy_file)) =>
-            Some(Resources.file_node(Path.explode(thy_file), theory = theory))
-          case _ => None
-        }
-    }
-
-    object Entity_Ref {
-      def unapply(props: Properties.T): Option[(Path, Option[String], String, String)] =
-        (props, props, props, props) match {
-          case (Markup.Entity.Ref.Prop(_), Position.Def_File(def_file),
-              Markup.Kind(kind), Markup.Name(name)) =>
-            val def_theory = Position.Def_Theory.unapply(props)
-            Some((Path.explode(def_file), def_theory, kind, name))
-          case _ => None
-        }
-    }
-
-    val empty: Entity_Context = new Entity_Context
+  object Node_Context {
+    val empty: Node_Context = new Node_Context
 
     def make(
-        session: String,
-        deps: Sessions.Deps,
-        node: Document.Node.Name,
-        html_context: HTML_Context): Entity_Context =
-      new Entity_Context {
+      html_context: HTML_Context,
+      session_name: String,
+      theory_name: String,
+      file_name: String,
+      node_dir: Path,
+    ): Node_Context =
+      new Node_Context {
+        private val session_dir = html_context.session_dir(session_name)
+
         private val seen_ranges: mutable.Set[Symbol.Range] = mutable.Set.empty
 
-        override def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] = {
+        override def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] =
           body match {
             case List(XML.Elem(Markup("span", List("id" -> _)), _)) => None
             case _ =>
-              Some {
-                val entities = html_context.nodes(node.theory).for_range(range)
+              for (theory <- html_context.theory_by_name(session_name, theory_name))
+              yield {
                 val body1 =
                   if (seen_ranges.contains(range)) {
                     HTML.entity_def(HTML.span(HTML.id(offset_id(range)), body))
                   }
                   else HTML.span(body)
-                entities.map(_.kname).foldLeft(body1) {
-                  case (elem, id) => HTML.entity_def(HTML.span(HTML.id(id), List(elem)))
+                theory.get_defs(file_name, range).foldLeft(body1) {
+                  case (elem, entity) =>
+                    HTML.entity_def(HTML.span(HTML.id(entity.kname), List(elem)))
                 }
               }
           }
-        }
 
         private def offset_id(range: Text.Range): String =
           "offset_" + range.start + ".." + range.stop
 
-        private def physical_ref(thy_name: String, props: Properties.T): Option[String] = {
-          for {
-            range <- Position.Def_Range.unapply(props)
-            if thy_name == node.theory
-          } yield {
-            seen_ranges += range
-            offset_id(range)
-          }
-        }
-
-        private def logical_ref(thy_name: String, kind: String, name: String): Option[String] =
-          html_context.nodes.get(thy_name).flatMap(_.get_kind_name(kind, name))
-
         override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = {
           props match {
-            case Theory_Ref(node_name) =>
-              node_relative(deps, session, node_name).map(html_dir =>
-                HTML.link(html_dir + html_name(node_name), body))
-            case Entity_Ref(file_path, def_theory, kind, name) if file_path.get_ext == "thy" =>
+            case Theory_Ref(thy_name) =>
+              for (theory <- html_context.theory_by_name(session_name, thy_name))
+              yield {
+                val html_path = session_dir + html_context.theory_html(theory)
+                val html_link = HTML.relative_href(html_path, base = Some(node_dir))
+                HTML.link(html_link, body)
+              }
+            case Entity_Ref(def_file, kind, name) =>
+              def logical_ref(theory: Document_Info.Theory): Option[String] =
+                theory.get_def(def_file, kind, name).map(_.kname)
+
+              def physical_ref(theory: Document_Info.Theory): Option[String] =
+                props match {
+                  case Position.Def_Range(range) if theory.name == theory_name =>
+                    seen_ranges += range
+                    Some(offset_id(range))
+                  case _ => None
+                }
+
               for {
-                thy_name <-
-                  def_theory orElse (if (File.eq(node.path, file_path)) Some(node.theory) else None)
-                node_name = Resources.file_node(file_path, theory = thy_name)
-                html_dir <- node_relative(deps, session, node_name)
-                html_file = node_file(node_name)
-                html_ref <-
-                  logical_ref(thy_name, kind, name) orElse physical_ref(thy_name, props)
-              } yield {
-                HTML.entity_ref(HTML.link(html_dir + html_file + "#" + html_ref, body))
+                theory <- html_context.theory_by_file(session_name, def_file)
+                html_ref <- logical_ref(theory) orElse physical_ref(theory)
+              }
+              yield {
+                val html_path = session_dir + html_context.smart_html(theory, def_file)
+                val html_link = HTML.relative_href(html_path, base = Some(node_dir))
+                HTML.entity_ref(HTML.link(html_link + "#" + html_ref, body))
               }
             case _ => None
           }
@@ -259,92 +203,85 @@
       }
   }
 
-  class Entity_Context {
+  class Node_Context {
     def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] = None
     def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = None
-  }
 
-
-  /* HTML output */
-
-  private val div_elements =
-    Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.`enum`.name,
-      HTML.descr.name)
+    val div_elements: Set[String] =
+      Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.`enum`.name,
+        HTML.descr.name)
 
-  def make_html(
-    entity_context: Entity_Context,
-    elements: Elements,
-    xml: XML.Body
-  ): XML.Body = {
-    def html_div(html: XML.Body): Boolean =
-      html exists {
-        case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body)
-        case XML.Text(_) => false
-      }
+    def make_html(elements: Elements, xml: XML.Body): XML.Body = {
+      def html_div(html: XML.Body): Boolean =
+        html exists {
+          case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body)
+          case XML.Text(_) => false
+        }
+
+      def html_class(c: String, html: XML.Body): XML.Body =
+        if (c == "") html
+        else if (html_div(html)) List(HTML.div(c, html))
+        else List(HTML.span(c, html))
 
-    def html_class(c: String, html: XML.Body): XML.Body =
-      if (c == "") html
-      else if (html_div(html)) List(HTML.div(c, html))
-      else List(HTML.span(c, html))
-
-    def html_body(xml_body: XML.Body, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
-      xml_body.foldRight((List.empty[XML.Tree], end_offset)) { case (tree, (res, end_offset1)) =>
-        val (res1, offset) = html_body_single(tree, end_offset1)
-        (res1 ++ res, offset)
-      }
+      def html_body(xml_body: XML.Body, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
+        xml_body.foldRight((List.empty[XML.Tree], end_offset)) { case (tree, (res, end_offset1)) =>
+          val (res1, offset) = html_body_single(tree, end_offset1)
+          (res1 ++ res, offset)
+        }
 
-    @tailrec
-    def html_body_single(xml_tree: XML.Tree, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
-      xml_tree match {
-        case XML.Wrapped_Elem(markup, _, body) => html_body_single(XML.Elem(markup, body), end_offset)
-        case XML.Elem(Markup(Markup.ENTITY, props @ Markup.Kind(kind)), body) =>
-          val (body1, offset) = html_body(body, end_offset)
-          if (elements.entity(kind)) {
-            entity_context.make_ref(props, body1) match {
-              case Some(link) => (List(link), offset)
-              case None => (body1, offset)
+      @tailrec
+      def html_body_single(xml_tree: XML.Tree, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
+        xml_tree match {
+          case XML.Wrapped_Elem(markup, _, body) => html_body_single(XML.Elem(markup, body), end_offset)
+          case XML.Elem(Markup(Markup.ENTITY, props @ Markup.Kind(kind)), body) =>
+            val (body1, offset) = html_body(body, end_offset)
+            if (elements.entity(kind)) {
+              make_ref(props, body1) match {
+                case Some(link) => (List(link), offset)
+                case None => (body1, offset)
+              }
             }
-          }
-          else (body1, offset)
-        case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) =>
-          val (body1, offset) = html_body(body, end_offset)
-          (html_class(if (elements.language(name)) name else "", body1), offset)
-        case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) =>
-          val (body1, offset) = html_body(body, end_offset)
-          (List(HTML.par(body1)), offset)
-        case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) =>
-          val (body1, offset) = html_body(body, end_offset)
-          (List(HTML.item(body1)), offset)
-        case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), text) =>
-          (Nil, end_offset - XML.symbol_length(text))
-        case XML.Elem(Markup.Markdown_List(kind), body) =>
-          val (body1, offset) = html_body(body, end_offset)
-          if (kind == Markup.ENUMERATE) (List(HTML.`enum`(body1)), offset)
-          else (List(HTML.list(body1)), offset)
-        case XML.Elem(markup, body) =>
-          val name = markup.name
-          val (body1, offset) = html_body(body, end_offset)
-          val html =
-            markup.properties match {
-              case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD =>
-                html_class(kind, body1)
-              case _ =>
-                body1
+            else (body1, offset)
+          case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) =>
+            val (body1, offset) = html_body(body, end_offset)
+            (html_class(if (elements.language(name)) name else "", body1), offset)
+          case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) =>
+            val (body1, offset) = html_body(body, end_offset)
+            (List(HTML.par(body1)), offset)
+          case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) =>
+            val (body1, offset) = html_body(body, end_offset)
+            (List(HTML.item(body1)), offset)
+          case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), text) =>
+            (Nil, end_offset - XML.symbol_length(text))
+          case XML.Elem(Markup.Markdown_List(kind), body) =>
+            val (body1, offset) = html_body(body, end_offset)
+            if (kind == Markup.ENUMERATE) (List(HTML.`enum`(body1)), offset)
+            else (List(HTML.list(body1)), offset)
+          case XML.Elem(markup, body) =>
+            val name = markup.name
+            val (body1, offset) = html_body(body, end_offset)
+            val html =
+              markup.properties match {
+                case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD =>
+                  html_class(kind, body1)
+                case _ =>
+                  body1
+              }
+            Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match {
+              case Some(c) => (html_class(c.toString, html), offset)
+              case None => (html_class(name, html), offset)
             }
-          Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match {
-            case Some(c) => (html_class(c.toString, html), offset)
-            case None => (html_class(name, html), offset)
-          }
-        case XML.Text(text) =>
-          val offset = end_offset - Symbol.length(text)
-          val body = HTML.text(Symbol.decode(text))
-          entity_context.make_def(Text.Range(offset, end_offset), body) match {
-            case Some(body1) => (List(body1), offset)
-            case None => (body, offset)
-          }
-      }
+          case XML.Text(text) =>
+            val offset = end_offset - Symbol.length(text)
+            val body = HTML.text(Symbol.decode(text))
+            make_def(Text.Range(offset, end_offset), body) match {
+              case Some(body1) => (List(body1), offset)
+              case None => (body, offset)
+            }
+        }
 
-    html_body(xml, XML.symbol_length(xml) + 1)._1
+      html_body(xml, XML.symbol_length(xml) + 1)._1
+    }
   }
 
 
@@ -353,7 +290,6 @@
   def html_document(
     snapshot: Document.Snapshot,
     html_context: HTML_Context,
-    elements: Elements,
     plain_text: Boolean = false,
     fonts_css: String = HTML.fonts_css()
   ): HTML_Document = {
@@ -370,8 +306,8 @@
         val title =
           if (name.is_theory) "Theory " + quote(name.theory_base_name)
           else "File " + Symbol.cartouche_decoded(name.path.file_name)
-        val xml = snapshot.xml_markup(elements = elements.html)
-        val body = make_html(Entity_Context.empty, elements, xml)
+        val xml = snapshot.xml_markup(elements = html_context.elements.html)
+        val body = Node_Context.empty.make_html(html_context.elements, xml)
         html_context.html_document(title, body, fonts_css)
       }
     }
@@ -401,8 +337,6 @@
     def enabled: Boolean
     def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info
     def dir(store: Sessions.Store): Path = store.presentation_dir
-    def dir(store: Sessions.Store, info: Sessions.Info): Path =
-      dir(store) + Path.explode(info.chapter_session)
   }
 
 
@@ -493,176 +427,116 @@
 
   /* present session */
 
-  val session_graph_path = Path.explode("session_graph.pdf")
-  val readme_path = Path.explode("README.html")
-
-  def html_name(name: Document.Node.Name): String = Path.explode(name.theory_base_name).html.implode
-  def files_path(src_path: Path): String = (Path.explode("files") + src_path.squash.html).implode
-
-  private def node_file(name: Document.Node.Name): String =
-    if (name.node.endsWith(".thy")) html_name(name) else files_path(name.path)
-
-  private def session_relative(
-    deps: Sessions.Deps,
-    session0: String,
-    session1: String
-  ): Option[String] = {
-    for {
-      info0 <- deps.sessions_structure.get(session0)
-      info1 <- deps.sessions_structure.get(session1)
-    } yield info0.relative_path(info1)
-  }
-
-  def node_relative(
-    deps: Sessions.Deps,
-    session0: String,
-    node_name: Document.Node.Name
-  ): Option[String] = {
-    val session1 = deps(session0).theory_qualifier(node_name)
-    session_relative(deps, session0, session1)
-  }
+  val session_graph_path: Path = Path.explode("session_graph.pdf")
 
   def session_html(
+    html_context: HTML_Context,
     session_context: Export.Session_Context,
-    deps: Sessions.Deps,
     progress: Progress = new Progress,
     verbose: Boolean = false,
-    html_context: HTML_Context,
-    session_elements: Elements
   ): Unit = {
-    val session = session_context.session_name
-    val info = session_context.sessions_structure(session)
-    val options = info.options
-    val base = session_context.session_base
+    val session_name = session_context.session_name
+    val session_info = session_context.sessions_structure(session_name)
 
-    val session_dir = Isabelle_System.make_directory(html_context.session_dir(info))
+    val session_dir =
+      Isabelle_System.make_directory(html_context.session_dir(session_name)).expand
+
+    progress.echo("Presenting " + session_name + " in " + session_dir + " ...")
+
+    val session = html_context.document_info.the_session(session_name)
 
     Bytes.write(session_dir + session_graph_path,
-      graphview.Graph_File.make_pdf(options, base.session_graph_display))
+      graphview.Graph_File.make_pdf(session_info.options,
+        session_context.session_base.session_graph_display))
 
-    val documents =
+    val document_variants =
       for {
-        doc <- info.document_variants
+        doc <- session_info.document_variants
         db <- session_context.session_db()
-        document <- Document_Build.read_document(db, session, doc.name)
-      } yield {
-        val doc_path = (session_dir + doc.path.pdf).expand
-        if (verbose) progress.echo("Presenting document " + session + "/" + doc.name)
-        if (options.bool("document_echo")) progress.echo("Document at " + doc_path)
+        document <- Document_Build.read_document(db, session_name, doc.name)
+      }
+      yield {
+        val doc_path = session_dir + doc.path.pdf
+        if (Path.eq_case_insensitive(doc.path.pdf, session_graph_path)) {
+          error("Illegal document variant " + quote(doc.name) +
+            " (conflict with " + session_graph_path + ")")
+        }
+        if (verbose) progress.echo("Presenting document " + session_name + "/" + doc.name)
+        if (session_info.document_echo) progress.echo("Document at " + doc_path)
         Bytes.write(doc_path, document.pdf)
         doc
       }
 
-    val view_links = {
-      val deps_link =
-        HTML.link(session_graph_path, HTML.text("theory dependencies"))
-
-      val readme_links =
-        if ((info.dir + readme_path).is_file) {
-          Isabelle_System.copy_file(info.dir + readme_path, session_dir + readme_path)
-          List(HTML.link(readme_path, HTML.text("README")))
-        }
-        else Nil
-
-      val document_links =
-        documents.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name)))
-
+    val document_links = {
+      val link1 = HTML.link(session_graph_path, HTML.text("theory dependencies"))
+      val links2 = document_variants.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name)))
       Library.separate(HTML.break ::: HTML.nl,
-        (deps_link :: readme_links ::: document_links).
-          map(link => HTML.text("View ") ::: List(link))).flatten
+        (link1 :: links2).map(link => HTML.text("View ") ::: List(link))).flatten
     }
 
-    def entity_context(name: Document.Node.Name): Entity_Context =
-      Entity_Context.make(session, deps, name, html_context)
-
-
-    sealed case class Seen_File(
-      src_path: Path,
-      thy_name: Document.Node.Name,
-      thy_session: String
-    ) {
-      val files_path: Path = html_context.files_path(thy_name, src_path)
-
-      def check(src_path1: Path, thy_name1: Document.Node.Name, thy_session1: String): Boolean = {
-        val files_path1 = html_context.files_path(thy_name1, src_path1)
-        (src_path == src_path1 || files_path == files_path1) && thy_session == thy_session1
-      }
-    }
-    var seen_files = List.empty[Seen_File]
-
-    def present_theory(name: Document.Node.Name): Option[XML.Body] = {
+    def present_theory(theory_name: String): XML.Body = {
       progress.expose_interrupt()
 
-      Build_Job.read_theory(session_context.theory(name.theory)).flatMap { command =>
-        if (verbose) progress.echo("Presenting theory " + name)
-        val snapshot = Document.State.init.snippet(command)
+      def err(): Nothing =
+        error("Missing document information for theory: " + quote(theory_name))
 
-        val thy_elements =
-          session_elements.copy(entity =
-            html_context.nodes(name.theory).others.foldLeft(session_elements.entity)(_ + _))
+      val command = Build_Job.read_theory(session_context.theory(theory_name)) getOrElse err()
+      val theory = html_context.theory_by_name(session_name, theory_name) getOrElse err()
+
+      if (verbose) progress.echo("Presenting theory " + quote(theory_name))
+      val snapshot = Document.State.init.snippet(command)
 
-        val files_html =
-          for {
-            (src_path, xml) <- snapshot.xml_markup_blobs(elements = thy_elements.html)
-            if xml.nonEmpty
-          }
-          yield {
-            progress.expose_interrupt()
-            if (verbose) progress.echo("Presenting file " + src_path)
+      val thy_elements = theory.elements(html_context.elements)
+
+      def node_context(file_name: String, node_dir: Path): Node_Context =
+        Node_Context.make(html_context, session_name, theory_name, file_name, node_dir)
 
-            (src_path, html_context.source(
-              make_html(Entity_Context.empty, thy_elements, xml)))
-          }
-
-        val thy_html =
-          html_context.source(
-            make_html(entity_context(name), thy_elements,
-              snapshot.xml_markup(elements = thy_elements.html)))
+      val thy_html =
+        html_context.source(
+          node_context(theory.thy_file, session_dir).
+            make_html(thy_elements, snapshot.xml_markup(elements = thy_elements.html)))
 
-        val thy_session = html_context.theory_session(name).name
-        val thy_dir = Isabelle_System.make_directory(html_context.theory_dir(name))
-        val files =
-          for { (src_path, file_html) <- files_html }
-            yield {
-              seen_files.find(_.check(src_path, name, thy_session)) match {
-                case None => seen_files ::= Seen_File(src_path, name, thy_session)
-                case Some(seen_file) =>
-                  error("Incoherent use of file name " + src_path + " as " + files_path(src_path) +
-                    " in theory " + seen_file.thy_name + " vs. " + name)
-              }
-
-              val file_path = html_context.files_path(name, src_path)
-              val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short)
-              HTML.write_document(file_path.dir, file_path.file_name,
-                List(HTML.title(file_title)), List(html_context.head(file_title), file_html),
-                base = Some(html_context.root_dir))
+      val files =
+        for {
+          (blob, xml) <- snapshot.xml_markup_blobs(elements = thy_elements.html)
+          if xml.nonEmpty
+        }
+        yield {
+          progress.expose_interrupt()
+          val file_name = blob.name.node
+          if (verbose) progress.echo("Presenting file " + quote(file_name))
 
-              List(HTML.link(files_path(src_path), HTML.text(file_title)))
-            }
-
-        val thy_title = "Theory " + name.theory_base_name
-
-        HTML.write_document(thy_dir, html_name(name),
-          List(HTML.title(thy_title)), List(html_context.head(thy_title), thy_html),
-          base = Some(html_context.root_dir))
+          val file_html = session_dir + html_context.file_html(file_name)
+          val file_dir = file_html.dir
+          val html_link = HTML.relative_href(file_html, base = Some(session_dir))
+          val html =
+            html_context.source(
+              node_context(file_name, file_dir).make_html(thy_elements, xml))
 
-        if (thy_session == session) {
-          Some(
-            List(HTML.link(html_name(name),
-              HTML.text(name.theory_base_name) :::
-                (if (files.isEmpty) Nil else List(HTML.itemize(files))))))
+          val file_title = "File " + Symbol.cartouche_decoded(blob.src_path.implode_short)
+          HTML.write_document(file_dir, file_html.file_name,
+            List(HTML.title(file_title)), List(html_context.head(file_title), html),
+            base = Some(html_context.root_dir))
+          List(HTML.link(html_link, HTML.text(file_title)))
         }
-        else None
-      }
+
+      val thy_title = "Theory " + theory.print_short
+      HTML.write_document(session_dir, html_context.theory_html(theory).implode,
+        List(HTML.title(thy_title)), List(html_context.head(thy_title), thy_html),
+        base = Some(html_context.root_dir))
+
+      List(HTML.link(html_context.theory_html(theory),
+        HTML.text(theory.print_short) :::
+        (if (files.isEmpty) Nil else List(HTML.itemize(files)))))
     }
 
-    val theories = base.proper_session_theories.flatMap(present_theory)
+    val theories = session.used_theories.map(present_theory)
 
-    val title = "Session " + session
-    HTML.write_document(session_dir, "index.html",
-      List(HTML.title(title + Isabelle_System.isabelle_heading())),
-      html_context.head(title, List(HTML.par(view_links))) ::
-        html_context.contents("Theories", theories),
-      base = Some(html_context.root_dir))
+    val title = "Session " + session_name
+    HTML.write_document(session_dir, "index.html",
+      List(HTML.title(title + Isabelle_System.isabelle_heading())),
+      html_context.head(title, List(HTML.par(document_links))) ::
+        html_context.contents("Theories", theories),
+      base = Some(html_context.root_dir))
   }
 }
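
Presentation now writes each session into a single flat directory: index.html, session_graph.pdf, the document PDFs, one HTML file per theory, and squashed "file" pages for auxiliary blobs. HTML_Context.theory_html therefore rejects theory names whose HTML file would collide (case-insensitively) with index.html, and document variants are checked against session_graph.pdf in the same way. A standalone sketch of the theory_html check, with strings in place of isabelle.Path:

    // Standalone sketch of HTML_Context.theory_html's collision check.
    def theory_html(print_short: String, name: String): String = {
      def check(s: String): Option[String] = {
        val file = s + ".html"
        if (file.equalsIgnoreCase("index.html")) None else Some(file)
      }
      check(print_short) orElse check(name) getOrElse
        sys.error("Illegal global theory name " + name + " (conflict with index.html)")
    }

    // theory_html("Nat", "HOL.Nat")     == "Nat.html"
    // theory_html("Index", "HOL.Index") == "HOL.Index.html"
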
--- a/src/Pure/Thy/sessions.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Thy/sessions.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -30,11 +30,8 @@
 
   def is_pure(name: String): Boolean = name == Thy_Header.PURE
 
-
-  def exclude_session(name: String): Boolean = name == "" || name == DRAFT
-
-  def exclude_theory(name: String): Boolean =
-    name == root_name || name == "README" || name == "index" || name == "bib"
+  def illegal_session(name: String): Boolean = name == "" || name == DRAFT
+  def illegal_theory(name: String): Boolean = name == root_name || name == "bib"
 
 
   /* ROOTS file format */
@@ -61,8 +58,6 @@
   sealed case class Base(
     session_name: String = "",
     session_pos: Position.T = Position.none,
-    session_directories: Map[JFile, String] = Map.empty,
-    global_theories: Map[String, String] = Map.empty,
     proper_session_theories: List[Document.Node.Name] = Nil,
     document_theories: List[Document.Node.Name] = Nil,
     loaded_theories: Graph[String, Outer_Syntax] = Graph.string,  // cumulative imports
@@ -76,15 +71,13 @@
     session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph,
     errors: List[String] = Nil
   ) {
+    def session_entry: (String, Base) = session_name -> this
+
     override def toString: String =
       "Sessions.Base(session_name = " + quote(session_name) +
         ", loaded_theories = " + loaded_theories.size +
         ", used_theories = " + used_theories.length + ")"
 
-    def theory_qualifier(name: String): String =
-      global_theories.getOrElse(name, Long_Name.qualifier(name))
-    def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory)
-
     def loaded_theory(name: String): Boolean = loaded_theories.defined(name)
     def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory)
 
@@ -100,6 +93,8 @@
       nodes(name).syntax orElse loaded_theory_syntax(name) getOrElse overall_syntax
   }
 
+  val bootstrap_base: Base = Base(overall_syntax = Thy_Header.bootstrap_syntax)
+
   sealed case class Deps(sessions_structure: Structure, session_bases: Map[String, Base]) {
     override def toString: String = "Sessions.Deps(" + sessions_structure + ")"
 
@@ -156,13 +151,9 @@
       }
     }
 
-    val bootstrap_bases = {
-      val base = sessions_structure.bootstrap
-      Map(base.session_name -> base)
-    }
-
     val session_bases =
-      sessions_structure.imports_topological_order.foldLeft(bootstrap_bases) {
+      sessions_structure.imports_topological_order.foldLeft(
+          Map(Sessions.bootstrap_base.session_entry)) {
         case (session_bases, session_name) =>
           progress.expose_interrupt()
 
@@ -183,7 +174,8 @@
             val overall_syntax = dependencies.overall_syntax
 
             val proper_session_theories =
-              dependencies.theories.filter(name => deps_base.theory_qualifier(name) == session_name)
+              dependencies.theories.filter(name =>
+                sessions_structure.theory_qualifier(name) == session_name)
 
             val theory_files = dependencies.theories.map(_.path)
 
@@ -213,7 +205,7 @@
                 Graph_Display.Node("[" + name + "]", "session." + name)
 
               def node(name: Document.Node.Name): Graph_Display.Node = {
-                val qualifier = deps_base.theory_qualifier(name)
+                val qualifier = sessions_structure.theory_qualifier(name)
                 if (qualifier == info.name)
                   Graph_Display.Node(name.theory_base_name, "theory." + name.theory)
                 else session_node(qualifier)
@@ -221,7 +213,7 @@
 
               val required_sessions =
                 dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory))
-                  .map(theory => deps_base.theory_qualifier(theory))
+                  .map(theory => sessions_structure.theory_qualifier(theory))
                   .filter(name => name != info.name && sessions_structure.defined(name))
 
               val required_subgraph =
@@ -258,7 +250,7 @@
                 sessions_structure.imports_requirements(List(session_name)).toSet
               for {
                 name <- dependencies.theories
-                qualifier = deps_base.theory_qualifier(name)
+                qualifier = sessions_structure.theory_qualifier(name)
                 if !known_sessions(qualifier)
               } yield "Bad import of theory " + quote(name.toString) +
                 ": need to include sessions " + quote(qualifier) + " in ROOT"
@@ -280,7 +272,7 @@
                   known_theories.get(thy).map(_.name) match {
                     case None => err("Unknown document theory")
                     case Some(name) =>
-                      val qualifier = deps_base.theory_qualifier(name)
+                      val qualifier = sessions_structure.theory_qualifier(name)
                       if (proper_session_theories.contains(name)) {
                         err("Redundant document theory from this session:")
                       }
@@ -336,8 +328,6 @@
               Base(
                 session_name = info.name,
                 session_pos = info.pos,
-                session_directories = sessions_structure.session_directories,
-                global_theories = sessions_structure.global_theories,
                 proper_session_theories = proper_session_theories,
                 document_theories = document_theories,
                 loaded_theories = dependencies.loaded_theories,
@@ -353,7 +343,7 @@
                   document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
                   bibtex_errors)
 
-            session_bases + (info.name -> base)
+            session_bases + base.session_entry
           }
           catch {
             case ERROR(msg) =>
@@ -374,8 +364,11 @@
     errors: List[String] = Nil,
     infos: List[Info] = Nil
   ) {
-    def check: Base_Info = if (errors.isEmpty) this else error(cat_lines(errors))
-    def session: String = base.session_name
+    def session_name: String = base.session_name
+
+    def check_errors: Base_Info =
+      if (errors.isEmpty) this
+      else error(cat_lines(errors))
   }
 
   def base_info0(session: String): Base_Info =
@@ -413,7 +406,7 @@
         val required_theories =
           for {
             thy <- base.loaded_theories.keys
-            if !ancestor_loaded(thy) && base.theory_qualifier(thy) != session
+            if !ancestor_loaded(thy) && selected_sessions.theory_qualifier(thy) != session
           }
           yield thy
 
@@ -485,11 +478,6 @@
   ) {
     def chapter_session: String = chapter + "/" + name
 
-    def relative_path(info1: Info): String =
-      if (name == info1.name) ""
-      else if (chapter == info1.chapter) "../" + info1.name + "/"
-      else "../../" + info1.chapter_session + "/"
-
     def deps: List[String] = parent.toList ::: imports
 
     def deps_base(session_bases: String => Base): Base = {
@@ -531,6 +519,8 @@
       variants
     }
 
+    def document_echo: Boolean = options.bool("document_echo")
+
     def documents: List[Document_Build.Document_Variant] = {
       val variants = document_variants
       if (!document_enabled || document_files.isEmpty) Nil else variants
@@ -547,7 +537,7 @@
     lazy val bibtex_entries: List[Text.Info[String]] =
       (for {
         (document_dir, file) <- document_files.iterator
-        if Bibtex.is_bibtex(file.file_name)
+        if File.is_bib(file.file_name)
         info <- Bibtex.entries(File.read(dir + document_dir + file)).iterator
       } yield info).toList
 
@@ -567,7 +557,7 @@
     try {
       val name = entry.name
 
-      if (exclude_session(name)) error("Bad session name")
+      if (illegal_session(name)) error("Illegal session name " + quote(name))
       if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session")
       if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session")
 
@@ -580,8 +570,10 @@
         entry.theories.map({ case (opts, thys) =>
           (session_options ++ opts,
             thys.map({ case ((thy, pos), _) =>
-              if (exclude_theory(thy))
-                error("Bad theory name " + quote(thy) + Position.here(pos))
+              val thy_name = Thy_Header.import_name(thy)
+              if (illegal_theory(thy_name)) {
+                error("Illegal theory name " + quote(thy_name) + Position.here(pos))
+              }
               else (thy, pos) })) })
 
       val global_theories =
@@ -739,11 +731,7 @@
   ) {
     sessions_structure =>
 
-    def bootstrap: Base =
-      Base(
-        session_directories = session_directories,
-        global_theories = global_theories,
-        overall_syntax = Thy_Header.bootstrap_syntax)
+    def bootstrap: Base = Base(overall_syntax = Thy_Header.bootstrap_syntax)
 
     def dest_session_directories: List[(String, String)] =
       for ((file, session) <- session_directories.toList)
@@ -764,6 +752,7 @@
 
     def theory_qualifier(name: String): String =
       global_theories.getOrElse(name, Long_Name.qualifier(name))
+    def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory)
 
     def check_sessions(names: List[String]): Unit = {
       val bad_sessions = SortedSet(names.filterNot(defined): _*).toList
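
The Sessions.scala hunks above uniformly replace the per-session deps_base.theory_qualifier(name) calls with sessions_structure.theory_qualifier(name), backed by the new overload for Document.Node.Name that simply delegates to the String version. Below is a minimal, self-contained sketch of that delegation pattern; Node_Name and Structure are hypothetical stand-ins rather than the actual Isabelle/Scala classes, and the name-suffix fallback only approximates Long_Name.qualifier.

    object Qualifier_Demo {
      // hypothetical stand-in for Document.Node.Name
      case class Node_Name(theory: String)

      class Structure(global_theories: Map[String, String]) {
        // global theories are looked up in the table; otherwise take the long-name prefix,
        // e.g. "HOL-Algebra.Group" -> "HOL-Algebra"
        def theory_qualifier(name: String): String =
          global_theories.getOrElse(name, name.split('.').dropRight(1).mkString("."))

        // overload for node names: delegate to the String version, as in the hunk above
        def theory_qualifier(name: Node_Name): String = theory_qualifier(name.theory)
      }

      def main(args: Array[String]): Unit = {
        val structure = new Structure(Map("Main" -> "HOL"))
        println(structure.theory_qualifier("HOL-Algebra.Group"))            // HOL-Algebra
        println(structure.theory_qualifier(Node_Name("HOL-Algebra.Group"))) // HOL-Algebra
        println(structure.theory_qualifier("Main"))                         // HOL
      }
    }

With such an overload in place, callers no longer need a session-specific Base just to resolve the session qualifier of a theory node.
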
--- a/src/Pure/Thy/thy_header.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Thy/thy_header.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -91,7 +91,7 @@
 
   def import_name(s: String): String =
     s match {
-      case File_Name(name) if !name.endsWith(".thy") => name
+      case File_Name(name) if !File.is_thy(name) => name
       case _ => error("Malformed theory import: " + quote(s))
     }
 
--- a/src/Pure/Thy/thy_info.ML	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Thy/thy_info.ML	Sat Aug 20 21:34:55 2022 +0200
@@ -38,7 +38,8 @@
     segments: Document_Output.segment list};
 
 fun adjust_pos_properties (context: presentation_context) pos =
-  Position.offset_properties_of (#adjust_pos context pos) @ Position.id_properties_of pos;
+  Position.offset_properties_of (#adjust_pos context pos) @
+  filter (fn (a, _) => a = Markup.idN orelse a = Markup.fileN) (Position.get_props pos);
 
 structure Presentation = Theory_Data
 (
--- a/src/Pure/Tools/build.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Tools/build.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -496,27 +496,19 @@
         }
 
         using(Export.open_database_context(store)) { database_context =>
-          val presentation_nodes =
-            Presentation.Nodes.read(database_context, deps, presentation_sessions.map(_.name))
+          val document_info =
+            Document_Info.read(database_context, deps, presentation_sessions.map(_.name))
 
           Par_List.map({ (session: String) =>
             progress.expose_interrupt()
-            progress.echo("Presenting " + session + " ...")
 
             val html_context =
-              new Presentation.HTML_Context {
-                override def nodes: Presentation.Nodes = presentation_nodes
-                override def root_dir: Path = presentation_dir
-                override def theory_session(name: Document.Node.Name): Sessions.Info =
-                  deps.sessions_structure(deps(session).theory_qualifier(name))
-              }
+              Presentation.html_context(deps.sessions_structure, Presentation.elements1,
+                root_dir = presentation_dir, document_info = document_info)
 
             using(database_context.open_session(deps.base_info(session))) { session_context =>
-              Presentation.session_html(session_context, deps,
-                progress = progress,
-                verbose = verbose,
-                html_context = html_context,
-                Presentation.elements1)
+              Presentation.session_html(html_context, session_context,
+                progress = progress, verbose = verbose)
             }
           }, presentation_sessions.map(_.name))
         }
--- a/src/Pure/Tools/build_job.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Tools/build_job.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -26,10 +26,16 @@
 
     for {
       id <- theory_context.document_id()
-      (thy_file, blobs_files) <- theory_context.files()
+      (thy_file, blobs_files) <- theory_context.files(permissive = true)
     }
     yield {
-      val node_name = Resources.file_node(Path.explode(thy_file), theory = theory_context.theory)
+      val master_dir =
+        Thy_Header.split_file_name(thy_file) match {
+          case Some((dir, _)) => dir
+          case None => error("Cannot determine theory master directory: " + quote(thy_file))
+        }
+      val node_name =
+        Document.Node.Name(thy_file, master_dir = master_dir, theory = theory_context.theory)
 
       val results =
         Command.Results.make(
@@ -38,8 +44,8 @@
 
       val blobs =
         blobs_files.map { file =>
+          val name = Document.Node.Name(file)
           val path = Path.explode(file)
-          val name = Resources.file_node(path)
           val src_path = File.relative_path(node_name.master_dir_path, path).getOrElse(path)
           Command.Blob(name, src_path, None)
         }
@@ -364,7 +370,7 @@
             }
 
             export_text(Export.FILES,
-              cat_lines(snapshot.node_files.map(_.symbolic.node)), compress = false)
+              cat_lines(snapshot.node_files.map(_.path.implode_symbolic)), compress = false)
 
             for (((_, xml), i) <- snapshot.xml_markup_blobs().zipWithIndex) {
               export_(Export.MARKUP + (i + 1), xml)
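
The build_job.scala hunk above derives the theory's master directory by splitting the stored file name and fails with an explicit error when no directory part is present. A rough, self-contained sketch of that split, written against plain slash-separated strings; split_file_name below is an illustrative helper, not the actual Thy_Header.split_file_name.

    object Master_Dir_Demo {
      // split "dir/base" into Some((dir, base)), or None if there is no directory part
      def split_file_name(file: String): Option[(String, String)] =
        file.lastIndexOf('/') match {
          case -1 => None
          case i => Some((file.substring(0, i), file.substring(i + 1)))
        }

      def master_dir(thy_file: String): String =
        split_file_name(thy_file) match {
          case Some((dir, _)) => dir
          case None => sys.error("Cannot determine theory master directory: " + thy_file)
        }

      def main(args: Array[String]): Unit =
        println(master_dir("src/HOL/Algebra/Group.thy"))  // src/HOL/Algebra
    }
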
--- a/src/Pure/Tools/server_commands.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Tools/server_commands.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -68,7 +68,7 @@
 
       val base_info =
         Sessions.base_info(options, args.session, progress = progress, dirs = dirs,
-          include_sessions = args.include_sessions).check
+          include_sessions = args.include_sessions).check_errors
 
       val results =
         Build.build(options,
--- a/src/Pure/Tools/update_cartouches.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Tools/update_cartouches.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -97,7 +97,7 @@
         for {
           spec <- specs
           file <- File.find_files(Path.explode(spec).file,
-            file => file.getName.endsWith(".thy") || file.getName == "ROOT")
+            file => File.is_thy(file.getName) || file.getName == "ROOT")
         } update_cartouches(replace_text, File.path(file))
       })
 }
--- a/src/Pure/Tools/update_comments.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Tools/update_comments.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -60,7 +60,7 @@
 
         for {
           spec <- specs
-          file <- File.find_files(Path.explode(spec).file, file => file.getName.endsWith(".thy"))
+          file <- File.find_files(Path.explode(spec).file, file => File.is_thy(file.getName))
         } update_comments(File.path(file))
       })
 }
--- a/src/Pure/Tools/update_header.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Tools/update_header.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -54,7 +54,7 @@
 
         for {
           spec <- specs
-          file <- File.find_files(Path.explode(spec).file, file => file.getName.endsWith(".thy"))
+          file <- File.find_files(Path.explode(spec).file, file => File.is_thy(file.getName))
         } update_header(section, File.path(file))
       })
 }
--- a/src/Pure/Tools/update_then.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Tools/update_then.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -48,7 +48,7 @@
   
         for {
           spec <- specs
-          file <- File.find_files(Path.explode(spec).file, file => file.getName.endsWith(".thy"))
+          file <- File.find_files(Path.explode(spec).file, file => File.is_thy(file.getName))
         } update_then(File.path(file))
       })
 }
--- a/src/Pure/Tools/update_theorems.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Pure/Tools/update_theorems.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -50,7 +50,7 @@
 
       for {
         spec <- specs
-        file <- File.find_files(Path.explode(spec).file, file => file.getName.endsWith(".thy"))
+        file <- File.find_files(Path.explode(spec).file, file => File.is_thy(file.getName))
       } update_theorems(File.path(file))
     })
 }
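
Many hunks above and below (thy_header.scala, the update_* tools, graph_file.scala, build_vscodium.scala, vscode_rendering.scala and the bibtex-related files) replace ad-hoc name.endsWith(".thy")-style tests with predicates such as File.is_thy, File.is_bib, File.is_png, File.is_pdf and File.is_zip. A minimal sketch of keeping such suffix tests in one place; the object, the helper and the greatly simplified import_name below are illustrative only, not the Isabelle/Scala File or Thy_Header API.

    object Suffix_Demo {
      // one central definition per file type instead of scattered endsWith calls
      private def has_ext(name: String, ext: String): Boolean = name.endsWith("." + ext)

      def is_thy(name: String): Boolean = has_ext(name, "thy")
      def is_bib(name: String): Boolean = has_ext(name, "bib")
      def is_pdf(name: String): Boolean = has_ext(name, "pdf")

      // greatly simplified: a theory import must not be written as a .thy file name
      def import_name(s: String): String =
        if (is_thy(s)) sys.error("Malformed theory import: " + s) else s

      def main(args: Array[String]): Unit = {
        println(List("Group.thy", "root.bib", "ROOT").filter(is_thy))  // List(Group.thy)
        println(import_name("HOL-Algebra.Group"))                      // HOL-Algebra.Group
      }
    }
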
--- a/src/Tools/Graphview/graph_file.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/Graphview/graph_file.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -27,8 +27,8 @@
     }
 
     val name = file.getName
-    if (name.endsWith(".png")) Graphics_File.write_png(file, paint, w, h)
-    else if (name.endsWith(".pdf")) Graphics_File.write_pdf(file, paint, w, h)
+    if (File.is_png(name)) Graphics_File.write_png(file, paint, w, h)
+    else if (File.is_pdf(name)) Graphics_File.write_pdf(file, paint, w, h)
     else error("Bad type of file: " + quote(name) + " (.png or .pdf expected)")
   }
 
--- a/src/Tools/VSCode/src/build_vscode_extension.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/VSCode/src/build_vscode_extension.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -21,7 +21,7 @@
     progress: Progress = new Progress
   ): Unit = {
     val keywords =
-      Sessions.base_info(options, logic, dirs = dirs).check.base.overall_syntax.keywords
+      Sessions.base_info(options, logic, dirs = dirs).check_errors.base.overall_syntax.keywords
 
     val output_path = build_dir + Path.explode("isabelle-grammar.json")
     progress.echo(output_path.expand.implode)
--- a/src/Tools/VSCode/src/build_vscodium.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/VSCode/src/build_vscodium.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -66,7 +66,7 @@
     def is_linux: Boolean = platform == Platform.Family.linux
 
     def download_name: String = "VSCodium-" + download_template.replace("{VERSION}", version)
-    def download_zip: Boolean = download_name.endsWith(".zip")
+    def download_zip: Boolean = File.is_zip(download_name)
 
     def download(dir: Path, progress: Progress = new Progress): Unit = {
       if (download_zip) Isabelle_System.require_command("unzip", test = "-h")
@@ -222,7 +222,7 @@
         val files =
           File.find_files(dir.file, pred = { file =>
             val name = file.getName
-            name.endsWith(".dll") || name.endsWith(".exe") || name.endsWith(".node")
+            File.is_dll(name) || File.is_exe(name) || File.is_node(name)
           })
         files.foreach(file => File.set_executable(File.path(file), true))
         Isabelle_System.bash("chmod -R o-w " + File.bash_path(dir)).check
--- a/src/Tools/VSCode/src/dynamic_output.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/VSCode/src/dynamic_output.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -36,8 +36,8 @@
             else this
         }
       if (st1.output != output) {
-        val context =
-          new Presentation.Entity_Context {
+        val node_context =
+          new Presentation.Node_Context {
             override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] =
               for {
                 thy_file <- Position.Def_File.unapply(props)
@@ -47,7 +47,7 @@
               } yield HTML.link(uri.toString + "#" + def_line, body)
           }
         val elements = Presentation.elements2.copy(entity = Markup.Elements.full)
-        val html = Presentation.make_html(context, elements, Pretty.separate(st1.output))
+        val html = node_context.make_html(elements, Pretty.separate(st1.output))
         channel.write(LSP.Dynamic_Output(HTML.source(html).toString))
       }
       st1
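
In this file and in state_panel.scala below, Presentation.Entity_Context becomes Presentation.Node_Context and the free function Presentation.make_html(context, elements, body) becomes a method invoked on the context itself. A small, self-contained sketch of that shape of refactoring; all names below are hypothetical stand-ins for illustration.

    object Context_Method_Demo {
      trait Node_Context {
        // hook that concrete contexts override, as the LSP panels do for entity links
        def make_ref(id: String, body: String): Option[String] = None

        // rendering now lives on the context, so callers write node_context.make_html(...)
        def make_html(body: String): String =
          make_ref("def1", body).getOrElse("<span>" + body + "</span>")
      }

      def main(args: Array[String]): Unit = {
        val node_context = new Node_Context {
          override def make_ref(id: String, body: String): Option[String] =
            Some("<a href=\"#" + id + "\">" + body + "</a>")
        }
        println(node_context.make_html("x = y"))
      }
    }
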
--- a/src/Tools/VSCode/src/language_server.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/VSCode/src/language_server.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -263,15 +263,15 @@
           Sessions.base_info(
             options, session_name, dirs = session_dirs,
             include_sessions = include_sessions, session_ancestor = session_ancestor,
-            session_requirements = session_requirements).check
+            session_requirements = session_requirements).check_errors
 
         def build(no_build: Boolean = false): Build.Results =
           Build.build(options,
-            selection = Sessions.Selection.session(base_info.session), build_heap = true,
-            no_build = no_build, dirs = session_dirs, infos = base_info.infos)
+            selection = Sessions.Selection.session(base_info.session_name),
+            build_heap = true, no_build = no_build, dirs = session_dirs, infos = base_info.infos)
 
         if (!session_no_build && !build(no_build = true).ok) {
-          val start_msg = "Build started for Isabelle/" + base_info.session + " ..."
+          val start_msg = "Build started for Isabelle/" + base_info.session_name + " ..."
           val fail_msg = "Session build failed -- prover process remains inactive!"
 
           val progress = channel.progress(verbose = true)
@@ -304,8 +304,8 @@
 
       try {
         Isabelle_Process.start(session, options, base_info.sessions_structure,
-          Sessions.store(options), modes = modes, logic = base_info.session).await_startup()
-        reply_ok("Welcome to Isabelle/" + base_info.session + Isabelle_System.isabelle_heading())
+          Sessions.store(options), modes = modes, logic = base_info.session_name).await_startup()
+        reply_ok("Welcome to Isabelle/" + base_info.session_name + Isabelle_System.isabelle_heading())
       }
       catch { case ERROR(msg) => reply_error(msg) }
     }
--- a/src/Tools/VSCode/src/preview_panel.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/VSCode/src/preview_panel.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -29,14 +29,8 @@
                 if (snapshot.is_outdated) m
                 else {
                   val html_context =
-                    new Presentation.HTML_Context {
-                      override def nodes: Presentation.Nodes = Presentation.Nodes.empty
-                      override def root_dir: Path = Path.current
-                      override def theory_session(name: Document.Node.Name): Sessions.Info =
-                        resources.sessions_structure(resources.session_base.theory_qualifier(name))
-                    }
-                  val document =
-                    Presentation.html_document(snapshot, html_context, Presentation.elements2)
+                    Presentation.html_context(resources.sessions_structure, Presentation.elements2)
+                  val document = Presentation.html_document(snapshot, html_context)
                   channel.write(LSP.Preview_Response(file, column, document.title, document.content))
                   m - file
                 }
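
Here, and in build.scala and document_model.scala, the hand-rolled anonymous subclasses of Presentation.HTML_Context give way to a Presentation.html_context(...) factory that takes the sessions structure and elements, with root_dir and document_info as optional arguments. A brief sketch of the underlying idiom, a factory with default arguments instead of an anonymous subclass; the types and parameters below are stand-ins, not the actual Presentation API.

    object Factory_Demo {
      final class HTML_Context(val root_dir: String, val elements: List[String])

      // callers that previously overrode members one by one now pass only what differs
      def html_context(elements: List[String], root_dir: String = "."): HTML_Context =
        new HTML_Context(root_dir, elements)

      def main(args: Array[String]): Unit = {
        val default = html_context(List("entity", "language"))
        val custom = html_context(List("entity"), root_dir = "/tmp/presentation")
        println(default.root_dir + " vs " + custom.root_dir)
      }
    }
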
--- a/src/Tools/VSCode/src/state_panel.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/VSCode/src/state_panel.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -59,8 +59,8 @@
     new Query_Operation(server.editor, (), "print_state", _ => (),
       (_, _, body) =>
         if (output_active.value && body.nonEmpty){
-          val context =
-            new Presentation.Entity_Context {
+          val node_context =
+            new Presentation.Node_Context {
               override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] =
                 for {
                   thy_file <- Position.Def_File.unapply(props)
@@ -70,7 +70,7 @@
                 } yield HTML.link(uri.toString + "#" + def_line, body)
             }
           val elements = Presentation.elements2.copy(entity = Markup.Elements.full)
-          val html = Presentation.make_html(context, elements, Pretty.separate(body))
+          val html = node_context.make_html(elements, Pretty.separate(body))
           output(HTML.source(html).toString)
         })
 
--- a/src/Tools/VSCode/src/vscode_rendering.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/VSCode/src/vscode_rendering.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -89,7 +89,7 @@
   def completion(node_pos: Line.Node_Position, caret: Text.Offset): List[LSP.CompletionItem] = {
     val doc = model.content.doc
     val line = node_pos.pos.line
-    val unicode = node_pos.name.endsWith(".thy")
+    val unicode = File.is_thy(node_pos.name)
     doc.offset(Line.Position(line)) match {
       case None => Nil
       case Some(line_start) =>
--- a/src/Tools/VSCode/src/vscode_resources.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/VSCode/src/vscode_resources.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -71,7 +71,11 @@
   val options: Options,
   session_base_info: Sessions.Base_Info,
   log: Logger = No_Logger)
-extends Resources(session_base_info.sessions_structure, session_base_info.check.base, log = log) {
+extends Resources(
+  session_base_info.sessions_structure,
+  session_base_info.check_errors.base,
+  log = log
+) {
   resources =>
 
   private val state = Synchronized(VSCode_Resources.State())
--- a/src/Tools/jEdit/src/document_model.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/jEdit/src/document_model.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -314,16 +314,9 @@
       yield {
         val snapshot = model.await_stable_snapshot()
         val html_context =
-          new Presentation.HTML_Context {
-            override def nodes: Presentation.Nodes = Presentation.Nodes.empty
-            override def root_dir: Path = Path.current
-            override def theory_session(name: Document.Node.Name): Sessions.Info =
-              PIDE.resources.sessions_structure(
-                PIDE.resources.session_base.theory_qualifier(name))
-          }
+          Presentation.html_context(PIDE.resources.sessions_structure, Presentation.elements2)
         val document =
-          Presentation.html_document(
-            snapshot, html_context, Presentation.elements2,
+          Presentation.html_document(snapshot, html_context,
             plain_text = query.startsWith(plain_text_prefix),
             fonts_css = HTML.fonts_css_dir(HTTP.url_path(request.server_name)))
         HTTP.Response.html(document.content)
@@ -427,7 +420,7 @@
     else Some(Document.Blob(content.bytes, content.text, content.chunk, pending_edits.nonEmpty))
 
   def bibtex_entries: List[Text.Info[String]] =
-    if (Bibtex.is_bibtex(node_name.node)) content.bibtex_entries else Nil
+    if (File.is_bib(node_name.node)) content.bibtex_entries else Nil
 
 
   /* edits */
@@ -549,7 +542,7 @@
 
   def bibtex_entries: List[Text.Info[String]] =
     GUI_Thread.require {
-      if (Bibtex.is_bibtex(node_name.node)) {
+      if (File.is_bib(node_name.node)) {
         _bibtex_entries match {
           case Some(entries) => entries
           case None =>
--- a/src/Tools/jEdit/src/isabelle_session.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/jEdit/src/isabelle_session.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -84,10 +84,10 @@
       PIDE.maybe_snapshot(view) match {
         case None => ""
         case Some(snapshot) =>
-          val sessions = JEdit_Sessions.sessions_structure()
-          val session = PIDE.resources.session_base.theory_qualifier(snapshot.node_name)
+          val sessions_structure = JEdit_Sessions.sessions_structure()
+          val session = sessions_structure.theory_qualifier(snapshot.node_name)
           val chapter =
-            sessions.get(session) match {
+            sessions_structure.get(session) match {
               case Some(info) => info.chapter
               case None => Sessions.UNSORTED
             }
--- a/src/Tools/jEdit/src/jedit_bibtex.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/jEdit/src/jedit_bibtex.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -29,7 +29,7 @@
   def context_menu(text_area: JEditTextArea): List[JMenuItem] = {
     text_area.getBuffer match {
       case buffer: Buffer
-      if Bibtex.is_bibtex(JEdit_Lib.buffer_name(buffer)) && buffer.isEditable =>
+      if File.is_bib(JEdit_Lib.buffer_name(buffer)) && buffer.isEditable =>
         val menu = new JMenu("BibTeX entries")
         for (entry <- Bibtex.known_entries) {
           val item = new JMenuItem(entry.kind)
--- a/src/Tools/jEdit/src/query_dockable.scala	Fri Aug 19 05:49:17 2022 +0000
+++ b/src/Tools/jEdit/src/query_dockable.scala	Sat Aug 20 21:34:55 2022 +0200
@@ -300,7 +300,9 @@
   /* resize */
 
   private def handle_resize(): Unit =
-    GUI_Thread.require { operations.foreach(_.pretty_text_area.zoom(zoom)) }
+    GUI_Thread.require {
+      if (operations != null) operations.foreach(_.pretty_text_area.zoom(zoom))
+    }
 
   private val delay_resize =
     Delay.first(PIDE.options.seconds("editor_update_delay"), gui = true) { handle_resize() }
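
The query_dockable.scala hunk above guards handle_resize() against operations still being null, i.e. against the resize callback firing while the dockable is being constructed. A self-contained sketch of that initialization-order hazard and the null check; Panel and its members are illustrative, not the jEdit classes.

    object Init_Order_Demo {
      class Panel {
        // may be triggered before the fields below are initialized
        private def handle_resize(): Unit =
          if (operations != null) operations.foreach(op => println("zoom " + op))

        handle_resize()   // simulated early event: operations is still null, guard skips it
        private val operations: List[String] = List("find_theorems", "print_state")
        handle_resize()   // after initialization both operations are zoomed
      }

      def main(args: Array[String]): Unit = { new Panel; () }
    }
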