changeset:   76875:edf430326683
parent:      76873:713eb7f2230e
parent:      76874:d6b02d54dbf8
child:       76876:c9ffd9cf58db
author:      paulson
date:        Mon, 02 Jan 2023 20:47:09 +0000
summary:     merged
--- a/Admin/components/bundled	Mon Jan 02 20:46:24 2023 +0000
+++ b/Admin/components/bundled	Mon Jan 02 20:47:09 2023 +0000
@@ -1,2 +1,2 @@
 #additional components to be bundled for release
-naproche-20221024
+#naproche-20221024
--- a/etc/build.props	Mon Jan 02 20:46:24 2023 +0000
+++ b/etc/build.props	Mon Jan 02 20:47:09 2023 +0000
@@ -315,7 +315,7 @@
   isabelle.Scala$Handler \
   isabelle.Scala_Functions \
   isabelle.Server_Commands \
-  isabelle.Sessions$File_Format \
+  isabelle.Sessions$ROOTS_File_Format \
   isabelle.Simplifier_Trace$Handler \
   isabelle.Tools \
   isabelle.jedit.JEdit_Plugin0 \
--- a/src/Pure/Admin/build_log.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Admin/build_log.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -772,7 +772,7 @@
       SQL.select(columns, distinct = true) +
         table1.query_named + SQL.join_outer + aux_table.query_named +
         " ON " + version(table1) + " = " + version(aux_table) +
-        " ORDER BY " + pull_date(afp)(table1) + " DESC"
+        SQL.order_by(List(pull_date(afp)(table1)), descending = true)
     }
 
 
--- a/src/Pure/General/graph.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/General/graph.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -27,7 +27,7 @@
 
   def make[Key, A](
     entries: List[((Key, A), List[Key])],
-    symmetric: Boolean = false)(
+    converse: Boolean = false)(
     implicit ord: Ordering[Key]
   ): Graph[Key, A] = {
     val graph1 =
@@ -38,7 +38,7 @@
       entries.foldLeft(graph1) {
         case (graph, ((x, _), ys)) =>
           ys.foldLeft(graph) {
-            case (g, y) => if (symmetric) g.add_edge(y, x) else g.add_edge(x, y)
+            case (g, y) => if (converse) g.add_edge(y, x) else g.add_edge(x, y)
           }
       }
     graph2
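
The renamed flag keeps the old semantics: with converse = true every listed successor y
produces an edge y -> x rather than x -> y. A minimal sketch, using hypothetical string
keys that are not part of this changeset:

    // converse = true interprets the listed "successors" as predecessors
    val g = Graph.make(List((("A", ()), List("B"))), converse = true)(Ordering.String)
    // g contains the edge "B" -> "A"; with converse = false it would be "A" -> "B"
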
--- a/src/Pure/General/sql.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/General/sql.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -127,6 +127,9 @@
     override def toString: Source = ident
   }
 
+  def order_by(columns: List[Column], descending: Boolean = false): Source =
+    " ORDER BY " + columns.mkString(", ") + (if (descending) " DESC" else "")
+
 
   /* tables */
 
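SQL.order_by produces the " ORDER BY ..." suffix that the call sites below now append to
their select statements instead of hand-written strings; for example (taken from the
export.scala and build_log.scala hunks of this changeset):

    // ascending order over several columns, appended to a select statement
    Data.table.select(List(Data.theory_name, Data.name), Data.where_equal(session_name)) +
      SQL.order_by(List(Data.theory_name, Data.name))

    // descending order over a single column
    SQL.order_by(List(pull_date(afp)(table1)), descending = true)
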
--- a/src/Pure/General/url.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/General/url.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -102,7 +102,7 @@
   def canonical_file_name(uri: String): String = canonical_file(uri).getPath
 
 
-  /* generic path notation: local, ssh, rsync, ftp, http */
+  /* generic path notation: standard, platform, ssh, rsync, ftp, http */
 
   private val separators1 = "/\\"
   private val separators2 = ":/\\"
--- a/src/Pure/PIDE/command.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/PIDE/command.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -464,7 +464,7 @@
           loaded_files.files.map(file =>
             (Exn.capture {
               val src_path = Path.explode(file)
-              val name = Document.Node.Name(resources.append(node_name.master_dir, src_path))
+              val name = Document.Node.Name(resources.append_path(node_name.master_dir, src_path))
               val content = get_blob(name).map(blob => (blob.bytes.sha1_digest, blob.chunk))
               Blob(name, src_path, content)
             }).user_error)
--- a/src/Pure/PIDE/document.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/PIDE/document.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -91,8 +91,9 @@
     def bad_header(msg: String): Header = Header(errors = List(msg))
 
     object Name {
-      def apply(node: String, master_dir: String = "", theory: String = ""): Name =
-        new Name(node, master_dir, theory)
+      def apply(node: String, theory: String = ""): Name = new Name(node, theory)
+
+      def loaded_theory(theory: String): Document.Node.Name = Name(theory, theory = theory)
 
       val empty: Name = Name("")
 
@@ -103,10 +104,10 @@
       type Graph[A] = isabelle.Graph[Node.Name, A]
 
       def make_graph[A](entries: List[((Name, A), List[Name])]): Graph[A] =
-        Graph.make(entries, symmetric = true)(Ordering)
+        Graph.make(entries, converse = true)(Ordering)
     }
 
-    final class Name private(val node: String, val master_dir: String, val theory: String) {
+    final class Name private(val node: String, val theory: String) {
       override def hashCode: Int = node.hashCode
       override def equals(that: Any): Boolean =
         that match {
@@ -117,6 +118,8 @@
       def file_name: String = Url.get_base_name(node).getOrElse("")
 
       def path: Path = Path.explode(File.standard_path(node))
+
+      def master_dir: String = Url.strip_base_name(node).getOrElse("")
       def master_dir_path: Path = Path.explode(File.standard_path(master_dir))
 
       def is_theory: Boolean = theory.nonEmpty
@@ -125,16 +128,11 @@
 
       override def toString: String = if (is_theory) theory else node
 
-      def map(f: String => String): Name =
-        new Name(f(node), f(master_dir), theory)
-
       def json: JSON.Object.T =
         JSON.Object("node_name" -> node, "theory_name" -> theory)
     }
 
     sealed case class Entry(name: Node.Name, header: Node.Header) {
-      def map(f: String => String): Entry = copy(name = name.map(f))
-
       override def toString: String = name.toString
     }
 
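Document.Node.Name now stores only the node path and the theory name; the master directory
is derived on demand from the node via Url.strip_base_name. A minimal sketch, with a
hypothetical file path that is not part of this changeset:

    val name = Document.Node.Name("/home/user/Test.thy", theory = "Test")
    // name.master_dir == Url.strip_base_name(name.node).getOrElse("")
    // i.e. the directory prefix of the node, here presumably "/home/user"
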
--- a/src/Pure/PIDE/headless.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/PIDE/headless.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -306,7 +306,7 @@
       val dep_theories_set = dep_theories.toSet
       val dep_files =
         for (path <- dependencies.loaded_files)
-          yield Document.Node.Name(resources.append("", path))
+          yield Document.Node.Name(resources.append_path("", path))
 
       val use_theories_state = {
         val dep_graph = dependencies.theory_graph
--- a/src/Pure/PIDE/protocol.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/PIDE/protocol.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -25,10 +25,8 @@
   object Loading_Theory {
     def unapply(props: Properties.T): Option[(Document.Node.Name, Document_ID.Exec)] =
       (props, props, props) match {
-        case (Markup.Name(name), Position.File(file), Position.Id(id))
-        if Path.is_wellformed(file) =>
-          val master_dir = Path.explode(file).dir.implode
-          Some((Document.Node.Name(file, master_dir, name), id))
+        case (Markup.Name(theory), Position.File(file), Position.Id(id))
+        if Path.is_wellformed(file) => Some((Document.Node.Name(file, theory = theory), id))
         case _ => None
       }
   }
--- a/src/Pure/PIDE/rendering.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/PIDE/rendering.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -348,7 +348,7 @@
           if (text == "" || text.endsWith("/")) (path, "")
           else (path.dir, path.file_name)
 
-        val directory = new JFile(session.resources.append(snapshot.node_name.master_dir, dir))
+        val directory = new JFile(session.resources.append_path(snapshot.node_name.master_dir, dir))
         val files = directory.listFiles
         if (files == null) Nil
         else {
@@ -616,7 +616,7 @@
   }
 
   def perhaps_append_file(node_name: Document.Node.Name, name: String): String =
-    if (Path.is_valid(name)) session.resources.append(node_name.master_dir, Path.explode(name))
+    if (Path.is_valid(name)) session.resources.append_path(node_name.master_dir, Path.explode(name))
     else name
 
   def tooltips(elements: Markup.Elements, range: Text.Range): Option[Text.Info[List[XML.Tree]]] = {
--- a/src/Pure/PIDE/resources.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/PIDE/resources.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -72,17 +72,8 @@
 
   def migrate_name(name: Document.Node.Name): Document.Node.Name = name
 
-  def append(dir: String, source_path: Path): String =
-    (Path.explode(dir) + source_path).expand.implode
-
-  def file_node(file: Path, dir: String = "", theory: String = ""): Document.Node.Name = {
-    val node = append(dir, file)
-    val master_dir = append(dir, file.dir)
-    Document.Node.Name(node, master_dir, theory)
-  }
-
-  def loaded_theory_node(theory: String): Document.Node.Name =
-    Document.Node.Name(theory, "", theory)
+  def append_path(prefix: String, source_path: Path): String =
+    (Path.explode(prefix) + source_path).expand.implode
 
 
   /* source files of Isabelle/ML bootstrap */
@@ -141,7 +132,7 @@
     for {
       (name, theory) <- Thy_Header.ml_roots
       path = (pure_dir + Path.explode(name)).expand
-      node_name = Document.Node.Name(path.implode, path.dir.implode, theory)
+      node_name = Document.Node.Name(path.implode, theory = theory)
       file <- loaded_files(syntax, node_name, load_commands(syntax, node_name)())
     } yield file
   }
@@ -164,24 +155,24 @@
         case Some(info) => info.dirs
         case None => Nil
       }
-    dirs.collectFirst({
-      case dir if (dir + thy_file).is_file => file_node(dir + thy_file, theory = theory) })
+    dirs.collectFirst { case dir if (dir + thy_file).is_file =>
+      Document.Node.Name(append_path("", dir + thy_file), theory = theory) }
   }
 
-  def import_name(qualifier: String, dir: String, s: String): Document.Node.Name = {
+  def import_name(qualifier: String, prefix: String, s: String): Document.Node.Name = {
     val theory = theory_name(qualifier, Thy_Header.import_name(s))
     val literal_import =
       literal_theory(theory) && qualifier != sessions_structure.theory_qualifier(theory)
     if (literal_import && !Url.is_base_name(s)) {
       error("Bad import of theory from other session via file-path: " + quote(s))
     }
-    if (session_base.loaded_theory(theory)) loaded_theory_node(theory)
+    if (session_base.loaded_theory(theory)) Document.Node.Name.loaded_theory(theory)
     else {
       find_theory_node(theory) match {
         case Some(node_name) => node_name
         case None =>
-          if (Url.is_base_name(s) && literal_theory(s)) loaded_theory_node(theory)
-          else file_node(Path.explode(s).thy, dir = dir, theory = theory)
+          if (Url.is_base_name(s) && literal_theory(s)) Document.Node.Name.loaded_theory(theory)
+          else Document.Node.Name(append_path(prefix, Path.explode(s).thy), theory = theory)
       }
     }
   }
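
The base implementation of append_path is (Path.explode(prefix) + source_path).expand.implode,
i.e. the prefix directory and the source path are concatenated and then normalized. A minimal
sketch with hypothetical arguments, not part of this changeset:

    resources.append_path("/some/dir", Path.explode("A.thy"))   // expected "/some/dir/A.thy"
    resources.append_path("", Path.explode("dir/../B.thy"))     // expected "B.thy"
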
--- a/src/Pure/PIDE/session.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/PIDE/session.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -487,7 +487,7 @@
               case Protocol.Export(args)
               if args.id.isDefined && Value.Long.unapply(args.id.get).isDefined =>
                 val id = Value.Long.unapply(args.id.get).get
-                val entry = Export.make_entry(Sessions.DRAFT, args, msg.chunk, cache)
+                val entry = Export.Entry.make(Sessions.DRAFT, args, msg.chunk, cache)
                 change_command(_.add_export(id, (args.serial, entry)))
 
               case Protocol.Loading_Theory(node_name, id) =>
@@ -563,7 +563,7 @@
                 val snapshot = global_state.change_result(_.end_theory(id))
                 finished_theories.post(snapshot)
               }
-              file_formats.stop_session
+              file_formats.stop_session()
               phase = Session.Terminated(result)
               prover.reset()
 
--- a/src/Pure/ROOT.ML	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/ROOT.ML	Mon Jan 02 20:47:09 2023 +0000
@@ -366,4 +366,3 @@
 ML_file "Tools/jedit.ML";
 ML_file "Tools/ghc.ML";
 ML_file "Tools/generated_files.ML";
-
--- a/src/Pure/Thy/bibtex.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Thy/bibtex.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -26,8 +26,8 @@
 
     override def theory_suffix: String = "bibtex_file"
     override def theory_content(name: String): String =
-      """theory "bib" imports Pure begin bibtex_file """ +
-        Outer_Syntax.quote_string(name) + """ end"""
+      """theory "bib" imports Pure begin bibtex_file "." end"""
+    override def theory_excluded(name: String): Boolean = name == "bib"
 
     override def html_document(snapshot: Document.Snapshot): Option[Browser_Info.HTML_Document] = {
       val name = snapshot.node_name
--- a/src/Pure/Thy/export.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Thy/export.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -88,7 +88,7 @@
   def read_theory_names(db: SQL.Database, session_name: String): List[String] = {
     val select =
       Data.table.select(List(Data.theory_name), Data.where_equal(session_name), distinct = true) +
-      " ORDER BY " + Data.theory_name
+      SQL.order_by(List(Data.theory_name))
     db.using_statement(select)(stmt =>
       stmt.execute_query().iterator(_.string(Data.theory_name)).toList)
   }
@@ -96,7 +96,7 @@
   def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] = {
     val select =
       Data.table.select(List(Data.theory_name, Data.name), Data.where_equal(session_name)) +
-      " ORDER BY " + Data.theory_name + ", " + Data.name
+      SQL.order_by(List(Data.theory_name, Data.name))
     db.using_statement(select)(stmt =>
       stmt.execute_query().iterator(res =>
         Entry_Name(session = session_name,
@@ -107,13 +107,36 @@
   def message(msg: String, theory_name: String, name: String): String =
     msg + " " + quote(name) + " for theory " + quote(theory_name)
 
-  def empty_entry(theory_name: String, name: String): Entry =
-    Entry(Entry_Name(theory = theory_name, name = name),
-      false, Future.value(false, Bytes.empty), XML.Cache.none)
+  object Entry {
+    def apply(
+      entry_name: Entry_Name,
+      executable: Boolean,
+      body: Future[(Boolean, Bytes)],
+      cache: XML.Cache
+    ): Entry = new Entry(entry_name, executable, body, cache)
+
+    def empty(theory_name: String, name: String): Entry =
+      Entry(Entry_Name(theory = theory_name, name = name),
+        false, Future.value(false, Bytes.empty), XML.Cache.none)
 
-  sealed case class Entry(
-    entry_name: Entry_Name,
-    executable: Boolean,
+    def make(
+      session_name: String,
+      args: Protocol.Export.Args,
+      bytes: Bytes,
+      cache: XML.Cache
+    ): Entry = {
+      val body =
+        if (args.compress) Future.fork(bytes.maybe_compress(cache = cache.compress))
+        else Future.value((false, bytes))
+      val entry_name =
+        Entry_Name(session = session_name, theory = args.theory_name, name = args.name)
+      Entry(entry_name, args.executable, body, cache)
+    }
+  }
+
+  final class Entry private(
+    val entry_name: Entry_Name,
+    val executable: Boolean,
     body: Future[(Boolean, Bytes)],
     cache: XML.Cache
   ) {
@@ -122,6 +145,9 @@
     def name: String = entry_name.name
     override def toString: String = name
 
+    def is_finished: Boolean = body.is_finished
+    def cancel(): Unit = body.cancel()
+
     def compound_name: String = entry_name.compound_name
 
     def name_has_prefix(s: String): Boolean = name.startsWith(s)
@@ -130,25 +156,24 @@
     def name_extends(elems: List[String]): Boolean =
       name_elems.startsWith(elems) && name_elems != elems
 
-    def text: String = uncompressed.text
-
-    def uncompressed: Bytes = {
-      val (compressed, bytes) = body.join
-      if (compressed) bytes.uncompress(cache = cache.compress) else bytes
+    def bytes: Bytes = {
+      val (compressed, bs) = body.join
+      if (compressed) bs.uncompress(cache = cache.compress) else bs
     }
 
-    def uncompressed_yxml: XML.Body =
-      YXML.parse_body(UTF8.decode_permissive(uncompressed), cache = cache)
+    def text: String = bytes.text
+
+    def yxml: XML.Body = YXML.parse_body(UTF8.decode_permissive(bytes), cache = cache)
 
     def write(db: SQL.Database): Unit = {
-      val (compressed, bytes) = body.join
+      val (compressed, bs) = body.join
       db.using_statement(Data.table.insert()) { stmt =>
         stmt.string(1) = session_name
         stmt.string(2) = theory_name
         stmt.string(3) = name
         stmt.bool(4) = executable
         stmt.bool(5) = compressed
-        stmt.bytes(6) = bytes
+        stmt.bytes(6) = bs
         stmt.execute()
       }
     }
@@ -176,19 +201,6 @@
     (entry_name: Entry_Name) => regs.exists(_.matches(entry_name.compound_name))
   }
 
-  def make_entry(
-    session_name: String,
-    args: Protocol.Export.Args,
-    bytes: Bytes,
-    cache: XML.Cache
-  ): Entry = {
-    val body =
-      if (args.compress) Future.fork(bytes.maybe_compress(cache = cache.compress))
-      else Future.value((false, bytes))
-    val entry_name = Entry_Name(session = session_name, theory = args.theory_name, name = args.name)
-    Entry(entry_name, args.executable, body, cache)
-  }
-
 
   /* database consumer thread */
 
@@ -200,7 +212,7 @@
 
     private val consumer =
       Consumer_Thread.fork_bulk[(Entry, Boolean)](name = "export")(
-        bulk = { case (entry, _) => entry.body.is_finished },
+        bulk = { case (entry, _) => entry.is_finished },
         consume =
           { (args: List[(Entry, Boolean)]) =>
             val results =
@@ -208,7 +220,7 @@
                 for ((entry, strict) <- args)
                 yield {
                   if (progress.stopped) {
-                    entry.body.cancel()
+                    entry.cancel()
                     Exn.Res(())
                   }
                   else if (entry.entry_name.readable(db)) {
@@ -227,7 +239,7 @@
     def make_entry(session_name: String, args0: Protocol.Export.Args, body: Bytes): Unit = {
       if (!progress.stopped && !body.is_empty) {
         val args = if (db.is_server) args0.copy(compress = false) else args0
-        consumer.send(Export.make_entry(session_name, args, body, cache) -> args.strict)
+        consumer.send(Entry.make(session_name, args, body, cache) -> args.strict)
       }
     }
 
@@ -403,7 +415,7 @@
 
     def apply(theory: String, name: String, permissive: Boolean = false): Entry =
       get(theory, name) match {
-        case None if permissive => empty_entry(theory, name)
+        case None if permissive => Entry.empty(theory, name)
         case None => error("Missing export entry " + quote(compound_name(theory, name)))
         case Some(entry) => entry
       }
@@ -411,6 +423,24 @@
     def theory(theory: String, other_cache: Option[Term.Cache] = None): Theory_Context =
       new Theory_Context(session_context, theory, other_cache)
 
+    def node_source(name: Document.Node.Name): String = {
+      def snapshot_source: Option[String] =
+        for {
+          snapshot <- document_snapshot
+          node = snapshot.get_node(name)
+          text = node.source if text.nonEmpty
+        } yield text
+
+      val store = database_context.store
+      def db_source: Option[String] =
+        (for {
+          database <- db_hierarchy.iterator
+          file <- store.read_sources(database.db, database.session, name = name.node).iterator
+        } yield file.text).nextOption()
+
+      snapshot_source orElse db_source getOrElse ""
+    }
+
     def classpath(): List[File.Content] = {
       (for {
         session <- session_stack.iterator
@@ -420,7 +450,7 @@
         entry_name <- entry_names(session = session).iterator
         if matcher(entry_name)
         entry <- get(entry_name.theory, entry_name.name).iterator
-      } yield File.content(entry.entry_name.make_path(), entry.uncompressed)).toList
+      } yield File.content(entry.entry_name.make_path(), entry.bytes)).toList
     }
 
     override def toString: String =
@@ -438,9 +468,9 @@
     def apply(name: String, permissive: Boolean = false): Entry =
       session_context.apply(theory, name, permissive = permissive)
 
-    def uncompressed_yxml(name: String): XML.Body =
+    def yxml(name: String): XML.Body =
       get(name) match {
-        case Some(entry) => entry.uncompressed_yxml
+        case Some(entry) => entry.yxml
         case None => Nil
       }
 
@@ -492,7 +522,7 @@
           val path = export_dir + entry_name.make_path(prune = export_prune)
           progress.echo("export " + path + (if (entry.executable) " (executable)" else ""))
           Isabelle_System.make_directory(path.dir)
-          val bytes = entry.uncompressed
+          val bytes = entry.bytes
           if (!path.is_file || Bytes.read(path) != bytes) Bytes.write(path, bytes)
           File.set_executable(path, entry.executable)
         }
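
Export entries are now constructed via the Entry companion object and read through
bytes/text/yxml; for example, following the call sites in session.scala and
export_theory.scala (args, msg and cache come from that surrounding context):

    val entry = Export.Entry.make(Sessions.DRAFT, args, msg.chunk, cache)
    val content: Bytes = entry.bytes     // uncompressed payload
    val body: XML.Body = entry.yxml      // payload parsed as YXML
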
--- a/src/Pure/Thy/export_theory.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Thy/export_theory.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -103,7 +103,7 @@
 
   def read_theory_parents(theory_context: Export.Theory_Context): Option[List[String]] =
     theory_context.get(Export.THEORY_PREFIX + "parents")
-      .map(entry => Library.trim_split_lines(entry.uncompressed.text))
+      .map(entry => Library.trim_split_lines(entry.text))
 
   def theory_names(
     session_context: Export.Session_Context,
@@ -233,7 +233,7 @@
         case _ => err()
       }
     }
-    theory_context.uncompressed_yxml(export_name).map(decode_entity)
+    theory_context.yxml(export_name).map(decode_entity)
   }
 
 
@@ -393,7 +393,7 @@
 
     for { entry <- theory_context.get(Export.PROOFS_PREFIX + id.serial) }
     yield {
-      val body = entry.uncompressed_yxml
+      val body = entry.yxml
       val (typargs, (args, (prop_body, proof_body))) = {
         import XML.Decode._
         import Term_XML.Decode._
@@ -539,7 +539,7 @@
   }
 
   def read_classrel(theory_context: Export.Theory_Context): List[Classrel] = {
-    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "classrel")
+    val body = theory_context.yxml(Export.THEORY_PREFIX + "classrel")
     val classrel = {
       import XML.Decode._
       list(pair(decode_prop, pair(string, string)))(body)
@@ -559,7 +559,7 @@
   }
 
   def read_arities(theory_context: Export.Theory_Context): List[Arity] = {
-    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "arities")
+    val body = theory_context.yxml(Export.THEORY_PREFIX + "arities")
     val arities = {
       import XML.Decode._
       import Term_XML.Decode._
@@ -577,7 +577,7 @@
   }
 
   def read_constdefs(theory_context: Export.Theory_Context): List[Constdef] = {
-    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "constdefs")
+    val body = theory_context.yxml(Export.THEORY_PREFIX + "constdefs")
     val constdefs = {
       import XML.Decode._
       list(pair(string, string))(body)
@@ -606,7 +606,7 @@
   }
 
   def read_typedefs(theory_context: Export.Theory_Context): List[Typedef] = {
-    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "typedefs")
+    val body = theory_context.yxml(Export.THEORY_PREFIX + "typedefs")
     val typedefs = {
       import XML.Decode._
       import Term_XML.Decode._
@@ -640,7 +640,7 @@
   }
 
   def read_datatypes(theory_context: Export.Theory_Context): List[Datatype] = {
-    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "datatypes")
+    val body = theory_context.yxml(Export.THEORY_PREFIX + "datatypes")
     val datatypes = {
       import XML.Decode._
       import Term_XML.Decode._
@@ -730,7 +730,7 @@
   }
 
   def read_spec_rules(theory_context: Export.Theory_Context): List[Spec_Rule] = {
-    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "spec_rules")
+    val body = theory_context.yxml(Export.THEORY_PREFIX + "spec_rules")
     val spec_rules = {
       import XML.Decode._
       import Term_XML.Decode._
@@ -753,7 +753,7 @@
   def read_others(theory_context: Export.Theory_Context): Map[String, List[Entity[Other]]] = {
     val kinds =
       theory_context.get(Export.THEORY_PREFIX + "other_kinds") match {
-        case Some(entry) => split_lines(entry.uncompressed.text)
+        case Some(entry) => split_lines(entry.text)
         case None => Nil
       }
     val other = Other()
--- a/src/Pure/Thy/file_format.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Thy/file_format.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -28,6 +28,8 @@
     def get_theory(name: Document.Node.Name): Option[File_Format] = get(name.theory)
     def is_theory(name: Document.Node.Name): Boolean = get_theory(name).isDefined
 
+    def theory_excluded(name: String): Boolean = file_formats.exists(_.theory_excluded(name))
+
     def parse_data(name: String, text: String): AnyRef =
       get(name) match {
         case Some(file_format) => file_format.parse_data(name, text)
@@ -49,7 +51,7 @@
     def prover_options(options: Options): Options =
       agents.foldLeft(options) { case (opts, agent) => agent.prover_options(opts) }
 
-    def stop_session: Unit = agents.foreach(_.stop())
+    def stop_session(): Unit = agents.foreach(_.stop())
   }
 
   trait Agent {
@@ -74,18 +76,19 @@
 
   def theory_suffix: String = ""
   def theory_content(name: String): String = ""
+  def theory_excluded(name: String): Boolean = false
 
   def make_theory_name(
     resources: Resources,
     name: Document.Node.Name
   ): Option[Document.Node.Name] = {
     for {
-      thy <- Url.get_base_name(name.node)
+      theory <- Url.get_base_name(name.node)
       if detect(name.node) && theory_suffix.nonEmpty
     }
     yield {
-      val thy_node = resources.append(name.node, Path.explode(theory_suffix))
-      Document.Node.Name(thy_node, name.master_dir, thy)
+      val node = resources.append_path(name.node, Path.explode(theory_suffix))
+      Document.Node.Name(node, theory = theory)
     }
   }
 
--- a/src/Pure/Thy/sessions.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Thy/sessions.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -32,12 +32,13 @@
   def is_pure(name: String): Boolean = name == Thy_Header.PURE
 
   def illegal_session(name: String): Boolean = name == "" || name == DRAFT
-  def illegal_theory(name: String): Boolean = name == root_name || name == "bib"
+  def illegal_theory(name: String): Boolean =
+    name == root_name || File_Format.registry.theory_excluded(name)
 
 
   /* ROOTS file format */
 
-  class File_Format extends isabelle.File_Format {
+  class ROOTS_File_Format extends File_Format {
     val format_name: String = roots_name
     val file_ext = ""
 
@@ -49,8 +50,65 @@
 
     override def theory_suffix: String = "ROOTS_file"
     override def theory_content(name: String): String =
-      """theory "ROOTS" imports Pure begin ROOTS_file """ +
-        Outer_Syntax.quote_string(name) + """ end"""
+      """theory "ROOTS" imports Pure begin ROOTS_file "." end"""
+    override def theory_excluded(name: String): Boolean = name == "ROOTS"
+  }
+
+
+  /* source files */
+
+  sealed case class Source_File(
+    name: String,
+    digest: SHA1.Digest,
+    compressed: Boolean,
+    body: Bytes,
+    cache: Compress.Cache
+  ) {
+    override def toString: String = name
+
+    def bytes: Bytes = if (compressed) body.uncompress(cache = cache) else body
+    def text: String = bytes.text
+  }
+
+  object Sources {
+    val session_name = SQL.Column.string("session_name").make_primary_key
+    val name = SQL.Column.string("name").make_primary_key
+    val digest = SQL.Column.string("digest")
+    val compressed = SQL.Column.bool("compressed")
+    val body = SQL.Column.bytes("body")
+
+    val table =
+      SQL.Table("isabelle_sources", List(session_name, name, digest, compressed, body))
+
+    def where_equal(session_name: String, name: String = ""): SQL.Source =
+      "WHERE " + Sources.session_name.equal(session_name) +
+        (if (name == "") "" else " AND " + Sources.name.equal(name))
+
+    def load(session_base: Base, cache: Compress.Cache = Compress.Cache.none): Sources =
+      new Sources(
+        session_base.session_sources.foldLeft(Map.empty) {
+          case (sources, (path, digest)) =>
+            def err(): Nothing = error("Incoherent digest for source file: " + path)
+            val name = path.implode_symbolic
+            sources.get(name) match {
+              case Some(source_file) =>
+                if (source_file.digest == digest) sources else err()
+              case None =>
+                val bytes = Bytes.read(path)
+                if (bytes.sha1_digest == digest) {
+                  val (compressed, body) =
+                    bytes.maybe_compress(Compress.Options_Zstd(), cache = cache)
+                  val file = Source_File(name, digest, compressed, body, cache)
+                  sources + (name -> file)
+                }
+                else err()
+            }
+        })
+  }
+
+  class Sources private(rep: Map[String, Source_File]) extends Iterable[Source_File] {
+    override def toString: String = rep.values.toList.sortBy(_.name).mkString("Sources(", ", ", ")")
+    override def iterator: Iterator[Source_File] = rep.valuesIterator
   }
 
 
@@ -67,7 +125,7 @@
     document_theories: List[Document.Node.Name] = Nil,
     loaded_theories: Graph[String, Outer_Syntax] = Graph.string,  // cumulative imports
     used_theories: List[(Document.Node.Name, Options)] = Nil,  // new imports
-    load_commands: Map[Document.Node.Name, List[Command_Span.Span]] = Map.empty,
+    theory_load_commands: Map[String, List[Command_Span.Span]] = Map.empty,
     known_theories: Map[String, Document.Node.Entry] = Map.empty,
     known_loaded_files: Map[String, List[Path]] = Map.empty,
     overall_syntax: Outer_Syntax = Outer_Syntax.empty,
@@ -296,6 +354,9 @@
               try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
               catch { case ERROR(msg) => (Nil, List(msg)) }
 
+            val theory_load_commands =
+              (for ((name, span) <- load_commands.iterator) yield name.theory -> span).toMap
+
             val loaded_files =
               load_commands.map({ case (name, spans) => dependencies.loaded_files(name, spans) })
 
@@ -451,7 +512,7 @@
                 document_theories = document_theories,
                 loaded_theories = dependencies.loaded_theories,
                 used_theories = dependencies.theories_adjunct,
-                load_commands = load_commands.toMap,
+                theory_load_commands = theory_load_commands,
                 known_theories = known_theories,
                 known_loaded_files = known_loaded_files,
                 overall_syntax = overall_syntax,
@@ -1493,6 +1554,9 @@
           db.using_statement(Session_Info.augment_table)(_.execute())
         }
 
+        db.create_table(Sources.table)
+        db.using_statement(Sources.table.delete(Sources.where_equal(name)))(_.execute())
+
         db.create_table(Export.Data.table)
         db.using_statement(
           Export.Data.table.delete(Export.Data.session_name.where_equal(name)))(_.execute())
@@ -1519,14 +1583,15 @@
 
     def write_session_info(
       db: SQL.Database,
-      name: String,
+      session_name: String,
+      sources: Sources,
       build_log: Build_Log.Session_Info,
       build: Build.Session_Info
     ): Unit = {
       db.transaction {
-        val table = Session_Info.table
-        db.using_statement(table.insert()) { stmt =>
-          stmt.string(1) = name
+        write_sources(db, session_name, sources)
+        db.using_statement(Session_Info.table.insert()) { stmt =>
+          stmt.string(1) = session_name
           stmt.bytes(2) = Properties.encode(build_log.session_timing)
           stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = cache.compress)
           stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = cache.compress)
@@ -1586,5 +1651,40 @@
       }
       else None
     }
+
+
+    /* session sources */
+
+    def write_sources(db: SQL.Database, session_name: String, sources: Sources): Unit = {
+      for (source_file <- sources) {
+        db.using_statement(Sources.table.insert()) { stmt =>
+          stmt.string(1) = session_name
+          stmt.string(2) = source_file.name
+          stmt.string(3) = source_file.digest.toString
+          stmt.bool(4) = source_file.compressed
+          stmt.bytes(5) = source_file.body
+          stmt.execute()
+        }
+      }
+    }
+
+    def read_sources(
+      db: SQL.Database,
+      session_name: String,
+      name: String = ""
+    ): List[Source_File] = {
+      val select =
+        Sources.table.select(Nil,
+          Sources.where_equal(session_name, name = name) + SQL.order_by(List(Sources.name)))
+      db.using_statement(select) { stmt =>
+        (stmt.execute_query().iterator { res =>
+          val res_name = res.string(Sources.name)
+          val digest = SHA1.fake_digest(res.string(Sources.digest))
+          val compressed = res.bool(Sources.compressed)
+          val body = res.bytes(Sources.body)
+          Source_File(res_name, digest, compressed, body, cache.compress)
+        }).toList
+      }
+    }
   }
 }
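
The new Sources table records the (possibly compressed) source files of a session in its
build database: Sources.load gathers them from the session base, while write_sources and
read_sources move them in and out of the database. A minimal sketch following the call
sites in build_job.scala and build.scala (store, db, session_background, session_name and
the remaining arguments are hypothetical values from that context):

    val sources = Sessions.Sources.load(session_background.base, cache = store.cache.compress)
    store.write_session_info(db, session_name, sources,
      build_log = build_log, build = build)
    val files: List[Sessions.Source_File] = store.read_sources(db, session_name)
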
--- a/src/Pure/Thy/thy_header.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Thy/thy_header.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -93,7 +93,8 @@
 
   def theory_name(s: String): String =
     get_thy_name(s) match {
-      case Some(name) => bootstrap_name(name)
+      case Some(name) =>
+        bootstrap_thys.collectFirst({ case (a, b) if a == name => b }).getOrElse(name)
       case None =>
         Url.get_base_name(s) match {
           case Some(name) =>
@@ -109,9 +110,6 @@
   def is_bootstrap(theory: String): Boolean =
     bootstrap_thys.exists({ case (_, b) => b == theory })
 
-  def bootstrap_name(theory: String): String =
-    bootstrap_thys.collectFirst({ case (a, b) if a == theory => b }).getOrElse(theory)
-
 
   /* parser */
 
--- a/src/Pure/Tools/build.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Tools/build.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -348,7 +348,7 @@
 
             // write database
             using(store.open_database(session_name, output = true))(db =>
-              store.write_session_info(db, session_name,
+              store.write_session_info(db, session_name, job.session_sources,
                 build_log =
                   if (process_result.timeout) build_log.error("Timeout") else build_log,
                 build =
--- a/src/Pure/Tools/build_job.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Tools/build_job.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -22,7 +22,7 @@
 
     def read_xml(name: String): XML.Body =
       YXML.parse_body(
-        Symbol.output(unicode_symbols, UTF8.decode_permissive(read(name).uncompressed)),
+        Symbol.output(unicode_symbols, UTF8.decode_permissive(read(name).bytes)),
         cache = theory_context.cache)
 
     for {
@@ -33,8 +33,7 @@
       val master_dir =
         Url.strip_base_name(thy_file).getOrElse(
           error("Cannot determine theory master directory: " + quote(thy_file)))
-      val node_name =
-        Document.Node.Name(thy_file, master_dir = master_dir, theory = theory_context.theory)
+      val node_name = Document.Node.Name(thy_file, theory = theory_context.theory)
 
       val results =
         Command.Results.make(
@@ -247,6 +246,9 @@
   val info: Sessions.Info = session_background.sessions_structure(session_name)
   val options: Options = NUMA.policy_options(info.options, numa_node)
 
+  val session_sources: Sessions.Sources =
+    Sessions.Sources.load(session_background.base, cache = store.cache.compress)
+
   private val future_result: Future[Process_Result] =
     Future.thread("build", uninterruptible = true) {
       val parent = info.parent.getOrElse("")
@@ -274,8 +276,7 @@
           override val cache: Term.Cache = store.cache
 
           override def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info = {
-            val name1 = name.map(s => Path.explode(s).expand.implode)
-            session_background.base.load_commands.get(name1) match {
+            session_background.base.theory_load_commands.get(name.theory) match {
               case Some(spans) =>
                 val syntax = session_background.base.theory_syntax(name)
                 Command.build_blobs_info(syntax, name, spans)
--- a/src/Pure/Tools/dump.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Tools/dump.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -57,13 +57,13 @@
           for {
             entry <- args.snapshot.exports
             if entry.name_has_prefix(Export.DOCUMENT_PREFIX)
-          } args.write(Path.explode(entry.name), entry.uncompressed)),
+          } args.write(Path.explode(entry.name), entry.bytes)),
       Aspect("theory", "foundational theory content",
         args =>
           for {
             entry <- args.snapshot.exports
             if entry.name_has_prefix(Export.THEORY_PREFIX)
-          } args.write(Path.explode(entry.name), entry.uncompressed),
+          } args.write(Path.explode(entry.name), entry.bytes),
         options = List("export_theory"))
     ).sortBy(_.name)
 
--- a/src/Pure/Tools/server_commands.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Pure/Tools/server_commands.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -266,7 +266,7 @@
                     val matcher = Export.make_matcher(List(args.export_pattern))
                     for { entry <- snapshot.exports if matcher(entry.entry_name) }
                     yield {
-                      val (base64, body) = entry.uncompressed.maybe_encode_base64
+                      val (base64, body) = entry.bytes.maybe_encode_base64
                       JSON.Object("name" -> entry.name, "base64" -> base64, "body" -> body)
                     }
                   }))
--- a/src/Tools/VSCode/src/vscode_resources.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Tools/VSCode/src/vscode_resources.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -93,24 +93,21 @@
     find_theory(file) getOrElse {
       val node = file.getPath
       val theory = theory_name(Sessions.DRAFT, Thy_Header.theory_name(node))
-      if (session_base.loaded_theory(theory)) loaded_theory_node(theory)
-      else {
-        val master_dir = file.getParent
-        Document.Node.Name(node, master_dir, theory)
-      }
+      if (session_base.loaded_theory(theory)) Document.Node.Name.loaded_theory(theory)
+      else Document.Node.Name(node, theory = theory)
     }
 
   override def migrate_name(standard_name: Document.Node.Name): Document.Node.Name =
     node_name(Path.explode(standard_name.node).canonical_file)
 
-  override def append(dir: String, source_path: Path): String = {
+  override def append_path(prefix: String, source_path: Path): String = {
     val path = source_path.expand
-    if (dir == "" || path.is_absolute) File.platform_path(path)
-    else if (path.is_current) dir
-    else if (path.is_basic && !dir.endsWith("/") && !dir.endsWith(JFile.separator))
-      dir + JFile.separator + File.platform_path(path)
-    else if (path.is_basic) dir + File.platform_path(path)
-    else File.absolute_name(new JFile(dir + JFile.separator + File.platform_path(path)))
+    if (prefix == "" || path.is_absolute) File.platform_path(path)
+    else if (path.is_current) prefix
+    else if (path.is_basic && !prefix.endsWith("/") && !prefix.endsWith(JFile.separator))
+      prefix + JFile.separator + File.platform_path(path)
+    else if (path.is_basic) prefix + File.platform_path(path)
+    else File.absolute_name(new JFile(prefix + JFile.separator + File.platform_path(path)))
   }
 
   def get_models(): Iterable[VSCode_Model] = state.value.models.values
--- a/src/Tools/jEdit/src/isabelle_export.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Tools/jEdit/src/isabelle_export.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -56,7 +56,7 @@
                 } yield {
                   val is_dir = entry.name_elems.length > depth
                   val path = Export.implode_name(theory :: entry.name_elems.take(depth))
-                  val file_size = if (is_dir) None else Some(entry.uncompressed.length.toLong)
+                  val file_size = if (is_dir) None else Some(entry.bytes.length.toLong)
                   (path, file_size)
                 }).toSet.toList
               (for ((path, file_size) <- entries.iterator) yield {
@@ -86,7 +86,7 @@
               if node_name.theory == theory
               (_, entry) <- snapshot.state.node_exports(version, node_name).iterator
               if entry.name_elems == name_elems
-            } yield entry.uncompressed).find(_ => true).getOrElse(Bytes.empty)
+            } yield entry.bytes).find(_ => true).getOrElse(Bytes.empty)
 
           bytes.stream()
       }
--- a/src/Tools/jEdit/src/jedit_resources.scala	Mon Jan 02 20:46:24 2023 +0000
+++ b/src/Tools/jEdit/src/jedit_resources.scala	Mon Jan 02 20:47:09 2023 +0000
@@ -35,11 +35,8 @@
       val vfs = VFSManager.getVFSForPath(path)
       val node = if (vfs.isInstanceOf[FileVFS]) MiscUtilities.resolveSymlinks(path) else path
       val theory = theory_name(Sessions.DRAFT, Thy_Header.theory_name(node))
-      if (session_base.loaded_theory(theory)) loaded_theory_node(theory)
-      else {
-        val master_dir = vfs.getParentOfPath(path)
-        Document.Node.Name(node, master_dir, theory)
-      }
+      if (session_base.loaded_theory(theory)) Document.Node.Name.loaded_theory(theory)
+      else Document.Node.Name(node, theory = theory)
     }
 
   def node_name(buffer: Buffer): Document.Node.Name =
@@ -53,19 +50,19 @@
   override def migrate_name(standard_name: Document.Node.Name): Document.Node.Name =
     node_name(File.platform_path(Path.explode(standard_name.node).canonical))
 
-  override def append(dir: String, source_path: Path): String = {
+  override def append_path(prefix: String, source_path: Path): String = {
     val path = source_path.expand
-    if (dir == "" || path.is_absolute) {
+    if (prefix == "" || path.is_absolute) {
       MiscUtilities.resolveSymlinks(File.platform_path(path))
     }
-    else if (path.is_current) dir
+    else if (path.is_current) prefix
     else {
-      val vfs = VFSManager.getVFSForPath(dir)
+      val vfs = VFSManager.getVFSForPath(prefix)
       if (vfs.isInstanceOf[FileVFS]) {
         MiscUtilities.resolveSymlinks(
-          vfs.constructPath(dir, File.platform_path(path)))
+          vfs.constructPath(prefix, File.platform_path(path)))
       }
-      else vfs.constructPath(dir, File.standard_path(path))
+      else vfs.constructPath(prefix, File.standard_path(path))
     }
   }