merged
author wenzelm
date Sat, 06 Aug 2022 23:13:35 +0200
changeset 75785 16135603d9c7
parent 75719 dcd3ef2905d6 (current diff)
parent 75784 d31193963e2d (diff)
child 75786 ff6c1a82270f
merged
--- a/src/Pure/Admin/build_doc.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Admin/build_doc.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -54,9 +54,12 @@
             progress.expose_interrupt()
             progress.echo("Documentation " + quote(doc) + " ...")
 
-            using(store.open_database_context())(db_context =>
-              Document_Build.build_documents(Document_Build.context(session, deps, db_context),
-                output_pdf = Some(Path.explode("~~/doc"))))
+            using(Export.open_session_context(store, deps.base_info(session))) {
+              session_context =>
+                Document_Build.build_documents(
+                  Document_Build.context(session_context),
+                  output_pdf = Some(Path.explode("~~/doc")))
+            }
             None
           }
           catch {
--- a/src/Pure/Admin/build_log.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Admin/build_log.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -910,7 +910,7 @@
             db2.create_view(Data.universal_table)
           }
         }
-        db2.rebuild
+        db2.rebuild()
       }
     }
 
--- a/src/Pure/General/sql.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/General/sql.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -303,13 +303,13 @@
     def close(): Unit = connection.close()
 
     def transaction[A](body: => A): A = {
-      val auto_commit = connection.getAutoCommit
+      val auto_commit = connection.getAutoCommit()
       try {
         connection.setAutoCommit(false)
-        val savepoint = connection.setSavepoint
+        val savepoint = connection.setSavepoint()
         try {
           val result = body
-          connection.commit
+          connection.commit()
           result
         }
         catch { case exn: Throwable => connection.rollback(savepoint); throw exn }
@@ -403,7 +403,7 @@
     def insert_permissive(table: SQL.Table, sql: SQL.Source = ""): SQL.Source =
       table.insert_cmd("INSERT OR IGNORE", sql = sql)
 
-    def rebuild: Unit = using_statement("VACUUM")(_.execute())
+    def rebuild(): Unit = using_statement("VACUUM")(_.execute())
   }
 }
 
--- a/src/Pure/PIDE/document.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/PIDE/document.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -633,8 +633,12 @@
     lazy val exports: List[Export.Entry] =
       state.node_exports(version, node_name).iterator.map(_._2).toList
 
-    lazy val exports_map: Map[String, Export.Entry] =
-      (for (entry <- exports.iterator) yield (entry.name, entry)).toMap
+    lazy val all_exports: Map[Export.Entry_Name, Export.Entry] =
+      (for {
+        (name, _) <- version.nodes.iterator
+        (_, entry) <- state.node_exports(version, name).iterator
+        if entry.entry_name.session == Sessions.DRAFT
+      } yield entry.entry_name -> entry).toMap
 
 
     /* find command */
--- a/src/Pure/PIDE/headless.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/PIDE/headless.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -216,12 +216,12 @@
               (for {
                 name <- dep_graph.keys_iterator
                 if !loaded_theory(name)
-              } yield { (name -> Document_Status.Node_Status.make(state, version, name)) }).toList
+              } yield name -> Document_Status.Node_Status.make(state, version, name)).toList
             val nodes_committed =
               (for {
                 name <- dep_graph.keys_iterator
                 status <- already_committed1.get(name)
-              } yield (name -> status)).toList
+              } yield name -> status).toList
             Some(Exn.Res(new Use_Theories_Result(state, version, nodes, nodes_committed)))
           }
           else result
@@ -314,7 +314,7 @@
           }
 
         isabelle.Session.Consumer[isabelle.Session.Commands_Changed](getClass.getName) {
-          case changed =>
+          changed =>
             if (changed.nodes.exists(dep_theories_set)) {
               val snapshot = session.snapshot()
               val state = snapshot.state
@@ -339,7 +339,7 @@
                       (name, node_status) <- nodes_status1.present.iterator
                       if changed.nodes.contains(name) && !st.already_committed.isDefinedAt(name)
                       p1 = node_status.percentage
-                      if p1 > 0 && Some(p1) != st.nodes_status.get(name).map(_.percentage)
+                      if p1 > 0 && !st.nodes_status.get(name).map(_.percentage).contains(p1)
                     } yield Progress.Theory(name.theory, percentage = Some(p1))).toList
 
                   (theory_progress, st.update(nodes_status1))
@@ -381,7 +381,7 @@
       val nodes =
         if (all) None
         else Some(theories.map(resources.import_name(qualifier, master_directory(master_dir), _)))
-      resources.purge_theories(session, nodes)
+      resources.purge_theories(nodes)
     }
   }
 
@@ -513,7 +513,6 @@
       }
 
       def unload_theories(
-        session: Session,
         id: UUID.T,
         theories: List[Document.Node.Name]
       ) : (List[Document.Edit_Text], State) = {
@@ -532,7 +531,6 @@
       }
 
       def purge_theories(
-        session: Session,
         nodes: Option[List[Document.Node.Name]]
       ) : ((List[Document.Node.Name], List[Document.Node.Name], List[Document.Edit_Text]), State) = {
         val all_nodes = theory_graph.topological_order
@@ -622,7 +620,7 @@
 
     def unload_theories(session: Session, id: UUID.T, theories: List[Document.Node.Name]): Unit = {
       state.change { st =>
-        val (edits, st1) = st.unload_theories(session, id, theories)
+        val (edits, st1) = st.unload_theories(id, theories)
         session.update(st.doc_blobs, edits)
         st1
       }
@@ -630,19 +628,18 @@
 
     def clean_theories(session: Session, id: UUID.T, theories: List[Document.Node.Name]): Unit = {
       state.change { st =>
-        val (edits1, st1) = st.unload_theories(session, id, theories)
-        val ((_, _, edits2), st2) = st1.purge_theories(session, None)
+        val (edits1, st1) = st.unload_theories(id, theories)
+        val ((_, _, edits2), st2) = st1.purge_theories(None)
         session.update(st.doc_blobs, edits1 ::: edits2)
         st2
       }
     }
 
     def purge_theories(
-      session: Session,
       nodes: Option[List[Document.Node.Name]]
     ) : (List[Document.Node.Name], List[Document.Node.Name]) = {
       state.change_result { st =>
-        val ((purged, retained, _), st1) = st.purge_theories(session, nodes)
+        val ((purged, retained, _), st1) = st.purge_theories(nodes)
         ((purged, retained), st1)
       }
     }
--- a/src/Pure/PIDE/resources.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/PIDE/resources.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -35,6 +35,9 @@
   resources =>
 
 
+  override def toString: String = "Resources(" + session_base.toString + ")"
+
+
   /* init session */
 
   def init_session_yxml: String = {
@@ -407,7 +410,7 @@
     def get_syntax(name: Document.Node.Name): Outer_Syntax =
       loaded_theories.get_node(name.theory)
 
-    def load_commands: List[(Document.Node.Name, List[Command_Span.Span])] =
+    lazy val load_commands: List[(Document.Node.Name, List[Command_Span.Span])] =
       theories.zip(
         Par_List.map((e: () => List[Command_Span.Span]) => e(),
           theories.map(name => resources.load_commands(get_syntax(name), name))))
--- a/src/Pure/PIDE/session.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/PIDE/session.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -480,7 +480,7 @@
               case Protocol.Export(args)
               if args.id.isDefined && Value.Long.unapply(args.id.get).isDefined =>
                 val id = Value.Long.unapply(args.id.get).get
-                val entry = Export.make_entry("", args, msg.chunk, cache)
+                val entry = Export.make_entry(Sessions.DRAFT, args, msg.chunk, cache)
                 change_command(_.add_export(id, (args.serial, entry)))
 
               case Protocol.Loading_Theory(node_name, id) =>
--- a/src/Pure/Thy/document_build.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Thy/document_build.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -116,30 +116,23 @@
       map(name => texinputs + Path.basic(name))
 
   def context(
-    session: String,
-    deps: Sessions.Deps,
-    db_context: Sessions.Database_Context,
+    session_context: Export.Session_Context,
     progress: Progress = new Progress
-  ): Context = {
-    val structure = deps.sessions_structure
-    val info = structure(session)
-    val base = deps(session)
-    val hierarchy = deps.sessions_structure.build_hierarchy(session)
-    val classpath = db_context.get_classpath(structure, session)
-    new Context(info, base, hierarchy, db_context, classpath, progress)
-  }
+  ): Context = new Context(session_context, progress)
 
   final class Context private[Document_Build](
-    info: Sessions.Info,
-    base: Sessions.Base,
-    hierarchy: List[String],
-    db_context: Sessions.Database_Context,
-    val classpath: List[File.Content_Bytes],
+    val session_context: Export.Session_Context,
     val progress: Progress = new Progress
   ) {
     /* session info */
 
-    def session: String = info.name
+    def session: String = session_context.session_name
+
+    private val info = session_context.sessions_structure(session)
+    private val base = session_context.session_base
+
+    val classpath: List[File.Content_Bytes] = session_context.classpath()
+
     def options: Options = info.options
 
     def document_bibliography: Boolean = options.bool("document_bibliography")
@@ -159,22 +152,22 @@
         .find(_.name == name).getOrElse(error("Bad document_build engine " + quote(name)))
     }
 
-    def get_export(theory: String, name: String): Export.Entry =
-      db_context.get_export(hierarchy, theory, name)
-
 
     /* document content */
 
     def documents: List[Document_Variant] = info.documents
 
-    def session_theories: List[Document.Node.Name] = base.session_theories
-    def document_theories: List[Document.Node.Name] = session_theories ::: base.document_theories
+    def proper_session_theories: List[Document.Node.Name] = base.proper_session_theories
+
+    def document_theories: List[Document.Node.Name] =
+      proper_session_theories ::: base.document_theories
 
     lazy val document_latex: List[File.Content_XML] =
       for (name <- document_theories)
       yield {
         val path = Path.basic(tex_name(name))
-        val content = YXML.parse_body(get_export(name.theory, Export.DOCUMENT_LATEX).text)
+        val entry = session_context(name.theory, Export.DOCUMENT_LATEX, permissive = true)
+        val content = YXML.parse_body(entry.text)
         File.Content(path, content)
       }
 
@@ -188,7 +181,7 @@
       val path = Path.basic("session.tex")
       val content =
         Library.terminate_lines(
-          base.session_theories.map(name => "\\input{" + tex_name(name) + "}"))
+          base.proper_session_theories.map(name => "\\input{" + tex_name(name) + "}"))
       File.Content(path, content)
     }
 
@@ -253,7 +246,8 @@
 
     def old_document(directory: Directory): Option[Document_Output] =
       for {
-        old_doc <- db_context.input_database(session)(read_document(_, _, directory.doc.name))
+        db <- session_context.session_db()
+        old_doc <- read_document(db, session, directory.doc.name)
         if old_doc.sources == directory.sources
       }
       yield old_doc
@@ -479,12 +473,15 @@
             Sessions.load_structure(options + "document=pdf", dirs = dirs).
               selection_deps(Sessions.Selection.session(session))
 
+          val session_base_info = deps.base_info(session)
+
           if (output_sources.isEmpty && output_pdf.isEmpty) {
             progress.echo_warning("No output directory")
           }
 
-          using(store.open_database_context()) { db_context =>
-            build_documents(context(session, deps, db_context, progress = progress),
+          using(Export.open_session_context(store, session_base_info)) { session_context =>
+            build_documents(
+              context(session_context, progress = progress),
               output_sources = output_sources, output_pdf = output_pdf,
               verbose = verbose_latex)
           }
--- a/src/Pure/Thy/export.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Thy/export.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -23,6 +23,7 @@
   val DOCUMENT_CITATIONS = DOCUMENT_PREFIX + "citations"
   val THEORY_PREFIX: String = "theory/"
   val PROOFS_PREFIX: String = "proofs/"
+  val THEORY_PARENTS: String = THEORY_PREFIX + "parents"
 
   def explode_name(s: String): List[String] = space_explode('/', s)
   def implode_name(elems: Iterable[String]): String = elems.mkString("/")
@@ -83,22 +84,13 @@
         else None
       }
     }
-
-    def read(dir: Path, cache: XML.Cache): Option[Entry] = {
-      val path = dir + Path.basic(theory) + Path.explode(name)
-      if (path.is_file) {
-        val executable = File.is_executable(path)
-        val uncompressed = Bytes.read(path)
-        val body = Future.value((false, uncompressed))
-        Some(Entry(this, executable, body, cache))
-      }
-      else None
-    }
   }
 
   def read_theory_names(db: SQL.Database, session_name: String): List[String] = {
     val select =
-      Data.table.select(List(Data.theory_name), Data.where_equal(session_name), distinct = true)
+      Data.table.select(List(Data.theory_name),
+        Data.where_equal(session_name, name = THEORY_PARENTS)) +
+      " ORDER BY " + Data.theory_name
     db.using_statement(select)(stmt =>
       stmt.execute_query().iterator(_.string(Data.theory_name)).toList)
   }
@@ -235,9 +227,9 @@
             (results, true)
           })
 
-    def apply(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit = {
-      if (!progress.stopped) {
-        consumer.send(make_entry(session_name, args, body, cache) -> args.strict)
+    def make_entry(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit = {
+      if (!progress.stopped && !body.is_empty) {
+        consumer.send(Export.make_entry(session_name, args, body, cache) -> args.strict)
       }
     }
 
@@ -249,96 +241,203 @@
   }
 
 
-  /* abstract provider */
+  /* context for retrieval */
+
+  def context(db_context: Sessions.Database_Context): Context = new Context(db_context)
 
-  object Provider {
-    def none: Provider =
-      new Provider {
-        def apply(export_name: String): Option[Entry] = None
-        def focus(other_theory: String): Provider = this
+  def open_context(store: Sessions.Store): Context =
+    new Context(store.open_database_context()) { override def close(): Unit = db_context.close() }
 
-        override def toString: String = "none"
-      }
+  def open_session_context0(store: Sessions.Store, session: String): Session_Context =
+    open_context(store).open_session0(session, close_context = true)
 
-    def database_context(
-        context: Sessions.Database_Context,
-        session_hierarchy: List[String],
-        theory_name: String): Provider =
-      new Provider {
-        def apply(export_name: String): Option[Entry] =
-          context.read_export(session_hierarchy, theory_name, export_name)
+  def open_session_context(
+    store: Sessions.Store,
+    session_base_info: Sessions.Base_Info,
+    document_snapshot: Option[Document.Snapshot] = None
+  ): Session_Context = {
+    open_context(store).open_session(
+      session_base_info, document_snapshot = document_snapshot, close_context = true)
+  }
 
-        def focus(other_theory: String): Provider = this
+  class Session_Database private[Export](val session: String, val db: SQL.Database) {
+    def close(): Unit = ()
 
-        override def toString: String = context.toString
-      }
+    lazy val theory_names: List[String] = read_theory_names(db, session)
+    lazy val entry_names: List[Entry_Name] = read_entry_names(db, session)
+  }
 
-    def database(
-      db: SQL.Database,
-      cache: XML.Cache,
-      session_name: String,
-      theory_name: String
-    ) : Provider = {
-      new Provider {
-        def apply(export_name: String): Option[Entry] =
-          Entry_Name(session = session_name, theory = theory_name, name = export_name)
-            .read(db, cache)
+  class Context private[Export](protected val db_context: Sessions.Database_Context)
+  extends AutoCloseable {
+    context =>
+
+    override def toString: String = db_context.toString
+
+    def cache: Term.Cache = db_context.cache
 
-        def focus(other_theory: String): Provider =
-          if (other_theory == theory_name) this
-          else Provider.database(db, cache, session_name, other_theory)
+    def close(): Unit = ()
 
-        override def toString: String = db.toString
-      }
-    }
-
-    def snapshot(snapshot: Document.Snapshot): Provider =
-      new Provider {
-        def apply(export_name: String): Option[Entry] =
-          snapshot.exports_map.get(export_name)
+    def open_session0(session: String, close_context: Boolean = false): Session_Context =
+      open_session(Sessions.base_info0(session), close_context = close_context)
 
-        def focus(other_theory: String): Provider =
-          if (other_theory == snapshot.node_name.theory) this
-          else {
-            val node_name =
-              snapshot.version.nodes.theory_name(other_theory) getOrElse
-                error("Bad theory " + quote(other_theory))
-            Provider.snapshot(snapshot.state.snapshot(node_name))
-          }
-
-        override def toString: String = snapshot.toString
-      }
-
-    def directory(
-      dir: Path,
-      cache: XML.Cache,
-      session_name: String,
-      theory_name: String
-    ) : Provider = {
-      new Provider {
-        def apply(export_name: String): Option[Entry] =
-          Entry_Name(session = session_name, theory = theory_name, name = export_name)
-            .read(dir, cache)
-
-        def focus(other_theory: String): Provider =
-          if (other_theory == theory_name) this
-          else Provider.directory(dir, cache, session_name, other_theory)
-
-        override def toString: String = dir.toString
+    def open_session(
+      session_base_info: Sessions.Base_Info,
+      document_snapshot: Option[Document.Snapshot] = None,
+      close_context: Boolean = false
+    ): Session_Context = {
+      val session_name = session_base_info.check.base.session_name
+      val session_hierarchy = session_base_info.sessions_structure.build_hierarchy(session_name)
+      val session_databases =
+        db_context.database_server match {
+          case Some(db) => session_hierarchy.map(name => new Session_Database(name, db))
+          case None =>
+            val store = db_context.store
+            val attempts =
+              session_hierarchy.map(name => name -> store.try_open_database(name, server = false))
+            attempts.collectFirst({ case (name, None) => name }) match {
+              case Some(bad) =>
+                for ((_, Some(db)) <- attempts) db.close()
+                store.bad_database(bad)
+              case None =>
+                for ((name, Some(db)) <- attempts) yield {
+                  new Session_Database(name, db) { override def close(): Unit = this.db.close() }
+                }
+            }
+        }
+      new Session_Context(context, session_base_info, session_databases, document_snapshot) {
+        override def close(): Unit = {
+          session_databases.foreach(_.close())
+          if (close_context) context.close()
+        }
       }
     }
   }
 
-  trait Provider {
-    def apply(export_name: String): Option[Entry]
+  class Session_Context private[Export](
+    val export_context: Context,
+    session_base_info: Sessions.Base_Info,
+    db_hierarchy: List[Session_Database],
+    document_snapshot: Option[Document.Snapshot]
+  ) extends AutoCloseable {
+    session_context =>
+
+    def close(): Unit = ()
+
+    def cache: Term.Cache = export_context.cache
+
+    def sessions_structure: Sessions.Structure = session_base_info.sessions_structure
+
+    def session_base: Sessions.Base = session_base_info.base
+
+    def session_name: String =
+      if (document_snapshot.isDefined) Sessions.DRAFT
+      else session_base.session_name
+
+    def session_database(session: String = session_name): Option[Session_Database] =
+      db_hierarchy.find(_.session == session)
+
+    def session_db(session: String = session_name): Option[SQL.Database] =
+      session_database(session = session).map(_.db)
+
+    def session_stack: List[String] =
+      ((if (document_snapshot.isDefined) List(session_name) else Nil) :::
+        db_hierarchy.map(_.session)).reverse
+
+    private def select[A](
+      session: String,
+      select1: Entry_Name => Option[A],
+      select2: Session_Database => List[A]
+    ): List[A] = {
+      def sel(name: String): List[A] =
+        if (name == Sessions.DRAFT) {
+          (for {
+            snapshot <- document_snapshot.iterator
+            entry_name <- snapshot.all_exports.keysIterator
+            res <- select1(entry_name).iterator
+          } yield entry_name -> res).toList.sortBy(_._1.compound_name).map(_._2)
+        }
+        else { session_database(name).map(select2).getOrElse(Nil) }
+      if (session.nonEmpty) sel(session) else session_stack.flatMap(sel)
+    }
+
+    def entry_names(session: String = session_name): List[Entry_Name] =
+      select(session, Some(_), _.entry_names)
+
+    def theory_names(session: String = session_name): List[String] =
+      select(session, a => if(a.name == THEORY_PARENTS) Some(a.theory) else None, _.theory_names)
 
-    def uncompressed_yxml(export_name: String): XML.Body =
-      apply(export_name) match {
+    def get(theory: String, name: String): Option[Entry] =
+    {
+      def snapshot_entry: Option[Entry] =
+        for {
+          snapshot <- document_snapshot
+          entry_name = Entry_Name(session = Sessions.DRAFT, theory = theory, name = name)
+          entry <- snapshot.all_exports.get(entry_name)
+        } yield entry
+      def db_entry: Option[Entry] =
+        db_hierarchy.view.map(database =>
+          Export.Entry_Name(session = database.session, theory = theory, name = name)
+            .read(database.db, cache))
+          .collectFirst({ case Some(entry) => entry })
+
+      snapshot_entry orElse db_entry
+    }
+
+    def apply(theory: String, name: String, permissive: Boolean = false): Entry =
+      get(theory, name) match {
+        case None if permissive => empty_entry(theory, name)
+        case None => error("Missing export entry " + quote(compound_name(theory, name)))
+        case Some(entry) => entry
+      }
+
+    def theory(theory: String): Theory_Context =
+      new Theory_Context(session_context, theory)
+
+    def classpath(): List[File.Content_Bytes] = {
+      (for {
+        session <- session_stack.iterator
+        info <- sessions_structure.get(session).iterator
+        if info.export_classpath.nonEmpty
+        matcher = make_matcher(info.export_classpath)
+        entry_name <- entry_names(session = session).iterator
+        if matcher(entry_name)
+        entry <- get(entry_name.theory, entry_name.name).iterator
+      } yield File.Content(entry.entry_name.make_path(), entry.uncompressed)).toList
+  }
+
+  override def toString: String =
+      "Export.Session_Context(" + commas_quote(session_stack) + ")"
+  }
+
+  class Theory_Context private[Export](
+    val session_context: Session_Context,
+    val theory: String
+  ) {
+    def cache: Term.Cache = session_context.cache
+
+    def get(name: String): Option[Entry] = session_context.get(theory, name)
+    def apply(name: String, permissive: Boolean = false): Entry =
+      session_context.apply(theory, name, permissive = permissive)
+
+    def uncompressed_yxml(name: String): XML.Body =
+      get(name) match {
         case Some(entry) => entry.uncompressed_yxml
         case None => Nil
       }
 
-    def focus(other_theory: String): Provider
+    def document_id(): Option[Long] =
+      apply(DOCUMENT_ID, permissive = true).text match {
+        case Value.Long(id) => Some(id)
+        case _ => None
+      }
+
+    def files(): Option[(String, List[String])] =
+      split_lines(apply(FILES, permissive = true).text) match {
+        case Nil => None
+        case thy_file :: blobs_files => Some((thy_file, blobs_files))
+      }
+
+    override def toString: String = "Export.Theory_Context(" + quote(theory) + ")"
   }
 
 
@@ -354,28 +453,26 @@
     export_patterns: List[String] = Nil
   ): Unit = {
     using(store.open_database(session_name)) { db =>
-      db.transaction {
-        val entry_names = read_entry_names(db, session_name)
+      val entry_names = read_entry_names(db, session_name)
 
-        // list
-        if (export_list) {
-          for (entry_name <- entry_names) progress.echo(entry_name.compound_name)
-        }
+      // list
+      if (export_list) {
+        for (entry_name <- entry_names) progress.echo(entry_name.compound_name)
+      }
 
-        // export
-        if (export_patterns.nonEmpty) {
-          val matcher = make_matcher(export_patterns)
-          for {
-            entry_name <- entry_names if matcher(entry_name)
-            entry <- entry_name.read(db, store.cache)
-          } {
-            val path = export_dir + entry_name.make_path(prune = export_prune)
-            progress.echo("export " + path + (if (entry.executable) " (executable)" else ""))
-            Isabelle_System.make_directory(path.dir)
-            val bytes = entry.uncompressed
-            if (!path.is_file || Bytes.read(path) != bytes) Bytes.write(path, bytes)
-            File.set_executable(path, entry.executable)
-          }
+      // export
+      if (export_patterns.nonEmpty) {
+        val matcher = make_matcher(export_patterns)
+        for {
+          entry_name <- entry_names if matcher(entry_name)
+          entry <- entry_name.read(db, store.cache)
+        } {
+          val path = export_dir + entry_name.make_path(prune = export_prune)
+          progress.echo("export " + path + (if (entry.executable) " (executable)" else ""))
+          Isabelle_System.make_directory(path.dir)
+          val bytes = entry.uncompressed
+          if (!path.is_file || Bytes.read(path) != bytes) Bytes.write(path, bytes)
+          File.set_executable(path, entry.executable)
         }
       }
     }
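
The Export.Context / Session_Context / Theory_Context classes introduced above supersede the removed Export.Provider for retrieving session exports. For orientation, a minimal usage sketch under stated assumptions: store, session and theory are placeholders (a Sessions.Store, e.g. from Sessions.store(options), a session name and a theory name), and the session database is assumed to exist.

    // Sketch only: placeholder values store (Sessions.Store), session and theory (String).
    using(Export.open_session_context0(store, session)) { session_context =>
      val theory_context = session_context.theory(theory)
      // permissive = true yields an empty entry instead of an error when the export is absent
      val latex_source = theory_context(Export.DOCUMENT_LATEX, permissive = true).text
    }

The context is AutoCloseable, so using(...) closes the underlying databases (and, for open_session_context0, the enclosing Export.Context) when the block finishes.
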
--- a/src/Pure/Thy/export_theory.ML	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Thy/export_theory.ML	Sat Aug 06 23:13:35 2022 +0200
@@ -156,7 +156,7 @@
     val parents = Theory.parents_of thy;
     val _ =
       Export.export thy \<^path_binding>\<open>theory/parents\<close>
-        (XML.Encode.string (cat_lines (map Context.theory_long_name parents)));
+        (XML.Encode.string (terminate_lines (map Context.theory_long_name parents)));
 
 
     (* spec rules *)
--- a/src/Pure/Thy/export_theory.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Thy/export_theory.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -25,23 +25,15 @@
   }
 
   def read_session(
-    store: Sessions.Store,
-    sessions_structure: Sessions.Structure,
-    session_name: String,
+    session_context: Export.Session_Context,
     progress: Progress = new Progress,
-    cache: Term.Cache = Term.Cache.make()): Session = {
+    cache: Term.Cache = Term.Cache.make()
+  ): Session = {
     val thys =
-      sessions_structure.build_requirements(List(session_name)).flatMap(session =>
-        using(store.open_database(session)) { db =>
-          db.transaction {
-            for (theory <- Export.read_theory_names(db, session))
-            yield {
-              progress.echo("Reading theory " + theory)
-              val provider = Export.Provider.database(db, store.cache, session, theory)
-              read_theory(provider, session, theory, cache = cache)
-            }
-          }
-        })
+      for (theory <- session_context.theory_names()) yield {
+        progress.echo("Reading theory " + theory)
+        read_theory(session_context.theory(theory), cache = cache)
+      }
 
     val graph0 =
       thys.foldLeft(Graph.string[Option[Theory]]) {
@@ -55,7 +47,7 @@
           }
       }
 
-    Session(session_name, graph1)
+    Session(session_context.session_name, graph1)
   }
 
 
@@ -80,7 +72,7 @@
   ) {
     override def toString: String = name
 
-    def entity_iterator: Iterator[Entity[No_Content]] =
+    def entity_iterator: Iterator[Entity0] =
       types.iterator.map(_.no_content) ++
       consts.iterator.map(_.no_content) ++
       axioms.iterator.map(_.no_content) ++
@@ -109,64 +101,45 @@
         (for ((k, xs) <- others.iterator) yield cache.string(k) -> xs.map(_.cache(cache))).toMap)
   }
 
-  def read_theory_parents(provider: Export.Provider, theory_name: String): Option[List[String]] = {
-    if (theory_name == Thy_Header.PURE) Some(Nil)
-    else {
-      provider(Export.THEORY_PREFIX + "parents")
-        .map(entry => split_lines(entry.uncompressed.text))
-    }
-  }
+  def read_theory_parents(theory_context: Export.Theory_Context): Option[List[String]] =
+    theory_context.get(Export.THEORY_PARENTS)
+      .map(entry => Library.trim_split_lines(entry.uncompressed.text))
 
   def no_theory: Theory =
     Theory("", Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Map.empty)
 
   def read_theory(
-    provider: Export.Provider,
-    session_name: String,
-    theory_name: String,
+    theory_context: Export.Theory_Context,
+    permissive: Boolean = false,
     cache: Term.Cache = Term.Cache.none
   ): Theory = {
-    val parents =
-      read_theory_parents(provider, theory_name) getOrElse
+    val session_name = theory_context.session_context.session_name
+    val theory_name = theory_context.theory
+    read_theory_parents(theory_context) match {
+      case None if permissive => no_theory
+      case None =>
         error("Missing theory export in session " + quote(session_name) + ": " + quote(theory_name))
-    val theory =
-      Theory(theory_name, parents,
-        read_types(provider),
-        read_consts(provider),
-        read_axioms(provider),
-        read_thms(provider),
-        read_classes(provider),
-        read_locales(provider),
-        read_locale_dependencies(provider),
-        read_classrel(provider),
-        read_arities(provider),
-        read_constdefs(provider),
-        read_typedefs(provider),
-        read_datatypes(provider),
-        read_spec_rules(provider),
-        read_others(provider))
-    if (cache.no_cache) theory else theory.cache(cache)
-  }
-
-  def read_pure[A](store: Sessions.Store, read: (Export.Provider, String, String) => A): A = {
-    val session_name = Thy_Header.PURE
-    val theory_name = Thy_Header.PURE
-
-    using(store.open_database(session_name)) { db =>
-      db.transaction {
-        val provider = Export.Provider.database(db, store.cache, session_name, theory_name)
-        read(provider, session_name, theory_name)
-      }
+      case Some(parents) =>
+        val theory =
+          Theory(theory_name, parents,
+            read_types(theory_context),
+            read_consts(theory_context),
+            read_axioms(theory_context),
+            read_thms(theory_context),
+            read_classes(theory_context),
+            read_locales(theory_context),
+            read_locale_dependencies(theory_context),
+            read_classrel(theory_context),
+            read_arities(theory_context),
+            read_constdefs(theory_context),
+            read_typedefs(theory_context),
+            read_datatypes(theory_context),
+            read_spec_rules(theory_context),
+            read_others(theory_context))
+        if (cache.no_cache) theory else theory.cache(cache)
     }
   }
 
-  def read_pure_theory(store: Sessions.Store, cache: Term.Cache = Term.Cache.none): Theory =
-    read_pure(store, read_theory(_, _, _, cache = cache))
-
-  def read_pure_proof(
-      store: Sessions.Store, id: Thm_Id, cache: Term.Cache = Term.Cache.none): Option[Proof] =
-    read_pure(store, (provider, _, _) => read_proof(provider, id, cache = cache))
-
 
   /* entities */
 
@@ -213,7 +186,7 @@
     def the_content: A =
       if (content.isDefined) content.get else error("No content for " + toString)
 
-    def no_content: Entity[No_Content] = copy(content = None)
+    def no_content: Entity0 = copy(content = None)
 
     def cache(cache: Term.Cache): Entity[A] =
       Entity(
@@ -225,9 +198,10 @@
         serial,
         content.map(_.cache(cache)))
   }
+  type Entity0 = Entity[No_Content]
 
   def read_entities[A <: Content[A]](
-    provider: Export.Provider,
+    theory_context: Export.Theory_Context,
     export_name: String,
     kind: String,
     decode: XML.Decode.T[A]
@@ -247,7 +221,7 @@
         case _ => err()
       }
     }
-    provider.uncompressed_yxml(export_name).map(decode_entity)
+    theory_context.uncompressed_yxml(export_name).map(decode_entity)
   }
 
 
@@ -283,8 +257,8 @@
         abbrev.map(cache.typ))
   }
 
-  def read_types(provider: Export.Provider): List[Entity[Type]] =
-    read_entities(provider, Export.THEORY_PREFIX + "types", Markup.TYPE_NAME,
+  def read_types(theory_context: Export.Theory_Context): List[Entity[Type]] =
+    read_entities(theory_context, Export.THEORY_PREFIX + "types", Markup.TYPE_NAME,
       { body =>
         import XML.Decode._
         val (syntax, args, abbrev) =
@@ -311,8 +285,8 @@
         propositional)
   }
 
-  def read_consts(provider: Export.Provider): List[Entity[Const]] =
-    read_entities(provider, Export.THEORY_PREFIX + "consts", Markup.CONSTANT,
+  def read_consts(theory_context: Export.Theory_Context): List[Entity[Const]] =
+    read_entities(theory_context, Export.THEORY_PREFIX + "consts", Markup.CONSTANT,
       { body =>
         import XML.Decode._
         val (syntax, (typargs, (typ, (abbrev, propositional)))) =
@@ -351,16 +325,14 @@
     override def cache(cache: Term.Cache): Axiom = Axiom(prop.cache(cache))
   }
 
-  def read_axioms(provider: Export.Provider): List[Entity[Axiom]] =
-    read_entities(provider, Export.THEORY_PREFIX + "axioms", Markup.AXIOM,
+  def read_axioms(theory_context: Export.Theory_Context): List[Entity[Axiom]] =
+    read_entities(theory_context, Export.THEORY_PREFIX + "axioms", Markup.AXIOM,
       body => Axiom(decode_prop(body)))
 
 
   /* theorems */
 
-  sealed case class Thm_Id(serial: Long, theory_name: String) {
-    def pure: Boolean = theory_name == Thy_Header.PURE
-  }
+  sealed case class Thm_Id(serial: Long, theory_name: String)
 
   sealed case class Thm(
     prop: Prop,
@@ -374,8 +346,8 @@
         cache.proof(proof))
   }
 
-  def read_thms(provider: Export.Provider): List[Entity[Thm]] =
-    read_entities(provider, Export.THEORY_PREFIX + "thms", Kind.THM,
+  def read_thms(theory_context: Export.Theory_Context): List[Entity[Thm]] =
+    read_entities(theory_context, Export.THEORY_PREFIX + "thms", Kind.THM,
       { body =>
         import XML.Decode._
         import Term_XML.Decode._
@@ -400,11 +372,12 @@
   }
 
   def read_proof(
-    provider: Export.Provider,
+    session_context: Export.Session_Context,
     id: Thm_Id,
     cache: Term.Cache = Term.Cache.none
   ): Option[Proof] = {
-    for { entry <- provider.focus(id.theory_name)(Export.PROOFS_PREFIX + id.serial) }
+    val theory_context = session_context.theory(id.theory_name)
+    for { entry <- theory_context.get(Export.PROOFS_PREFIX + id.serial) }
     yield {
       val body = entry.uncompressed_yxml
       val (typargs, (args, (prop_body, proof_body))) = {
@@ -422,8 +395,7 @@
   }
 
   def read_proof_boxes(
-    store: Sessions.Store,
-    provider: Export.Provider,
+    session_context: Export.Session_Context,
     proof: Term.Proof,
     suppress: Thm_Id => Boolean = _ => false,
     cache: Term.Cache = Term.Cache.none
@@ -441,10 +413,7 @@
           seen += thm.serial
           val id = Thm_Id(thm.serial, thm.theory_name)
           if (!suppress(id)) {
-            val read =
-              if (id.pure) Export_Theory.read_pure_proof(store, id, cache = cache)
-              else Export_Theory.read_proof(provider, id, cache = cache)
-            read match {
+            Export_Theory.read_proof(session_context, id, cache = cache) match {
               case Some(p) =>
                 result += (thm.serial -> (id -> p))
                 boxes(Some((thm.serial, p.proof)), p.proof)
@@ -475,8 +444,8 @@
         axioms.map(_.cache(cache)))
   }
 
-  def read_classes(provider: Export.Provider): List[Entity[Class]] =
-    read_entities(provider, Export.THEORY_PREFIX + "classes", Markup.CLASS,
+  def read_classes(theory_context: Export.Theory_Context): List[Entity[Class]] =
+    read_entities(theory_context, Export.THEORY_PREFIX + "classes", Markup.CLASS,
       { body =>
         import XML.Decode._
         import Term_XML.Decode._
@@ -499,8 +468,8 @@
         axioms.map(_.cache(cache)))
   }
 
-  def read_locales(provider: Export.Provider): List[Entity[Locale]] =
-    read_entities(provider, Export.THEORY_PREFIX + "locales", Markup.LOCALE,
+  def read_locales(theory_context: Export.Theory_Context): List[Entity[Locale]] =
+    read_entities(theory_context, Export.THEORY_PREFIX + "locales", Markup.LOCALE,
       { body =>
         import XML.Decode._
         import Term_XML.Decode._
@@ -532,8 +501,11 @@
       subst_types.isEmpty && subst_terms.isEmpty
   }
 
-  def read_locale_dependencies(provider: Export.Provider): List[Entity[Locale_Dependency]] =
-    read_entities(provider, Export.THEORY_PREFIX + "locale_dependencies", Kind.LOCALE_DEPENDENCY,
+  def read_locale_dependencies(
+    theory_context: Export.Theory_Context
+  ): List[Entity[Locale_Dependency]] = {
+    read_entities(theory_context, Export.THEORY_PREFIX + "locale_dependencies",
+      Kind.LOCALE_DEPENDENCY,
       { body =>
         import XML.Decode._
         import Term_XML.Decode._
@@ -542,6 +514,7 @@
             pair(list(pair(pair(string, sort), typ)), list(pair(pair(string, typ), term))))))(body)
         Locale_Dependency(source, target, prefix, subst_types, subst_terms)
       })
+  }
 
 
   /* sort algebra */
@@ -551,8 +524,8 @@
       Classrel(cache.string(class1), cache.string(class2), prop.cache(cache))
   }
 
-  def read_classrel(provider: Export.Provider): List[Classrel] = {
-    val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "classrel")
+  def read_classrel(theory_context: Export.Theory_Context): List[Classrel] = {
+    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "classrel")
     val classrel = {
       import XML.Decode._
       list(pair(decode_prop, pair(string, string)))(body)
@@ -571,8 +544,8 @@
         prop.cache(cache))
   }
 
-  def read_arities(provider: Export.Provider): List[Arity] = {
-    val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "arities")
+  def read_arities(theory_context: Export.Theory_Context): List[Arity] = {
+    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "arities")
     val arities = {
       import XML.Decode._
       import Term_XML.Decode._
@@ -589,8 +562,8 @@
       Constdef(cache.string(name), cache.string(axiom_name))
   }
 
-  def read_constdefs(provider: Export.Provider): List[Constdef] = {
-    val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "constdefs")
+  def read_constdefs(theory_context: Export.Theory_Context): List[Constdef] = {
+    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "constdefs")
     val constdefs = {
       import XML.Decode._
       list(pair(string, string))(body)
@@ -618,8 +591,8 @@
         cache.string(axiom_name))
   }
 
-  def read_typedefs(provider: Export.Provider): List[Typedef] = {
-    val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "typedefs")
+  def read_typedefs(theory_context: Export.Theory_Context): List[Typedef] = {
+    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "typedefs")
     val typedefs = {
       import XML.Decode._
       import Term_XML.Decode._
@@ -652,8 +625,8 @@
         constructors.map({ case (term, typ) => (cache.term(term), cache.typ(typ)) }))
   }
 
-  def read_datatypes(provider: Export.Provider): List[Datatype] = {
-    val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "datatypes")
+  def read_datatypes(theory_context: Export.Theory_Context): List[Datatype] = {
+    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "datatypes")
     val datatypes = {
       import XML.Decode._
       import Term_XML.Decode._
@@ -742,8 +715,8 @@
         rules.map(cache.term))
   }
 
-  def read_spec_rules(provider: Export.Provider): List[Spec_Rule] = {
-    val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "spec_rules")
+  def read_spec_rules(theory_context: Export.Theory_Context): List[Spec_Rule] = {
+    val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "spec_rules")
     val spec_rules = {
       import XML.Decode._
       import Term_XML.Decode._
@@ -763,15 +736,15 @@
     override def cache(cache: Term.Cache): Other = this
   }
 
-  def read_others(provider: Export.Provider): Map[String, List[Entity[Other]]] = {
+  def read_others(theory_context: Export.Theory_Context): Map[String, List[Entity[Other]]] = {
     val kinds =
-      provider(Export.THEORY_PREFIX + "other_kinds") match {
+      theory_context.get(Export.THEORY_PREFIX + "other_kinds") match {
         case Some(entry) => split_lines(entry.uncompressed.text)
         case None => Nil
       }
     val other = Other()
     def read_other(kind: String): List[Entity[Other]] =
-      read_entities(provider, Export.THEORY_PREFIX + "other/" + kind, kind, _ => other)
+      read_entities(theory_context, Export.THEORY_PREFIX + "other/" + kind, kind, _ => other)
 
     kinds.map(kind => kind -> read_other(kind)).toMap
   }
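
Export_Theory now reads everything through a Theory_Context rather than an Export.Provider, and the Pure-specific read_pure* entry points are gone. A brief sketch of the reworked entry points, assuming an open Export.Session_Context as in the earlier sketch; theory_name is a placeholder:

    // Sketch only: session_context is an open Export.Session_Context, theory_name a String.
    val theory_context = session_context.theory(theory_name)
    // None if the theory/parents export is absent (e.g. theory not exported)
    val parents = Export_Theory.read_theory_parents(theory_context)
    // permissive = true falls back to Export_Theory.no_theory instead of raising an error
    val thy = Export_Theory.read_theory(theory_context, permissive = true, cache = session_context.cache)
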
--- a/src/Pure/Thy/presentation.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Thy/presentation.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -22,20 +22,16 @@
   abstract class HTML_Context {
     /* directory structure and resources */
 
+    def nodes: Nodes
     def root_dir: Path
     def theory_session(name: Document.Node.Name): Sessions.Info
 
     def session_dir(info: Sessions.Info): Path =
       root_dir + Path.explode(info.chapter_session)
-    def theory_path(name: Document.Node.Name): Path =
-      session_dir(theory_session(name)) + Path.explode(name.theory_base_name).html
+    def theory_dir(name: Document.Node.Name): Path =
+      session_dir(theory_session(name))
     def files_path(name: Document.Node.Name, path: Path): Path =
-      theory_path(name).dir + Path.explode("files") + path.squash.html
-
-    type Theory_Exports = Map[String, Entity_Context.Theory_Export]
-    def theory_exports: Theory_Exports = Map.empty
-    def theory_export(name: String): Entity_Context.Theory_Export =
-      theory_exports.getOrElse(name, Entity_Context.no_theory_export)
+      theory_dir(name) + Path.explode("files") + path.squash.html
 
 
     /* HTML content */
@@ -88,18 +84,96 @@
       language = Markup.Elements(Markup.Language.DOCUMENT))
 
 
+  /* per-session node info */
+
+  sealed case class File_Info(theory: String, is_theory: Boolean = false)
+
+  object Node_Info {
+    val empty: Node_Info = new Node_Info(Map.empty, Map.empty, Nil)
+    def make(theory: Export_Theory.Theory): Node_Info = {
+      val by_range = theory.entity_iterator.toList.groupBy(_.range)
+      val by_kind_name =
+        theory.entity_iterator.map(entity => ((entity.kind, entity.name), entity)).toMap
+      val others = theory.others.keySet.toList
+      new Node_Info(by_range, by_kind_name, others)
+    }
+  }
+
+  class Node_Info private(
+    by_range: Map[Symbol.Range, List[Export_Theory.Entity0]],
+    by_kind_name: Map[(String, String), Export_Theory.Entity0],
+    val others: List[String]) {
+    def for_range(range: Symbol.Range): List[Export_Theory.Entity0] =
+      by_range.getOrElse(range, Nil)
+    def get_kind_name(kind: String, name: String): Option[String] =
+      by_kind_name.get((kind, name)).map(_.kname)
+  }
+
+  object Nodes {
+    val empty: Nodes = new Nodes(Map.empty, Map.empty)
+
+    def read(
+      export_context: Export.Context,
+      deps: Sessions.Deps,
+      presentation_sessions: List[String]
+    ): Nodes = {
+
+      def open_session(session: String): Export.Session_Context =
+        export_context.open_session(deps.base_info(session))
+
+      type Batch = (String, List[String])
+      val batches =
+        presentation_sessions.foldLeft((Set.empty[String], List.empty[Batch]))(
+          { case ((seen, batches), session) =>
+              val thys = deps(session).loaded_theories.keys.filterNot(seen)
+              (seen ++ thys, (session, thys) :: batches)
+          })._2
+
+      val theory_node_info =
+        Par_List.map[Batch, List[(String, Node_Info)]](
+          { case (session, thys) =>
+              using(open_session(session)) { session_context =>
+                for (thy_name <- thys) yield {
+                  val theory_context = session_context.theory(thy_name)
+                  val theory =
+                    Export_Theory.read_theory(theory_context,
+                      permissive = true, cache = session_context.cache)
+                  thy_name -> Node_Info.make(theory)
+                }
+              }
+          }, batches).flatten.toMap
+
+      val files_info =
+        deps.sessions_structure.build_requirements(presentation_sessions).flatMap(session =>
+          using(open_session(session)) { session_context =>
+            session_context.theory_names().flatMap { theory =>
+              session_context.theory(theory).files() match {
+                case None => Nil
+                case Some((thy, blobs)) =>
+                  val thy_file_info = File_Info(theory, is_theory = true)
+                  (thy -> thy_file_info) :: blobs.map(_ -> File_Info(theory))
+              }
+            }
+          }).toMap
+
+      new Nodes(theory_node_info, files_info)
+    }
+  }
+
+  class Nodes private(
+    theory_node_info: Map[String, Node_Info],
+    val files_info: Map[String, File_Info]
+  ) {
+    def apply(name: String): Node_Info = theory_node_info.getOrElse(name, Node_Info.empty)
+    def get(name: String): Option[Node_Info] = theory_node_info.get(name)
+  }
+
+
   /* formal entities */
 
   type Entity = Export_Theory.Entity[Export_Theory.No_Content]
 
   object Entity_Context {
-    sealed case class Theory_Export(
-      entity_by_range: Map[Symbol.Range, List[Export_Theory.Entity[Export_Theory.No_Content]]],
-      entity_by_kind_name: Map[(String, String), Export_Theory.Entity[Export_Theory.No_Content]],
-      others: List[String])
-
-    val no_theory_export: Theory_Export = Theory_Export(Map.empty, Map.empty, Nil)
-
     object Theory_Ref {
       def unapply(props: Properties.T): Option[Document.Node.Name] =
         (props, props, props) match {
@@ -135,10 +209,7 @@
             case List(XML.Elem(Markup("span", List("id" -> _)), _)) => None
             case _ =>
               Some {
-                val entities =
-                  html_context.theory_exports.get(node.theory)
-                    .flatMap(_.entity_by_range.get(range))
-                    .getOrElse(Nil)
+                val entities = html_context.nodes(node.theory).for_range(range)
                 val body1 =
                   if (seen_ranges.contains(range)) {
                     HTML.entity_def(HTML.span(HTML.id(offset_id(range)), body))
@@ -165,10 +236,7 @@
         }
 
         private def logical_ref(thy_name: String, kind: String, name: String): Option[String] =
-          for {
-            thy <- html_context.theory_exports.get(thy_name)
-            entity <- thy.entity_by_kind_name.get((kind, name))
-          } yield entity.kname
+          html_context.nodes.get(thy_name).flatMap(_.get_kind_name(kind, name))
 
         override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = {
           props match {
@@ -456,72 +524,18 @@
     session_relative(deps, session0, session1)
   }
 
-  def theory_link(
-    deps: Sessions.Deps,
-    session0: String,
-    name: Document.Node.Name,
-    body: XML.Body,
-    anchor: Option[String] = None
-  ): Option[XML.Tree] = {
-    val session1 = deps(session0).theory_qualifier(name)
-    val info0 = deps.sessions_structure.get(session0)
-    val info1 = deps.sessions_structure.get(session1)
-    val fragment = if (anchor.isDefined) "#" + anchor.get else ""
-    if (info0.isDefined && info1.isDefined) {
-      Some(HTML.link(info0.get.relative_path(info1.get) + html_name(name) + fragment, body))
-    }
-    else None
-  }
-
-  def read_exports(
-    sessions: List[String],
+  def session_html(
+    session_context: Export.Session_Context,
     deps: Sessions.Deps,
-    db_context: Sessions.Database_Context
-  ): Map[String, Entity_Context.Theory_Export] = {
-    type Batch = (String, List[String])
-    val batches =
-      sessions.foldLeft((Set.empty[String], List.empty[Batch]))(
-        { case ((seen, batches), session) =>
-            val thys = deps(session).loaded_theories.keys.filterNot(seen)
-            (seen ++ thys, (session, thys) :: batches)
-        })._2
-    Par_List.map[Batch, List[(String, Entity_Context.Theory_Export)]](
-      { case (session, thys) =>
-          for (thy_name <- thys) yield {
-            val theory =
-              if (thy_name == Thy_Header.PURE) Export_Theory.no_theory
-              else {
-                val provider = Export.Provider.database_context(db_context, List(session), thy_name)
-                if (Export_Theory.read_theory_parents(provider, thy_name).isDefined) {
-                  Export_Theory.read_theory(provider, session, thy_name, cache = db_context.cache)
-                }
-                else Export_Theory.no_theory
-              }
-            val entity_by_range =
-              theory.entity_iterator.toList.groupBy(_.range)
-            val entity_by_kind_name =
-              theory.entity_iterator.map(entity => ((entity.kind, entity.name), entity)).toMap
-            val others = theory.others.keySet.toList
-            thy_name -> Entity_Context.Theory_Export(entity_by_range, entity_by_kind_name, others)
-          }
-      }, batches).flatten.toMap
-  }
-
-  def session_html(
-    session: String,
-    deps: Sessions.Deps,
-    db_context: Sessions.Database_Context,
     progress: Progress = new Progress,
     verbose: Boolean = false,
     html_context: HTML_Context,
     session_elements: Elements
   ): Unit = {
-    val info = deps.sessions_structure(session)
+    val session = session_context.session_name
+    val info = session_context.sessions_structure(session)
     val options = info.options
-    val base = deps(session)
-
-    val hierarchy = deps.sessions_structure.build_hierarchy(session)
-    val hierarchy_theories = hierarchy.reverse.flatMap(a => deps(a).used_theories.map(_._1))
+    val base = session_context.session_base
 
     val session_dir = Isabelle_System.make_directory(html_context.session_dir(info))
 
@@ -531,7 +545,8 @@
     val documents =
       for {
         doc <- info.document_variants
-        document <- db_context.input_database(session)(Document_Build.read_document(_, _, doc.name))
+        db <- session_context.session_db()
+        document <- Document_Build.read_document(db, session, doc.name)
       } yield {
         val doc_path = (session_dir + doc.path.pdf).expand
         if (verbose) progress.echo("Presenting document " + session + "/" + doc.name)
@@ -580,14 +595,13 @@
     def present_theory(name: Document.Node.Name): Option[XML.Body] = {
       progress.expose_interrupt()
 
-      Build_Job.read_theory(db_context, hierarchy, name.theory).flatMap { command =>
+      Build_Job.read_theory(session_context.theory(name.theory)).flatMap { command =>
         if (verbose) progress.echo("Presenting theory " + name)
         val snapshot = Document.State.init.snippet(command)
 
         val thy_elements =
           session_elements.copy(entity =
-            html_context.theory_export(name.theory).others
-              .foldLeft(session_elements.entity)(_ + _))
+            html_context.nodes(name.theory).others.foldLeft(session_elements.entity)(_ + _))
 
         val files_html =
           for {
@@ -607,13 +621,13 @@
             make_html(entity_context(name), thy_elements,
               snapshot.xml_markup(elements = thy_elements.html)))
 
-        val thy_session = html_context.theory_session(name)
-        val thy_dir = Isabelle_System.make_directory(html_context.session_dir(thy_session))
+        val thy_session = html_context.theory_session(name).name
+        val thy_dir = Isabelle_System.make_directory(html_context.theory_dir(name))
         val files =
           for { (src_path, file_html) <- files_html }
             yield {
-              seen_files.find(_.check(src_path, name, thy_session.name)) match {
-                case None => seen_files ::= Seen_File(src_path, name, thy_session.name)
+              seen_files.find(_.check(src_path, name, thy_session)) match {
+                case None => seen_files ::= Seen_File(src_path, name, thy_session)
                 case Some(seen_file) =>
                   error("Incoherent use of file name " + src_path + " as " + files_path(src_path) +
                     " in theory " + seen_file.thy_name + " vs. " + name)
@@ -634,7 +648,7 @@
           List(HTML.title(thy_title)), List(html_context.head(thy_title), thy_html),
           base = Some(html_context.root_dir))
 
-        if (thy_session.name == session) {
+        if (thy_session == session) {
           Some(
             List(HTML.link(html_name(name),
               HTML.text(name.theory_base_name) :::
@@ -644,7 +658,7 @@
       }
     }
 
-    val theories = base.session_theories.flatMap(present_theory)
+    val theories = base.proper_session_theories.flatMap(present_theory)
 
     val title = "Session " + session
     HTML.write_document(session_dir, "index.html",
--- a/src/Pure/Thy/sessions.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Thy/sessions.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -59,24 +59,26 @@
   /* base info and source dependencies */
 
   sealed case class Base(
-    pos: Position.T = Position.none,
+    session_name: String = "",
+    session_pos: Position.T = Position.none,
     session_directories: Map[JFile, String] = Map.empty,
     global_theories: Map[String, String] = Map.empty,
-    session_theories: List[Document.Node.Name] = Nil,
+    proper_session_theories: List[Document.Node.Name] = Nil,
     document_theories: List[Document.Node.Name] = Nil,
-    loaded_theories: Graph[String, Outer_Syntax] = Graph.string,
-    used_theories: List[(Document.Node.Name, Options)] = Nil,
+    loaded_theories: Graph[String, Outer_Syntax] = Graph.string,  // cumulative imports
+    used_theories: List[(Document.Node.Name, Options)] = Nil,  // new imports
     load_commands: Map[Document.Node.Name, List[Command_Span.Span]] = Map.empty,
     known_theories: Map[String, Document.Node.Entry] = Map.empty,
     known_loaded_files: Map[String, List[Path]] = Map.empty,
     overall_syntax: Outer_Syntax = Outer_Syntax.empty,
     imported_sources: List[(Path, SHA1.Digest)] = Nil,
-    sources: List[(Path, SHA1.Digest)] = Nil,
+    session_sources: List[(Path, SHA1.Digest)] = Nil,
     session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph,
     errors: List[String] = Nil
   ) {
     override def toString: String =
-      "Sessions.Base(loaded_theories = " + loaded_theories.size +
+      "Sessions.Base(session_name = " + quote(session_name) +
+        ", loaded_theories = " + loaded_theories.size +
         ", used_theories = " + used_theories.length + ")"
 
     def theory_qualifier(name: String): String =
@@ -108,15 +110,15 @@
     def imported_sources(name: String): List[SHA1.Digest] =
       session_bases(name).imported_sources.map(_._2)
 
-    def sources(name: String): List[SHA1.Digest] =
-      session_bases(name).sources.map(_._2)
+    def session_sources(name: String): List[SHA1.Digest] =
+      session_bases(name).session_sources.map(_._2)
 
     def errors: List[String] =
       (for {
         (name, base) <- session_bases.iterator
         if base.errors.nonEmpty
       } yield cat_lines(base.errors) +
-          "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.pos)
+          "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.session_pos)
       ).toList
 
     def check_errors: Deps =
@@ -124,6 +126,9 @@
         case Nil => this
         case errs => error(cat_lines(errs))
       }
+
+    def base_info(session: String): Base_Info =
+      Base_Info(base = apply(session), sessions_structure = sessions_structure, errors = errors)
   }
 
   def deps(sessions_structure: Structure,
@@ -151,8 +156,13 @@
       }
     }
 
+    val bootstrap_bases = {
+      val base = sessions_structure.bootstrap
+      Map(base.session_name -> base)
+    }
+
     val session_bases =
-      sessions_structure.imports_topological_order.foldLeft(Map("" -> sessions_structure.bootstrap)) {
+      sessions_structure.imports_topological_order.foldLeft(bootstrap_bases) {
         case (session_bases, session_name) =>
           progress.expose_interrupt()
 
@@ -172,13 +182,11 @@
 
             val overall_syntax = dependencies.overall_syntax
 
-            val session_theories =
+            val proper_session_theories =
               dependencies.theories.filter(name => deps_base.theory_qualifier(name) == session_name)
 
             val theory_files = dependencies.theories.map(_.path)
 
-            dependencies.load_commands
-
             val (load_commands, load_commands_errors) =
               try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
               catch { case ERROR(msg) => (Nil, List(msg)) }
@@ -273,7 +281,7 @@
                     case None => err("Unknown document theory")
                     case Some(name) =>
                       val qualifier = deps_base.theory_qualifier(name)
-                      if (session_theories.contains(name)) {
+                      if (proper_session_theories.contains(name)) {
                         err("Redundant document theory from this session:")
                       }
                       else if (build_hierarchy.contains(qualifier)) None
@@ -288,7 +296,7 @@
               val ok = info.dirs.map(_.canonical_file).toSet
               val bad =
                 (for {
-                  name <- session_theories.iterator
+                  name <- proper_session_theories.iterator
                   path = name.master_dir_path
                   if !ok(path.canonical_file)
                   path1 = File.relative_path(info.dir.canonical, path).getOrElse(path)
@@ -304,7 +312,7 @@
               val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p
               val errs4 =
                 (for {
-                  name <- session_theories.iterator
+                  name <- proper_session_theories.iterator
                   name1 <- resources.find_theory_node(name.theory)
                   if name.node != name1.node
                 } yield "Incoherent theory file import:\n  " + name.path + " vs. \n  " + name1.path)
@@ -326,10 +334,11 @@
 
             val base =
               Base(
-                pos = info.pos,
+                session_name = info.name,
+                session_pos = info.pos,
                 session_directories = sessions_structure.session_directories,
                 global_theories = sessions_structure.global_theories,
-                session_theories = session_theories,
+                proper_session_theories = proper_session_theories,
                 document_theories = document_theories,
                 loaded_theories = dependencies.loaded_theories,
                 used_theories = dependencies.theories_adjunct,
@@ -338,7 +347,7 @@
                 known_loaded_files = known_loaded_files,
                 overall_syntax = overall_syntax,
                 imported_sources = check_sources(imported_files),
-                sources = check_sources(session_files),
+                session_sources = check_sources(session_files),
                 session_graph_display = session_graph_display,
                 errors = dependencies.errors ::: load_commands_errors ::: import_errors :::
                   document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
@@ -360,15 +369,18 @@
   /* base info */
 
   sealed case class Base_Info(
-    session: String,
-    sessions_structure: Structure,
-    errors: List[String],
     base: Base,
-    infos: List[Info]
+    sessions_structure: Structure = Structure.empty,
+    errors: List[String] = Nil,
+    infos: List[Info] = Nil
   ) {
     def check: Base_Info = if (errors.isEmpty) this else error(cat_lines(errors))
+    def session: String = base.session_name
   }
 
+  def base_info0(session: String): Base_Info =
+    Base_Info(Base(session_name = session))
+
   def base_info(options: Options,
     session: String,
     progress: Progress = new Progress,
@@ -444,7 +456,8 @@
 
     val deps1 = Sessions.deps(selected_sessions1, progress = progress)
 
-    Base_Info(session1, full_sessions1, deps1.errors, deps1(session1), infos1)
+    Base_Info(deps1(session1), sessions_structure = full_sessions1,
+      errors = deps1.errors, infos = infos1)
   }
 
 
@@ -824,17 +837,19 @@
       deps
     }
 
+    def build_hierarchy(session: String): List[String] =
+      if (build_graph.defined(session)) build_graph.all_preds(List(session))
+      else List(session)
+
     def build_selection(sel: Selection): List[String] = selected(build_graph, sel)
     def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss)
     def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss)
     def build_topological_order: List[String] = build_graph.topological_order
-    def build_hierarchy(session: String): List[String] = build_graph.all_preds(List(session))
 
     def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel)
     def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss)
     def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss)
     def imports_topological_order: List[String] = imports_graph.topological_order
-    def imports_hierarchy(session: String): List[String] = imports_graph.all_preds(List(session))
 
     def bibtex_entries: List[(String, List[String])] =
       build_topological_order.flatMap(name =>
@@ -1194,77 +1209,18 @@
 
   class Database_Context private[Sessions](
     val store: Sessions.Store,
-    database_server: Option[SQL.Database]
+    val database_server: Option[SQL.Database]
   ) extends AutoCloseable {
     def cache: Term.Cache = store.cache
 
     def close(): Unit = database_server.foreach(_.close())
 
-    def output_database[A](session: String)(f: SQL.Database => A): A =
+    def database_output[A](session: String)(f: SQL.Database => A): A =
       database_server match {
         case Some(db) => f(db)
         case None => using(store.open_database(session, output = true))(f)
       }
 
-    def input_database[A](session: String)(f: (SQL.Database, String) => Option[A]): Option[A] =
-      database_server match {
-        case Some(db) => f(db, session)
-        case None =>
-          store.try_open_database(session) match {
-            case Some(db) => using(db)(f(_, session))
-            case None => None
-          }
-      }
-
-    def read_export(
-      sessions: List[String],
-      theory_name: String,
-      name: String
-    ): Option[Export.Entry] = {
-      val attempts =
-        database_server match {
-          case Some(db) =>
-            sessions.view.map(session_name =>
-              Export.Entry_Name(session = session_name, theory = theory_name, name = name)
-                .read(db, store.cache))
-          case None =>
-            sessions.view.map(session_name =>
-              store.try_open_database(session_name) match {
-                case Some(db) =>
-                  using(db) { _ =>
-                    Export.Entry_Name(session = session_name, theory = theory_name, name = name)
-                      .read(db, store.cache) }
-                case None => None
-              })
-        }
-      attempts.collectFirst({ case Some(entry) => entry })
-    }
-
-    def get_export(
-        session_hierarchy: List[String], theory_name: String, name: String): Export.Entry =
-      read_export(session_hierarchy, theory_name, name) getOrElse
-        Export.empty_entry(theory_name, name)
-
-    def get_classpath(structure: Structure, session: String): List[File.Content_Bytes] =
-    {
-      (for {
-        name <- structure.build_requirements(List(session))
-        patterns = structure(name).export_classpath if patterns.nonEmpty
-      } yield {
-        input_database(name)((db, _) =>
-          db.transaction {
-            val matcher = Export.make_matcher(patterns)
-            val res =
-              for {
-                entry_name <- Export.read_entry_names(db, name) if matcher(entry_name)
-                entry <- entry_name.read(db, store.cache)
-              } yield File.Content(entry.entry_name.make_path(), entry.uncompressed)
-            Some(res)
-          }
-        ).getOrElse(Nil)
-      }).flatten
-    }
-
     override def toString: String = {
       val s =
         database_server match {
@@ -1354,14 +1310,18 @@
               port = options.int("build_database_ssh_port"))),
         ssh_close = true)
 
-    def open_database_context(): Database_Context =
-      new Database_Context(store, if (database_server) Some(open_database_server()) else None)
+    def open_database_context(server: Boolean = database_server): Database_Context =
+      new Database_Context(store, if (server) Some(open_database_server()) else None)
 
-    def try_open_database(name: String, output: Boolean = false): Option[SQL.Database] = {
+    def try_open_database(
+      name: String,
+      output: Boolean = false,
+      server: Boolean = database_server
+    ): Option[SQL.Database] = {
       def check(db: SQL.Database): Option[SQL.Database] =
         if (output || session_info_exists(db)) Some(db) else { db.close(); None }
 
-      if (database_server) check(open_database_server())
+      if (server) check(open_database_server())
       else if (output) Some(SQLite.open_database(output_database(name)))
       else {
         (for {
@@ -1372,9 +1332,11 @@
       }
     }
 
+    def bad_database(name: String): Nothing =
+      error("Missing build database for session " + quote(name))
+
     def open_database(name: String, output: Boolean = false): SQL.Database =
-      try_open_database(name, output = output) getOrElse
-        error("Missing build database for session " + quote(name))
+      try_open_database(name, output = output) getOrElse bad_database(name)
 
     def clean_output(name: String): (Boolean, Boolean) = {
       val relevant_db =
@@ -1445,12 +1407,10 @@
     }
 
     def session_info_defined(db: SQL.Database, name: String): Boolean =
-      db.transaction {
-        session_info_exists(db) && {
-          db.using_statement(
-            Session_Info.table.select(List(Session_Info.session_name),
-              Session_Info.session_name.where_equal(name)))(stmt => stmt.execute_query().next())
-        }
+      session_info_exists(db) && {
+        db.using_statement(
+          Session_Info.table.select(List(Session_Info.session_name),
+            Session_Info.session_name.where_equal(name)))(stmt => stmt.execute_query().next())
       }
 
     def write_session_info(
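
Note on the reworked Sessions.Base_Info above: the session name now lives inside Base itself, and Deps gains a base_info accessor. A minimal usage sketch, not part of this changeset; the session name "HOL" is only an example and the entry points Options.init / Sessions.load_structure are assumed to behave as in the unchanged sources.

    val options = Options.init()
    val structure = Sessions.load_structure(options)
    // compute dependencies for a single selected session
    val deps = Sessions.deps(structure.selection(Sessions.Selection.session("HOL")))
    val base_info = deps.base_info("HOL")   // wraps deps("HOL") with sessions_structure and errors
    val session_name = base_info.session    // now derived from base.session_name
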
--- a/src/Pure/Tools/build.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Tools/build.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -54,7 +54,7 @@
           catch {
             case ERROR(msg) => ignore_error(msg)
             case exn: java.lang.Error => ignore_error(Exn.message(exn))
-            case _: XML.Error => ignore_error("")
+            case _: XML.Error => ignore_error("XML.Error")
           }
           finally { db.close() }
       }
@@ -203,7 +203,7 @@
     def sources_stamp(deps: Sessions.Deps, session_name: String): String = {
       val digests =
         full_sessions(session_name).meta_digest ::
-        deps.sources(session_name) :::
+        deps.session_sources(session_name) :::
         deps.imported_sources(session_name)
       SHA1.digest_set(digests).toString
     }
@@ -247,7 +247,7 @@
       val source_files =
         (for {
           (_, base) <- deps.session_bases.iterator
-          (path, _) <- base.sources.iterator
+          (path, _) <- base.session_sources.iterator
         } yield path).toList
       val exclude_files = List(Path.explode("$POLYML_EXE")).map(_.canonical_file)
       val unknown_files =
@@ -494,9 +494,9 @@
           Presentation.update_chapter(presentation_dir, chapter, entries)
         }
 
-        using(store.open_database_context()) { db_context =>
-          val exports =
-            Presentation.read_exports(presentation_sessions.map(_.name), deps, db_context)
+        using(Export.open_context(store)) { export_context =>
+          val presentation_nodes =
+            Presentation.Nodes.read(export_context, deps, presentation_sessions.map(_.name))
 
           Par_List.map({ (session: String) =>
             progress.expose_interrupt()
@@ -504,15 +504,19 @@
 
             val html_context =
               new Presentation.HTML_Context {
+                override def nodes: Presentation.Nodes = presentation_nodes
                 override def root_dir: Path = presentation_dir
                 override def theory_session(name: Document.Node.Name): Sessions.Info =
                   deps.sessions_structure(deps(session).theory_qualifier(name))
-                override def theory_exports: Theory_Exports = exports
               }
-            Presentation.session_html(
-              session, deps, db_context, progress = progress,
-              verbose = verbose, html_context = html_context,
-              Presentation.elements1)
+
+            using(export_context.open_session(deps.base_info(session))) { session_context =>
+              Presentation.session_html(session_context, deps,
+                progress = progress,
+                verbose = verbose,
+                html_context = html_context,
+                Presentation.elements1)
+            }
           }, presentation_sessions.map(_.name))
         }
       }
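
The presentation step above now runs inside an export context instead of a raw database context. A condensed sketch of that idiom, assuming store, deps, progress, verbose, html_context and a session name are already in scope and that html_context overrides nodes with the presentation nodes, as in the hunk above.

    using(Export.open_context(store)) { export_context =>
      // read the export nodes once for the sessions being presented
      val presentation_nodes =
        Presentation.Nodes.read(export_context, deps, List(session))
      using(export_context.open_session(deps.base_info(session))) { session_context =>
        Presentation.session_html(session_context, deps,
          progress = progress, verbose = verbose,
          html_context = html_context, Presentation.elements1)
      }
    }
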
--- a/src/Pure/Tools/build_job.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Tools/build_job.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -11,67 +11,64 @@
 
 
 object Build_Job {
-  /* theory markup/messages from database */
+  /* theory markup/messages from session database */
 
   def read_theory(
-    db_context: Sessions.Database_Context,
-    session_hierarchy: List[String],
-    theory: String,
+    theory_context: Export.Theory_Context,
     unicode_symbols: Boolean = false
   ): Option[Command] = {
-    def read(name: String): Export.Entry =
-      db_context.get_export(session_hierarchy, theory, name)
+    def read(name: String): Export.Entry = theory_context(name, permissive = true)
 
     def read_xml(name: String): XML.Body =
       YXML.parse_body(
         Symbol.output(unicode_symbols, UTF8.decode_permissive(read(name).uncompressed)),
-        cache = db_context.cache)
-
-    (read(Export.DOCUMENT_ID).text, split_lines(read(Export.FILES).text)) match {
-      case (Value.Long(id), thy_file :: blobs_files) =>
-        val node_name = Resources.file_node(Path.explode(thy_file), theory = theory)
-
-        val results =
-          Command.Results.make(
-            for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES))
-              yield i -> elem)
+        cache = theory_context.cache)
 
-        val blobs =
-          blobs_files.map { file =>
-            val path = Path.explode(file)
-            val name = Resources.file_node(path)
-            val src_path = File.relative_path(node_name.master_dir_path, path).getOrElse(path)
-            Command.Blob(name, src_path, None)
-          }
-        val blobs_xml =
-          for (i <- (1 to blobs.length).toList)
-            yield read_xml(Export.MARKUP + i)
+    for {
+      id <- theory_context.document_id()
+      (thy_file, blobs_files) <- theory_context.files()
+    }
+    yield {
+      val node_name = Resources.file_node(Path.explode(thy_file), theory = theory_context.theory)
+
+      val results =
+        Command.Results.make(
+          for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES))
+            yield i -> elem)
 
-        val blobs_info =
-          Command.Blobs_Info(
-            for { (Command.Blob(name, src_path, _), xml) <- blobs zip blobs_xml }
-              yield {
-                val text = XML.content(xml)
-                val chunk = Symbol.Text_Chunk(text)
-                val digest = SHA1.digest(Symbol.encode(text))
-                Exn.Res(Command.Blob(name, src_path, Some((digest, chunk))))
-              })
-
-        val thy_xml = read_xml(Export.MARKUP)
-        val thy_source = XML.content(thy_xml)
+      val blobs =
+        blobs_files.map { file =>
+          val path = Path.explode(file)
+          val name = Resources.file_node(path)
+          val src_path = File.relative_path(node_name.master_dir_path, path).getOrElse(path)
+          Command.Blob(name, src_path, None)
+        }
+      val blobs_xml =
+        for (i <- (1 to blobs.length).toList)
+          yield read_xml(Export.MARKUP + i)
 
-        val markups_index =
-          Command.Markup_Index.markup :: blobs.map(Command.Markup_Index.blob)
-        val markups =
-          Command.Markups.make(
-            for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml))
-            yield index -> Markup_Tree.from_XML(xml))
+      val blobs_info =
+        Command.Blobs_Info(
+          for { (Command.Blob(name, src_path, _), xml) <- blobs zip blobs_xml }
+            yield {
+              val text = XML.content(xml)
+              val chunk = Symbol.Text_Chunk(text)
+              val digest = SHA1.digest(Symbol.encode(text))
+              Exn.Res(Command.Blob(name, src_path, Some((digest, chunk))))
+            })
 
-        val command =
-          Command.unparsed(thy_source, theory = true, id = id, node_name = node_name,
-            blobs_info = blobs_info, results = results, markups = markups)
-        Some(command)
-      case _ => None
+      val thy_xml = read_xml(Export.MARKUP)
+      val thy_source = XML.content(thy_xml)
+
+      val markups_index =
+        Command.Markup_Index.markup :: blobs.map(Command.Markup_Index.blob)
+      val markups =
+        Command.Markups.make(
+          for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml))
+          yield index -> Markup_Tree.from_XML(xml))
+
+      Command.unparsed(thy_source, theory = true, id = id, node_name = node_name,
+        blobs_info = blobs_info, results = results, markups = markups)
     }
   }
 
@@ -92,13 +89,14 @@
     val store = Sessions.store(options)
     val session = new Session(options, Resources.empty)
 
-    using(store.open_database_context()) { db_context =>
+    using(Export.open_session_context0(store, session_name)) { session_context =>
       val result =
-        db_context.input_database(session_name) { (db, _) =>
-          val theories = store.read_theories(db, session_name)
-          val errors = store.read_errors(db, session_name)
-          store.read_build(db, session_name).map(info => (theories, errors, info.return_code))
-        }
+        for {
+          db <- session_context.session_db()
+          theories = store.read_theories(db, session_name)
+          errors = store.read_errors(db, session_name)
+          info <- store.read_build(db, session_name)
+        } yield (theories, errors, info.return_code)
       result match {
         case None => error("Missing build database for session " + quote(session_name))
         case Some((used_theories, errors, rc)) =>
@@ -108,10 +106,11 @@
           }
           val print_theories =
             if (theories.isEmpty) used_theories else used_theories.filter(theories.toSet)
+
           for (thy <- print_theories) {
             val thy_heading = "\nTheory " + quote(thy) + ":"
-            read_theory(db_context, List(session_name), thy, unicode_symbols = unicode_symbols)
-            match {
+
+            read_theory(session_context.theory(thy), unicode_symbols = unicode_symbols) match {
               case None => progress.echo(thy_heading + " MISSING")
               case Some(command) =>
                 val snapshot = Document.State.init.snippet(command)
@@ -315,7 +314,7 @@
           private def export_(msg: Prover.Protocol_Output): Boolean =
             msg.properties match {
               case Protocol.Export(args) =>
-                export_consumer(session_name, args, msg.chunk)
+                export_consumer.make_entry(session_name, args, msg.chunk)
                 true
               case _ => false
             }
@@ -353,8 +352,8 @@
                 val theory_name = snapshot.node_name.theory
                 val args =
                   Protocol.Export.Args(theory_name = theory_name, name = name, compress = compress)
-                val bytes = Bytes(Symbol.encode(YXML.string_of_body(xml)))
-                if (!bytes.is_empty) export_consumer(session_name, args, bytes)
+                val body = Bytes(Symbol.encode(YXML.string_of_body(xml)))
+                export_consumer.make_entry(session_name, args, body)
               }
             }
             def export_text(name: String, text: String, compress: Boolean = true): Unit =
@@ -444,11 +443,14 @@
           if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok && info.documents.nonEmpty) {
             using(store.open_database_context()) { db_context =>
               val documents =
-                Document_Build.build_documents(
-                  Document_Build.context(session_name, deps, db_context, progress = progress),
-                  output_sources = info.document_output,
-                  output_pdf = info.document_output)
-              db_context.output_database(session_name)(db =>
+                using(Export.context(db_context).open_session(deps.base_info(session_name))) {
+                  session_context =>
+                    Document_Build.build_documents(
+                      Document_Build.context(session_context, progress = progress),
+                      output_sources = info.document_output,
+                      output_pdf = info.document_output)
+                }
+              db_context.database_output(session_name)(db =>
                 documents.foreach(_.write(db, session_name)))
               (documents.flatMap(_.log_lines), Nil)
             }
--- a/src/Pure/Tools/profiling_report.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/Tools/profiling_report.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -17,10 +17,8 @@
   ): Unit = {
     val store = Sessions.store(options)
 
-    using(store.open_database_context()) { db_context =>
-      val result =
-        db_context.input_database(session)((db, name) => Some(store.read_theories(db, name)))
-      result match {
+    using(Export.open_session_context0(store, session)) { session_context =>
+      session_context.session_db().map(db => store.read_theories(db, session)) match {
         case None => error("Missing build database for session " + quote(session))
         case Some(used_theories) =>
           theories.filterNot(used_theories.toSet) match {
@@ -31,7 +29,7 @@
             (for {
               thy <- used_theories.iterator
               if theories.isEmpty || theories.contains(thy)
-              command <- Build_Job.read_theory(db_context, List(session), thy).iterator
+              command <- Build_Job.read_theory(session_context.theory(thy)).iterator
               snapshot = Document.State.init.snippet(command)
               (Protocol.ML_Profiling(report), _) <- snapshot.messages.iterator
             } yield if (clean_name) report.clean_name else report).toList
--- a/src/Pure/library.ML	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Pure/library.ML	Sat Aug 06 23:13:35 2022 +0200
@@ -145,6 +145,7 @@
   val commas: string list -> string
   val commas_quote: string list -> string
   val cat_lines: string list -> string
+  val terminate_lines: string list -> string
   val space_explode: string -> string -> string list
   val split_lines: string -> string list
   val plain_words: string -> string
@@ -748,6 +749,8 @@
 
 val cat_lines = space_implode "\n";
 
+fun terminate_lines lines = cat_lines lines ^ "\n";
+
 (*space_explode "." "h.e..l.lo" = ["h", "e", "", "l", "lo"]*)
 fun space_explode _ "" = []
   | space_explode sep s = String.fields (fn c => str c = sep) s;
--- a/src/Tools/VSCode/src/preview_panel.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Tools/VSCode/src/preview_panel.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -30,6 +30,7 @@
                 else {
                   val html_context =
                     new Presentation.HTML_Context {
+                      override def nodes: Presentation.Nodes = Presentation.Nodes.empty
                       override def root_dir: Path = Path.current
                       override def theory_session(name: Document.Node.Name): Sessions.Info =
                         resources.sessions_structure(resources.session_base.theory_qualifier(name))
--- a/src/Tools/jEdit/src/document_model.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Tools/jEdit/src/document_model.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -315,8 +315,8 @@
         val snapshot = model.await_stable_snapshot()
         val html_context =
           new Presentation.HTML_Context {
+            override def nodes: Presentation.Nodes = Presentation.Nodes.empty
             override def root_dir: Path = Path.current
-
             override def theory_session(name: Document.Node.Name): Sessions.Info =
               PIDE.resources.sessions_structure(
                 PIDE.resources.session_base.theory_qualifier(name))
--- a/src/Tools/jEdit/src/jedit_resources.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Tools/jEdit/src/jedit_resources.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -28,7 +28,6 @@
 
 class JEdit_Resources private(val session_base_info: Sessions.Base_Info)
 extends Resources(session_base_info.sessions_structure, session_base_info.base) {
-  def session_name: String = session_base_info.session
   def session_errors: List[String] = session_base_info.errors
 
 
--- a/src/Tools/jEdit/src/jedit_sessions.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Tools/jEdit/src/jedit_sessions.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -125,7 +125,7 @@
     no_build: Boolean = false
   ): Int = {
     Build.build(session_options(options),
-      selection = Sessions.Selection.session(PIDE.resources.session_name),
+      selection = Sessions.Selection.session(PIDE.resources.session_base.session_name),
       progress = progress, build_heap = true, no_build = no_build, dirs = session_dirs,
       infos = PIDE.resources.session_base_info.infos).rc
   }
@@ -139,7 +139,7 @@
     session.phase_changed += PIDE.plugin.session_phase_changed
 
     Isabelle_Process.start(session, options, sessions_structure, store,
-      logic = PIDE.resources.session_name,
+      logic = PIDE.resources.session_base.session_name,
       modes =
         (space_explode(',', options.string("jedit_print_mode")) :::
          space_explode(',', Isabelle_System.getenv("JEDIT_PRINT_MODE"))).reverse)
--- a/src/Tools/jEdit/src/main_plugin.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Tools/jEdit/src/main_plugin.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -242,7 +242,7 @@
             val model = Document_Model.init(session, node_name, buffer)
             for {
               text_area <- JEdit_Lib.jedit_text_areas(buffer)
-              if Document_View.get(text_area).map(_.model) != Some(model)
+              if !Document_View.get(text_area).map(_.model).contains(model)
             } Document_View.init(model, text_area)
           }
         }
@@ -284,7 +284,7 @@
   private def init_title(view: View): Unit = {
     val title =
       proper_string(Isabelle_System.getenv("ISABELLE_IDENTIFIER")).getOrElse("Isabelle") +
-        "/" + PIDE.resources.session_name
+        "/" + PIDE.resources.session_base.session_name
     val marker = "\u200B"
 
     val old_title = view.getViewConfig.title
@@ -298,7 +298,7 @@
 
     if (startup_failure.isDefined && !startup_notified) {
       message match {
-        case msg: EditorStarted =>
+        case _: EditorStarted =>
           GUI.error_dialog(null, "Isabelle plugin startup failure",
             GUI.scrollable_text(Exn.message(startup_failure.get)),
             "Prover IDE inactive!")
@@ -309,7 +309,7 @@
 
     if (startup_failure.isEmpty) {
       message match {
-        case msg: EditorStarted =>
+        case _: EditorStarted =>
           if (resources.session_errors.nonEmpty) {
             GUI.warning_dialog(jEdit.getActiveView,
               "Bad session structure: may cause problems with theory imports",
@@ -369,7 +369,7 @@
               Completion_Popup.Text_Area.exit(text_area)
           }
 
-        case msg: PropertiesChanged =>
+        case _: PropertiesChanged =>
           for {
             view <- JEdit_Lib.jedit_views()
             edit_pane <- JEdit_Lib.jedit_edit_panes(view)
--- a/src/Tools/jEdit/src/session_build.scala	Tue Aug 02 13:24:19 2022 +0200
+++ b/src/Tools/jEdit/src/session_build.scala	Sat Aug 06 23:13:35 2022 +0200
@@ -155,7 +155,8 @@
     setVisible(true)
 
     Isabelle_Thread.fork(name = "session_build") {
-      progress.echo("Build started for Isabelle/" + PIDE.resources.session_name + " ...")
+      progress.echo("Build started for Isabelle/" +
+        PIDE.resources.session_base.session_name + " ...")
 
       val (out, rc) =
         try { ("", JEdit_Sessions.session_build(options, progress = progress)) }